zexus 1.6.8 → 1.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (177)
  1. package/README.md +12 -5
  2. package/package.json +1 -1
  3. package/src/__init__.py +7 -0
  4. package/src/zexus/__init__.py +1 -1
  5. package/src/zexus/__pycache__/__init__.cpython-312.pyc +0 -0
  6. package/src/zexus/__pycache__/capability_system.cpython-312.pyc +0 -0
  7. package/src/zexus/__pycache__/debug_sanitizer.cpython-312.pyc +0 -0
  8. package/src/zexus/__pycache__/environment.cpython-312.pyc +0 -0
  9. package/src/zexus/__pycache__/error_reporter.cpython-312.pyc +0 -0
  10. package/src/zexus/__pycache__/input_validation.cpython-312.pyc +0 -0
  11. package/src/zexus/__pycache__/lexer.cpython-312.pyc +0 -0
  12. package/src/zexus/__pycache__/module_cache.cpython-312.pyc +0 -0
  13. package/src/zexus/__pycache__/module_manager.cpython-312.pyc +0 -0
  14. package/src/zexus/__pycache__/object.cpython-312.pyc +0 -0
  15. package/src/zexus/__pycache__/security.cpython-312.pyc +0 -0
  16. package/src/zexus/__pycache__/security_enforcement.cpython-312.pyc +0 -0
  17. package/src/zexus/__pycache__/syntax_validator.cpython-312.pyc +0 -0
  18. package/src/zexus/__pycache__/zexus_ast.cpython-312.pyc +0 -0
  19. package/src/zexus/__pycache__/zexus_token.cpython-312.pyc +0 -0
  20. package/src/zexus/access_control_system/__pycache__/__init__.cpython-312.pyc +0 -0
  21. package/src/zexus/access_control_system/__pycache__/access_control.cpython-312.pyc +0 -0
  22. package/src/zexus/advanced_types.py +17 -2
  23. package/src/zexus/blockchain/__init__.py +411 -0
  24. package/src/zexus/blockchain/accelerator.py +1160 -0
  25. package/src/zexus/blockchain/chain.py +660 -0
  26. package/src/zexus/blockchain/consensus.py +821 -0
  27. package/src/zexus/blockchain/contract_vm.py +1019 -0
  28. package/src/zexus/blockchain/crypto.py +79 -14
  29. package/src/zexus/blockchain/events.py +526 -0
  30. package/src/zexus/blockchain/loadtest.py +721 -0
  31. package/src/zexus/blockchain/monitoring.py +350 -0
  32. package/src/zexus/blockchain/mpt.py +716 -0
  33. package/src/zexus/blockchain/multichain.py +951 -0
  34. package/src/zexus/blockchain/multiprocess_executor.py +338 -0
  35. package/src/zexus/blockchain/network.py +886 -0
  36. package/src/zexus/blockchain/node.py +666 -0
  37. package/src/zexus/blockchain/rpc.py +1203 -0
  38. package/src/zexus/blockchain/rust_bridge.py +421 -0
  39. package/src/zexus/blockchain/storage.py +423 -0
  40. package/src/zexus/blockchain/tokens.py +750 -0
  41. package/src/zexus/blockchain/upgradeable.py +1004 -0
  42. package/src/zexus/blockchain/verification.py +1602 -0
  43. package/src/zexus/blockchain/wallet.py +621 -0
  44. package/src/zexus/capability_system.py +184 -9
  45. package/src/zexus/cli/__pycache__/main.cpython-312.pyc +0 -0
  46. package/src/zexus/cli/main.py +383 -34
  47. package/src/zexus/cli/zpm.py +1 -1
  48. package/src/zexus/compiler/__pycache__/bytecode.cpython-312.pyc +0 -0
  49. package/src/zexus/compiler/__pycache__/lexer.cpython-312.pyc +0 -0
  50. package/src/zexus/compiler/__pycache__/parser.cpython-312.pyc +0 -0
  51. package/src/zexus/compiler/__pycache__/semantic.cpython-312.pyc +0 -0
  52. package/src/zexus/compiler/__pycache__/zexus_ast.cpython-312.pyc +0 -0
  53. package/src/zexus/compiler/bytecode.py +124 -7
  54. package/src/zexus/compiler/compat_runtime.py +6 -2
  55. package/src/zexus/compiler/lexer.py +16 -5
  56. package/src/zexus/compiler/parser.py +108 -7
  57. package/src/zexus/compiler/semantic.py +18 -19
  58. package/src/zexus/compiler/zexus_ast.py +26 -1
  59. package/src/zexus/concurrency_system.py +79 -0
  60. package/src/zexus/config.py +54 -0
  61. package/src/zexus/crypto_bridge.py +244 -8
  62. package/src/zexus/dap/__init__.py +10 -0
  63. package/src/zexus/dap/__main__.py +4 -0
  64. package/src/zexus/dap/dap_server.py +391 -0
  65. package/src/zexus/dap/debug_engine.py +298 -0
  66. package/src/zexus/environment.py +112 -9
  67. package/src/zexus/evaluator/__pycache__/bytecode_compiler.cpython-312.pyc +0 -0
  68. package/src/zexus/evaluator/__pycache__/core.cpython-312.pyc +0 -0
  69. package/src/zexus/evaluator/__pycache__/expressions.cpython-312.pyc +0 -0
  70. package/src/zexus/evaluator/__pycache__/functions.cpython-312.pyc +0 -0
  71. package/src/zexus/evaluator/__pycache__/resource_limiter.cpython-312.pyc +0 -0
  72. package/src/zexus/evaluator/__pycache__/statements.cpython-312.pyc +0 -0
  73. package/src/zexus/evaluator/__pycache__/unified_execution.cpython-312.pyc +0 -0
  74. package/src/zexus/evaluator/__pycache__/utils.cpython-312.pyc +0 -0
  75. package/src/zexus/evaluator/bytecode_compiler.py +457 -37
  76. package/src/zexus/evaluator/core.py +644 -50
  77. package/src/zexus/evaluator/expressions.py +358 -62
  78. package/src/zexus/evaluator/functions.py +458 -20
  79. package/src/zexus/evaluator/resource_limiter.py +4 -4
  80. package/src/zexus/evaluator/statements.py +774 -122
  81. package/src/zexus/evaluator/unified_execution.py +573 -72
  82. package/src/zexus/evaluator/utils.py +14 -2
  83. package/src/zexus/evaluator_original.py +1 -1
  84. package/src/zexus/event_loop.py +186 -0
  85. package/src/zexus/lexer.py +742 -458
  86. package/src/zexus/lsp/__init__.py +1 -1
  87. package/src/zexus/lsp/definition_provider.py +163 -9
  88. package/src/zexus/lsp/server.py +22 -8
  89. package/src/zexus/lsp/symbol_provider.py +182 -9
  90. package/src/zexus/module_cache.py +239 -9
  91. package/src/zexus/module_manager.py +129 -1
  92. package/src/zexus/object.py +76 -6
  93. package/src/zexus/parser/__pycache__/parser.cpython-312.pyc +0 -0
  94. package/src/zexus/parser/__pycache__/strategy_context.cpython-312.pyc +0 -0
  95. package/src/zexus/parser/__pycache__/strategy_structural.cpython-312.pyc +0 -0
  96. package/src/zexus/parser/parser.py +1349 -408
  97. package/src/zexus/parser/strategy_context.py +755 -58
  98. package/src/zexus/parser/strategy_structural.py +121 -21
  99. package/src/zexus/persistence.py +15 -1
  100. package/src/zexus/renderer/__init__.py +61 -0
  101. package/src/zexus/renderer/__pycache__/__init__.cpython-312.pyc +0 -0
  102. package/src/zexus/renderer/__pycache__/backend.cpython-312.pyc +0 -0
  103. package/src/zexus/renderer/__pycache__/canvas.cpython-312.pyc +0 -0
  104. package/src/zexus/renderer/__pycache__/color_system.cpython-312.pyc +0 -0
  105. package/src/zexus/renderer/__pycache__/layout.cpython-312.pyc +0 -0
  106. package/src/zexus/renderer/__pycache__/main_renderer.cpython-312.pyc +0 -0
  107. package/src/zexus/renderer/__pycache__/painter.cpython-312.pyc +0 -0
  108. package/src/zexus/renderer/backend.py +261 -0
  109. package/src/zexus/renderer/canvas.py +78 -0
  110. package/src/zexus/renderer/color_system.py +201 -0
  111. package/src/zexus/renderer/graphics.py +31 -0
  112. package/src/zexus/renderer/layout.py +222 -0
  113. package/src/zexus/renderer/main_renderer.py +66 -0
  114. package/src/zexus/renderer/painter.py +30 -0
  115. package/src/zexus/renderer/tk_backend.py +208 -0
  116. package/src/zexus/renderer/web_backend.py +260 -0
  117. package/src/zexus/runtime/__init__.py +10 -2
  118. package/src/zexus/runtime/__pycache__/__init__.cpython-312.pyc +0 -0
  119. package/src/zexus/runtime/__pycache__/async_runtime.cpython-312.pyc +0 -0
  120. package/src/zexus/runtime/__pycache__/load_manager.cpython-312.pyc +0 -0
  121. package/src/zexus/runtime/file_flags.py +137 -0
  122. package/src/zexus/runtime/load_manager.py +368 -0
  123. package/src/zexus/safety/__pycache__/__init__.cpython-312.pyc +0 -0
  124. package/src/zexus/safety/__pycache__/memory_safety.cpython-312.pyc +0 -0
  125. package/src/zexus/security.py +424 -34
  126. package/src/zexus/stdlib/fs.py +23 -18
  127. package/src/zexus/stdlib/http.py +289 -186
  128. package/src/zexus/stdlib/sockets.py +207 -163
  129. package/src/zexus/stdlib/websockets.py +282 -0
  130. package/src/zexus/stdlib_integration.py +369 -2
  131. package/src/zexus/strategy_recovery.py +6 -3
  132. package/src/zexus/type_checker.py +423 -0
  133. package/src/zexus/virtual_filesystem.py +189 -2
  134. package/src/zexus/vm/__init__.py +113 -3
  135. package/src/zexus/vm/__pycache__/async_optimizer.cpython-312.pyc +0 -0
  136. package/src/zexus/vm/__pycache__/bytecode.cpython-312.pyc +0 -0
  137. package/src/zexus/vm/__pycache__/bytecode_converter.cpython-312.pyc +0 -0
  138. package/src/zexus/vm/__pycache__/cache.cpython-312.pyc +0 -0
  139. package/src/zexus/vm/__pycache__/compiler.cpython-312.pyc +0 -0
  140. package/src/zexus/vm/__pycache__/gas_metering.cpython-312.pyc +0 -0
  141. package/src/zexus/vm/__pycache__/jit.cpython-312.pyc +0 -0
  142. package/src/zexus/vm/__pycache__/parallel_vm.cpython-312.pyc +0 -0
  143. package/src/zexus/vm/__pycache__/vm.cpython-312.pyc +0 -0
  144. package/src/zexus/vm/async_optimizer.py +80 -6
  145. package/src/zexus/vm/binary_bytecode.py +659 -0
  146. package/src/zexus/vm/bytecode.py +59 -11
  147. package/src/zexus/vm/bytecode_converter.py +26 -12
  148. package/src/zexus/vm/cabi.c +1985 -0
  149. package/src/zexus/vm/cabi.cpython-312-x86_64-linux-gnu.so +0 -0
  150. package/src/zexus/vm/cabi.h +127 -0
  151. package/src/zexus/vm/cache.py +561 -17
  152. package/src/zexus/vm/compiler.py +818 -51
  153. package/src/zexus/vm/fastops.c +15743 -0
  154. package/src/zexus/vm/fastops.cpython-312-x86_64-linux-gnu.so +0 -0
  155. package/src/zexus/vm/fastops.pyx +288 -0
  156. package/src/zexus/vm/gas_metering.py +50 -9
  157. package/src/zexus/vm/jit.py +364 -20
  158. package/src/zexus/vm/native_jit_backend.py +1816 -0
  159. package/src/zexus/vm/native_runtime.cpp +1388 -0
  160. package/src/zexus/vm/native_runtime.cpython-312-x86_64-linux-gnu.so +0 -0
  161. package/src/zexus/vm/optimizer.py +161 -11
  162. package/src/zexus/vm/parallel_vm.py +140 -45
  163. package/src/zexus/vm/peephole_optimizer.py +82 -4
  164. package/src/zexus/vm/profiler.py +38 -18
  165. package/src/zexus/vm/register_allocator.py +16 -5
  166. package/src/zexus/vm/register_vm.py +8 -5
  167. package/src/zexus/vm/vm.py +3581 -531
  168. package/src/zexus/vm/wasm_compiler.py +658 -0
  169. package/src/zexus/zexus_ast.py +137 -11
  170. package/src/zexus/zexus_token.py +16 -5
  171. package/src/zexus/zpm/installer.py +55 -15
  172. package/src/zexus/zpm/package_manager.py +1 -1
  173. package/src/zexus/zpm/registry.py +257 -28
  174. package/src/zexus.egg-info/PKG-INFO +16 -6
  175. package/src/zexus.egg-info/SOURCES.txt +129 -17
  176. package/src/zexus.egg-info/entry_points.txt +1 -0
  177. package/src/zexus.egg-info/requires.txt +4 -0
@@ -23,7 +23,9 @@ from .zexus_ast import (
23
23
  Program, LetStatement, ExpressionStatement, PrintStatement, ReturnStatement,
24
24
  IfStatement, WhileStatement, Identifier, IntegerLiteral, StringLiteral,
25
25
  Boolean as AST_Boolean, InfixExpression, PrefixExpression, CallExpression,
26
- ActionStatement, BlockStatement, MapLiteral, ListLiteral, AwaitExpression
26
+ ActionStatement, BlockStatement, MapLiteral, ListLiteral, AwaitExpression,
27
+ EnumDeclaration, ImportStatement, EventDeclaration, ProtocolDeclaration,
28
+ EmitStatement
27
29
  )
28
30
 
29
31
  # --- Bytecode representation ---
@@ -47,12 +49,68 @@ class BytecodeGenerator:
47
49
 
48
50
  def generate(self, program: Program) -> Bytecode:
49
51
  self.bytecode = Bytecode()
50
- for stmt in getattr(program, "statements", []):
51
- self._emit_statement(stmt, self.bytecode)
52
+ statements = list(getattr(program, "statements", []) or [])
53
+ total = len(statements)
54
+ index = 0
55
+ while index < total:
56
+ stmt = statements[index]
57
+
58
+ # Pattern-match legacy import syntax: import "module" [as alias]
59
+ if (
60
+ isinstance(stmt, ExpressionStatement)
61
+ and isinstance(stmt.expression, Identifier)
62
+ and stmt.expression.value == "import"
63
+ ):
64
+ module_value = None
65
+ alias_value = None
66
+ consumed = 1
67
+
68
+ if index + 1 < total:
69
+ module_stmt = statements[index + 1]
70
+ if isinstance(module_stmt, ExpressionStatement):
71
+ module_expr = getattr(module_stmt, "expression", None)
72
+ if isinstance(module_expr, StringLiteral):
73
+ module_value = module_expr.value
74
+ consumed = 2
75
+ elif isinstance(module_expr, Identifier):
76
+ module_value = module_expr.value
77
+ consumed = 2
78
+
79
+ if module_value is not None and index + consumed < total:
80
+ next_stmt = statements[index + consumed]
81
+ if (
82
+ isinstance(next_stmt, ExpressionStatement)
83
+ and isinstance(next_stmt.expression, Identifier)
84
+ and next_stmt.expression.value == "as"
85
+ and index + consumed + 1 < total
86
+ ):
87
+ alias_stmt = statements[index + consumed + 1]
88
+ if isinstance(alias_stmt, ExpressionStatement) and isinstance(alias_stmt.expression, Identifier):
89
+ alias_value = alias_stmt.expression.value
90
+ consumed += 2
91
+
92
+ if module_value is not None:
93
+ import_node = ImportStatement(module_path=module_value, alias=alias_value)
94
+ self._emit_statement(
95
+ import_node,
96
+ self.bytecode,
97
+ is_top_level=True,
98
+ is_last=(index + consumed - 1 == total - 1),
99
+ )
100
+ index += consumed
101
+ continue
102
+
103
+ self._emit_statement(
104
+ stmt,
105
+ self.bytecode,
106
+ is_top_level=True,
107
+ is_last=(index == total - 1),
108
+ )
109
+ index += 1
52
110
  return self.bytecode
53
111
 
54
112
  # Statement lowering
55
- def _emit_statement(self, stmt, bc: Bytecode):
113
+ def _emit_statement(self, stmt, bc: Bytecode, *, is_top_level: bool = False, is_last: bool = False):
56
114
  t = type(stmt).__name__
57
115
 
58
116
  if t == "LetStatement":
@@ -64,8 +122,8 @@ class BytecodeGenerator:
64
122
 
65
123
  if t == "ExpressionStatement":
66
124
  self._emit_expression(stmt.expression, bc)
67
- # drop result (no-op) or keep for top-level
68
- bc.add_instruction("POP", None)
125
+ if not (is_top_level and is_last):
126
+ bc.add_instruction("POP", None)
69
127
  return
70
128
 
71
129
  if t == "PrintStatement":
@@ -125,7 +183,61 @@ class BytecodeGenerator:
125
183
  bc.instructions[jump_pos] = ("JUMP_IF_FALSE", end_pos)
126
184
  return
127
185
 
128
- # Event/emit/enum/import handled at higher-level generator earlier; treat as NOP here
186
+ if t == "EnumDeclaration":
187
+ enum_name = getattr(stmt.name, "value", stmt.name)
188
+ members = getattr(stmt, "members", {}) or {}
189
+ resolved_members = {}
190
+ next_auto = 0
191
+ for key, explicit in members.items():
192
+ value = explicit if explicit is not None else next_auto
193
+ resolved_members[key] = value
194
+ next_auto = (value + 1) if explicit is not None else (next_auto + 1)
195
+
196
+ name_idx = bc.add_constant(enum_name)
197
+ members_idx = bc.add_constant(resolved_members)
198
+ bc.add_instruction("DEFINE_ENUM", (name_idx, members_idx))
199
+ return
200
+
201
+ if t == "ImportStatement":
202
+ module_idx = bc.add_constant(getattr(stmt, "module_path", None))
203
+ alias = getattr(stmt, "alias", None)
204
+ if alias is not None:
205
+ alias_idx = bc.add_constant(alias)
206
+ bc.add_instruction("IMPORT", (module_idx, alias_idx))
207
+ else:
208
+ bc.add_instruction("IMPORT", (module_idx,))
209
+ return
210
+
211
+ if t == "EventDeclaration":
212
+ event_name = getattr(stmt.name, "value", stmt.name)
213
+ name_idx = bc.add_constant(event_name)
214
+ bc.add_instruction("REGISTER_EVENT", name_idx)
215
+ return
216
+
217
+ if t == "ProtocolDeclaration":
218
+ proto_name = getattr(stmt.name, "value", stmt.name)
219
+ spec = getattr(stmt, "spec", {}) or {}
220
+ name_idx = bc.add_constant(proto_name)
221
+ spec_idx = bc.add_constant(spec)
222
+ bc.add_instruction("DEFINE_PROTOCOL", (name_idx, spec_idx))
223
+ return
224
+
225
+ if t == "EmitStatement":
226
+ event_name = getattr(stmt.name, "value", stmt.name)
227
+ name_idx = bc.add_constant(event_name)
228
+ if getattr(stmt, "payload", None) is not None:
229
+ self._emit_expression(stmt.payload, bc)
230
+ bc.add_instruction("EMIT_EVENT", (name_idx,))
231
+ return
232
+
233
+ if t == "StreamStatement":
234
+ # Streams are currently handled at runtime only; compiler no-op.
235
+ return
236
+
237
+ if t == "WatchStatement":
238
+ # Watch statements are runtime constructs; compiler emits no bytecode.
239
+ return
240
+
129
241
  return
130
242
 
131
243
  # Expression lowering
@@ -141,6 +253,11 @@ class BytecodeGenerator:
141
253
  bc.add_instruction("LOAD_CONST", const_idx)
142
254
  return
143
255
 
256
+ if typ == "FloatLiteral":
257
+ const_idx = bc.add_constant(float(expr.value))
258
+ bc.add_instruction("LOAD_CONST", const_idx)
259
+ return
260
+
144
261
  if typ == "StringLiteral":
145
262
  const_idx = bc.add_constant(expr.value)
146
263
  bc.add_instruction("LOAD_CONST", const_idx)
@@ -182,8 +182,12 @@ except Exception as e:
182
182
  def unwrap_return_value(obj):
183
183
  return obj
184
184
 
185
- # Minimal renderer fallback
186
- RENDER_REGISTRY = {'screens': {}, 'components': {}, 'themes': {}, 'canvases': {}, 'current_theme': None}
185
+ # Minimal renderer fallback (used when the real backend is unavailable)
186
+ try:
187
+ from ..renderer import backend as _BACKEND
188
+ RENDER_REGISTRY = _BACKEND.inspect_registry()
189
+ except Exception:
190
+ RENDER_REGISTRY = {'screens': {}, 'components': {}, 'themes': {}, 'canvases': {}, 'current_theme': None}
187
191
 
188
192
  # Try to create small wrappers for builtin functions by reading from object.File when present
189
193
  try:
@@ -39,12 +39,17 @@ class Lexer:
39
39
  return self.input[self.read_position]
40
40
 
41
41
  def next_token(self):
42
- self.skip_whitespace()
42
+ # Avoid recursion: long files can contain thousands of consecutive
43
+ # comments/blank lines.
44
+ while True:
45
+ self.skip_whitespace()
46
+
47
+ # Skip single line comments
48
+ if self.ch == '#' and self.peek_char() != '{':
49
+ self.skip_comment()
50
+ continue
43
51
 
44
- # Skip single line comments
45
- if self.ch == '#' and self.peek_char() != '{':
46
- self.skip_comment()
47
- return self.next_token()
52
+ break
48
53
 
49
54
  tok = None
50
55
  current_line = self.line
@@ -211,6 +216,10 @@ class Lexer:
211
216
  "action": ACTION,
212
217
  "async": ASYNC,
213
218
  "await": AWAIT,
219
+ "event": EVENT,
220
+ "emit": EMIT,
221
+ "import": IMPORT,
222
+ "as": AS,
214
223
  "enum": ENUM,
215
224
  "protocol": PROTOCOL,
216
225
  "interface": INTERFACE,
@@ -232,6 +241,8 @@ class Lexer:
232
241
  "catch": CATCH,
233
242
  "external": EXTERNAL,
234
243
  "from": FROM,
244
+ "stream": STREAM,
245
+ "watch": WATCH,
235
246
  # Blockchain & Smart Contract keywords
236
247
  "ledger": LEDGER,
237
248
  "state": STATE,
@@ -119,6 +119,10 @@ class ProductionParser:
119
119
  return self.parse_protocol_declaration()
120
120
  elif self.cur_token_is(IMPORT):
121
121
  return self.parse_import_statement()
122
+ elif self.cur_token_is(STREAM):
123
+ return self.parse_stream_statement()
124
+ elif self.cur_token_is(WATCH):
125
+ return self.parse_watch_statement()
122
126
  else:
123
127
  return self.parse_expression_statement()
124
128
  except Exception as e:
@@ -666,8 +670,32 @@ class ProductionParser:
666
670
  if not self.expect_peek(IDENT):
667
671
  return None
668
672
  name = Identifier(self.cur_token.literal)
669
- body = self.parse_block()
670
- return EventDeclaration(name=name, properties=body)
673
+ if not self.expect_peek(LBRACE):
674
+ return None
675
+
676
+ properties = {}
677
+ self.next_token()
678
+ while not self.cur_token_is(RBRACE) and not self.cur_token_is(EOF):
679
+ if self.cur_token_is(IDENT) or self.cur_token_is(STRING):
680
+ key = self.cur_token.literal
681
+ value = None
682
+ if self.peek_token_is(COLON):
683
+ self.next_token()
684
+ self.next_token()
685
+ value = self.parse_expression(LOWEST)
686
+ properties[key] = value
687
+
688
+ if self.peek_token_is(COMMA):
689
+ self.next_token()
690
+ self.next_token()
691
+ continue
692
+ self.next_token()
693
+
694
+ if not self.cur_token_is(RBRACE):
695
+ self.errors.append("Unclosed event declaration")
696
+ return None
697
+
698
+ return EventDeclaration(name=name, properties=properties)
671
699
 
672
700
  def parse_emit_statement(self):
673
701
  if not self.expect_peek(IDENT):
@@ -740,15 +768,88 @@ class ProductionParser:
740
768
  def parse_import_statement(self):
741
769
  if not self.expect_peek(STRING):
742
770
  return None
771
+
743
772
  module_path = self.cur_token.literal
744
773
  alias = None
745
- if self.peek_token_is(IDENT) and self.peek_token.literal == "as":
746
- self.next_token()
747
- self.next_token()
748
- if self.cur_token_is(IDENT):
749
- alias = self.cur_token.literal
774
+
775
+ if self.peek_token_is(AS) or (self.peek_token_is(IDENT) and self.peek_token.literal == "as"):
776
+ self.next_token() # advance to 'as'
777
+
778
+ if not self.expect_peek(IDENT):
779
+ self.errors.append(f"Line {self.cur_token.line}: Expected identifier after 'as' in import statement")
780
+ return None
781
+
782
+ alias = self.cur_token.literal
783
+
750
784
  return ImportStatement(module_path=module_path, alias=alias)
751
785
 
786
+ def parse_stream_statement(self):
787
+ token = self.cur_token
788
+
789
+ if not self.expect_peek(IDENT):
790
+ return None
791
+ stream_name = self.cur_token.literal
792
+
793
+ if not self.expect_peek(AS):
794
+ return None
795
+
796
+ if self.peek_token_is(IDENT) or self.peek_token_is(EVENT):
797
+ self.next_token()
798
+ else:
799
+ self.errors.append(f"Line {getattr(token, 'line', 'unknown')}: Expected event identifier after 'as'")
800
+ return None
801
+
802
+ event_token = self.cur_token
803
+ event_literal = getattr(event_token, 'literal', None) or getattr(event_token, 'value', None)
804
+ if not event_literal:
805
+ event_literal = event_token.type.lower()
806
+ event_var = Identifier(event_literal)
807
+
808
+ if not self.expect_peek(LAMBDA):
809
+ return None
810
+
811
+ self.next_token()
812
+
813
+ if self.cur_token_is(LBRACE):
814
+ handler = self.parse_block()
815
+ else:
816
+ handler_expr = self.parse_expression(LOWEST)
817
+ if handler_expr is None:
818
+ return None
819
+ handler = BlockStatement()
820
+ handler.statements.append(ExpressionStatement(handler_expr))
821
+
822
+ return StreamStatement(stream_name=stream_name, event_var=event_var, handler=handler)
823
+
824
+ def parse_watch_statement(self):
825
+ self.next_token()
826
+
827
+ lambda_handler = self.infix_parse_fns.pop(LAMBDA, None)
828
+ try:
829
+ watched_expr = self.parse_expression(LOWEST)
830
+ finally:
831
+ if lambda_handler is not None:
832
+ self.infix_parse_fns[LAMBDA] = lambda_handler
833
+
834
+ if watched_expr is None:
835
+ return None
836
+
837
+ if not self.expect_peek(LAMBDA):
838
+ return None
839
+
840
+ self.next_token()
841
+
842
+ if self.cur_token_is(LBRACE):
843
+ reaction = self.parse_block()
844
+ else:
845
+ reaction = BlockStatement()
846
+ stmt = self.parse_statement()
847
+ if stmt is None:
848
+ return None
849
+ reaction.statements.append(stmt)
850
+
851
+ return WatchStatement(watched_expr=watched_expr, reaction=reaction)
852
+
752
853
  # Token utilities
753
854
  def next_token(self):
754
855
  self.cur_token = self.peek_token
@@ -54,31 +54,33 @@ class SemanticAnalyzer:
54
54
  errors.append("Invalid AST: missing 'statements' list")
55
55
  return errors
56
56
 
57
- # Run new checks: await usage and protocol/event validation
58
- # Walk AST with context
57
+ visited = set()
58
+
59
59
  def walk(node, in_async=False):
60
- # Protect against cycles by tracking visited node ids
61
- if not hasattr(walk, '_visited'):
62
- walk._visited = set()
63
60
  if node is None:
64
61
  return
65
62
  node_id = id(node)
66
- if node_id in walk._visited:
63
+ if node_id in visited:
67
64
  return
68
- walk._visited.add(node_id)
69
- # Quick type checks for relevant nodes
65
+ visited.add(node_id)
66
+
70
67
  if isinstance(node, AwaitExpression):
71
68
  if not in_async:
72
69
  errors.append("Semantic error: 'await' used outside an async function")
73
- # ActionStatement may have is_async flag
70
+ return
71
+ expr = getattr(node, "expression", None)
72
+ if hasattr(expr, "__dict__"):
73
+ walk(expr, in_async=True)
74
+ return
75
+
74
76
  if isinstance(node, ActionStatement):
75
77
  body = getattr(node, "body", None)
76
78
  async_flag = getattr(node, "is_async", False)
77
79
  if body:
78
- # walk body with in_async = async_flag
79
80
  for s in getattr(body, "statements", []):
80
81
  walk(s, in_async=async_flag)
81
82
  return
83
+
82
84
  if isinstance(node, ProtocolDeclaration):
83
85
  spec = getattr(node, "spec", {})
84
86
  methods = spec.get("methods") if isinstance(spec, dict) else None
@@ -89,26 +91,23 @@ class SemanticAnalyzer:
89
91
  if not isinstance(m, str):
90
92
  errors.append(f"Protocol '{node.name.value}' has non-string method name: {m}")
91
93
  return
94
+
92
95
  if isinstance(node, EventDeclaration):
93
96
  props = getattr(node, "properties", None)
94
- if not isinstance(props, (MapLiteral, BlockStatement)):
97
+ if not isinstance(props, (MapLiteral, BlockStatement, dict)):
95
98
  errors.append(f"Event '{node.name.value}' properties should be a map or block")
96
- # further checks can be added
97
99
  return
98
100
 
99
- # Generic traversal
100
- for attr in dir(node):
101
- if attr.startswith("_") or attr in ("token_literal", "__repr__"):
101
+ for attr, val in vars(node).items():
102
+ if attr.startswith("_") or attr in ("token", "token_literal"):
102
103
  continue
103
- val = getattr(node, attr)
104
104
  if isinstance(val, list):
105
105
  for item in val:
106
- if hasattr(item, "__class__"):
106
+ if hasattr(item, "__dict__"):
107
107
  walk(item, in_async=in_async)
108
- elif hasattr(val, "__class__"):
108
+ elif hasattr(val, "__dict__"):
109
109
  walk(val, in_async=in_async)
110
110
 
111
- # Walk top-level statements
112
111
  for s in stmts:
113
112
  walk(s, in_async=False)
114
113
 
@@ -451,4 +451,29 @@ class ImportStatement(Statement):
451
451
  return "import"
452
452
 
453
453
  def __repr__(self):
454
- return f"ImportStatement({self.module_path} as {self.alias})"
454
+ return f"ImportStatement({self.module_path} as {self.alias})"
455
+
456
+
457
+ class StreamStatement(Statement):
458
+ def __init__(self, stream_name, event_var, handler):
459
+ self.stream_name = stream_name
460
+ self.event_var = event_var
461
+ self.handler = handler
462
+
463
+ def token_literal(self):
464
+ return "stream"
465
+
466
+ def __repr__(self):
467
+ return f"StreamStatement({self.stream_name})"
468
+
469
+
470
+ class WatchStatement(Statement):
471
+ def __init__(self, watched_expr, reaction):
472
+ self.watched_expr = watched_expr
473
+ self.reaction = reaction
474
+
475
+ def token_literal(self):
476
+ return "watch"
477
+
478
+ def __repr__(self):
479
+ return f"WatchStatement({self.watched_expr})"
@@ -468,6 +468,85 @@ class ConcurrencyManager:
468
468
  f"tasks={stats['tasks_completed']}/{stats['tasks_total']})")
469
469
 
470
470
 
471
+ # ---------------------------------------------------------------------------
472
+ # AsyncChannel — asyncio-native channel for the shared event loop
473
+ # ---------------------------------------------------------------------------
474
+
475
+ class AsyncChannel:
476
+ """
477
+ Async-native channel backed by :class:`asyncio.Queue`.
478
+
479
+ Unlike :class:`Channel` (which uses ``threading`` primitives), this
480
+ channel is designed to be used inside coroutines running on the shared
481
+ Zexus event loop. ``send`` and ``receive`` are ``async`` methods.
482
+
483
+ Example (inside a Zexus ``async action``)::
484
+
485
+ ch = AsyncChannel("numbers", capacity=10)
486
+ await ch.send(42)
487
+ val = await ch.receive() # 42
488
+ ch.close()
489
+ """
490
+
491
+ def __init__(self, name: str, element_type: Optional[str] = None,
492
+ capacity: int = 0):
493
+ self.name = name
494
+ self.element_type = element_type
495
+ self.capacity = capacity
496
+ self._closed = False
497
+
498
+ import asyncio as _asyncio
499
+ if capacity > 0:
500
+ self._queue: _asyncio.Queue = _asyncio.Queue(maxsize=capacity)
501
+ else:
502
+ self._queue = _asyncio.Queue()
503
+
504
+ @property
505
+ def is_open(self) -> bool:
506
+ return not self._closed
507
+
508
+ async def send(self, value, *, timeout: Optional[float] = None):
509
+ """Send *value* into the channel (async, may block if full)."""
510
+ if self._closed:
511
+ raise RuntimeError(f"Cannot send on closed async channel '{self.name}'")
512
+ import asyncio as _asyncio
513
+ if timeout is not None:
514
+ await _asyncio.wait_for(self._queue.put(value), timeout=timeout)
515
+ else:
516
+ await self._queue.put(value)
517
+
518
+ async def receive(self, *, timeout: Optional[float] = None):
519
+ """Receive a value from the channel (async, may block if empty)."""
520
+ if self._closed and self._queue.empty():
521
+ return None
522
+ import asyncio as _asyncio
523
+ try:
524
+ if timeout is not None:
525
+ value = await _asyncio.wait_for(self._queue.get(), timeout=timeout)
526
+ else:
527
+ value = await self._queue.get()
528
+ if isinstance(value, _ChannelClosedSentinel):
529
+ return None
530
+ return value
531
+ except _asyncio.TimeoutError:
532
+ if self._closed:
533
+ return None
534
+ raise RuntimeError(f"Timeout receiving from async channel '{self.name}'")
535
+
536
+ def close(self):
537
+ """Close the channel. Pending receivers will get ``None``."""
538
+ self._closed = True
539
+ try:
540
+ self._queue.put_nowait(_CHANNEL_CLOSED_SENTINEL)
541
+ except Exception:
542
+ pass
543
+
544
+ def __repr__(self) -> str:
545
+ mode = f"buffered({self.capacity})" if self.capacity > 0 else "unbuffered"
546
+ status = "closed" if self._closed else "open"
547
+ return f"AsyncChannel<{self.element_type}>({self.name}, {mode}, {status})"
548
+
549
+
471
550
  # Global singleton instance
472
551
  _concurrency_manager: Optional[ConcurrencyManager] = None
473
552
 
@@ -28,6 +28,11 @@ DEFAULT_RUNTIME = {
28
28
  'enable_advanced_parsing': True,
29
29
  'enable_debug_logs': False,
30
30
  'enable_parser_debug': False, # OPTIMIZATION: Disable parser debug output for speed
31
+ # Large-file stability: advanced parsing does extra analysis and can be
32
+ # significantly heavier on very large sources. For big files, we prefer the
33
+ # traditional streaming parser for stability.
34
+ 'advanced_parsing_max_lines': 2000,
35
+ 'advanced_parsing_max_tokens': 50000,
31
36
  # Legacy runtime flags expected by older modules
32
37
  'use_hybrid_compiler': True,
33
38
  'fallback_to_interpreter': True,
@@ -40,8 +45,15 @@ class Config:
40
45
  def __init__(self):
41
46
  self.config_dir = Path.home() / ".zexus"
42
47
  self.config_file = self.config_dir / "config.json"
48
+
49
+ # Fast caching attribute for hot paths
50
+ self.fast_debug_enabled = False
51
+
43
52
  self._data = DEFAULT_CONFIG.copy()
44
53
  self._ensure_loaded()
54
+
55
+ # Update cache from initial loaded data
56
+ self.fast_debug_enabled = (self.debug_level != 'none')
45
57
 
46
58
  # ensure runtime defaults exist for backward compatibility
47
59
  self._data.setdefault('runtime', {})
@@ -93,6 +105,7 @@ class Config:
93
105
  raise ValueError('Invalid debug level')
94
106
  self._data.setdefault('debug', {})['level'] = value
95
107
  self._data['debug']['enabled'] = (value != 'none')
108
+ self.fast_debug_enabled = (value != 'none')
96
109
  self._write()
97
110
 
98
111
  def enable_debug(self, level='full'):
@@ -143,6 +156,47 @@ class Config:
143
156
  self.debug_level = 'none'
144
157
 
145
158
  # Legacy runtime properties
159
+ @property
160
+ def enable_parser_debug(self):
161
+ return bool(self._data.get('runtime', {}).get('enable_parser_debug', False))
162
+
163
+ @enable_parser_debug.setter
164
+ def enable_parser_debug(self, value):
165
+ self._data.setdefault('runtime', {})['enable_parser_debug'] = bool(value)
166
+ self._write()
167
+
168
+ @property
169
+ def advanced_parsing_max_lines(self):
170
+ try:
171
+ return int(self._data.get('runtime', {}).get('advanced_parsing_max_lines', 2000))
172
+ except Exception:
173
+ return 2000
174
+
175
+ @advanced_parsing_max_lines.setter
176
+ def advanced_parsing_max_lines(self, value):
177
+ try:
178
+ v = int(value)
179
+ except Exception:
180
+ v = 2000
181
+ self._data.setdefault('runtime', {})['advanced_parsing_max_lines'] = v
182
+ self._write()
183
+
184
+ @property
185
+ def advanced_parsing_max_tokens(self):
186
+ try:
187
+ return int(self._data.get('runtime', {}).get('advanced_parsing_max_tokens', 50000))
188
+ except Exception:
189
+ return 50000
190
+
191
+ @advanced_parsing_max_tokens.setter
192
+ def advanced_parsing_max_tokens(self, value):
193
+ try:
194
+ v = int(value)
195
+ except Exception:
196
+ v = 50000
197
+ self._data.setdefault('runtime', {})['advanced_parsing_max_tokens'] = v
198
+ self._write()
199
+
146
200
  @property
147
201
  def use_hybrid_compiler(self):
148
202
  return self._data.get('runtime', {}).get('use_hybrid_compiler', True)