zexus 1.6.3 → 1.6.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +1 -1
- package/src/zexus/__init__.py +1 -1
- package/src/zexus/cli/main.py +1 -1
- package/src/zexus/cli/zpm.py +1 -1
- package/src/zexus/evaluator/core.py +10 -19
- package/src/zexus/evaluator/functions.py +48 -21
- package/src/zexus/evaluator/statements.py +41 -14
- package/src/zexus/lexer.py +1 -1
- package/src/zexus/parser/parser.py +54 -3
- package/src/zexus/parser/strategy_context.py +78 -4
- package/src/zexus/parser/strategy_structural.py +30 -5
- package/src/zexus/security.py +43 -25
- package/src/zexus/zexus_ast.py +3 -2
- package/src/zexus/zpm/package_manager.py +1 -1
- package/src/zexus.egg-info/PKG-INFO +2 -2
- package/src/zexus.egg-info/SOURCES.txt +3 -2
package/README.md
CHANGED

@@ -2,7 +2,7 @@
 
 <div align="center">
 
-
+
 [](LICENSE)
 [](https://python.org)
 [](https://github.com/Zaidux/zexus-interpreter)

package/package.json
CHANGED
package/src/zexus/__init__.py
CHANGED
package/src/zexus/cli/main.py
CHANGED

@@ -91,7 +91,7 @@ def show_all_commands():
     console.print("\n[bold green]💡 Tip:[/bold green] Use 'zx <command> --help' for detailed command options\n")
 
 @click.group(invoke_without_command=True)
-@click.version_option(version="1.6.3", prog_name="Zexus")
+@click.version_option(version="1.6.5", prog_name="Zexus")
 @click.option('--syntax-style', type=click.Choice(['universal', 'tolerable', 'auto']),
               default='auto', help='Syntax style to use (universal=strict, tolerable=flexible)')
 @click.option('--advanced-parsing', is_flag=True, default=True,

package/src/zexus/cli/zpm.py
CHANGED

package/src/zexus/evaluator/core.py
CHANGED

@@ -517,30 +517,21 @@ class Evaluator(ExpressionEvaluatorMixin, StatementEvaluatorMixin, FunctionEvaluatorMixin):
         if is_error(obj):
             return obj
 
-        #
-
-
-        # This could be either a property name (obj.prop) or an index variable (arr[i])
-        # We need to check if it's being used as an index (numeric) or property (string)
-        # First try to evaluate it as an identifier (variable lookup)
-        prop_result = self.eval_identifier(node.property, env)
-        if not is_error(prop_result) and not isinstance(prop_result, type(NULL)):
-            # Successfully found a variable, use its value as the property/index
-            property_name = prop_result.value if hasattr(prop_result, 'value') else str(prop_result)
-        else:
-            # Not found as variable, treat as literal property name (for obj.prop syntax)
-            property_name = node.property.value
-        elif isinstance(node.property, zexus_ast.IntegerLiteral):
-            # Direct integer index like arr[0]
-            property_name = node.property.value
-        elif isinstance(node.property, zexus_ast.PropertyAccessExpression):
-            # Nested property access - evaluate it
+        # Determine property name based on whether it's computed (obj[expr]) or literal (obj.prop)
+        if hasattr(node, 'computed') and node.computed:
+            # Computed property (obj[expr]) - always evaluate the expression
             prop_result = self.eval_node(node.property, env, stack_trace)
             if is_error(prop_result):
                 return prop_result
             property_name = prop_result.value if hasattr(prop_result, 'value') else str(prop_result)
+        elif isinstance(node.property, zexus_ast.Identifier):
+            # Literal property (obj.prop) - use the identifier name directly
+            property_name = node.property.value
+        elif isinstance(node.property, zexus_ast.IntegerLiteral):
+            # Direct integer index like arr[0] (for backwards compatibility)
+            property_name = node.property.value
         else:
-            #
+            # Fallback: evaluate the property expression
            prop_result = self.eval_node(node.property, env, stack_trace)
             if is_error(prop_result):
                 return prop_result
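
The property-resolution rewrite above keys off an explicit computed flag carried on the AST node instead of first guessing by evaluating the identifier as a variable. A rough, hypothetical sketch of the new dispatch (a simplified stand-in for the Evaluator method, which also threads env and stack_trace):

    # Hypothetical helper; `evaluate` stands in for self.eval_node(...)
    def resolve_property_name(node, evaluate):
        if getattr(node, 'computed', False):
            # obj[expr]: always evaluate the bracketed expression
            result = evaluate(node.property)
            return getattr(result, 'value', str(result))
        if hasattr(node.property, 'value'):
            # obj.prop or arr[0] with a literal: take the literal value directly
            return node.property.value
        # Fallback: evaluate whatever expression sits in property position
        result = evaluate(node.property)
        return getattr(result, 'value', str(result))

With this split, user.name no longer tries a variable named name first, while scores[i] still evaluates i.
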
package/src/zexus/evaluator/functions.py
CHANGED

@@ -354,42 +354,67 @@ class FunctionEvaluatorMixin:
 
         elif isinstance(fn, EntityDefinition):
             debug_log("  Creating entity instance (old format)")
-            # Entity constructor: Person("Alice", 30)
+            # Entity constructor: Person("Alice", 30) or Person{name: "Alice", age: 30}
             # Create instance with positional arguments mapped to properties
             from ..object import EntityInstance, String, Integer
 
             values = {}
-            # Map positional arguments to property names
-            if isinstance(fn.properties, dict):
-                prop_names = list(fn.properties.keys())
-            else:
-                prop_names = [prop['name'] for prop in fn.properties]
 
-
-
-
+            # Special case: If single argument is a Map, use it as field values
+            # This handles Entity{field: value} syntax which becomes Entity(Map{...})
+            if len(args) == 1 and isinstance(args[0], Map):
+                debug_log("  Single Map argument detected - using as field values")
+                map_arg = args[0]
+                # Extract key-value pairs from the Map
+                for key, value in map_arg.pairs.items():
+                    # Convert key to string if it's a String object
+                    key_str = key.value if isinstance(key, String) else str(key)
+                    values[key_str] = value
+            else:
+                # Map positional arguments to property names
+                if isinstance(fn.properties, dict):
+                    prop_names = list(fn.properties.keys())
+                else:
+                    prop_names = [prop['name'] for prop in fn.properties]
+
+                for i, arg in enumerate(args):
+                    if i < len(prop_names):
+                        values[prop_names[i]] = arg
 
             return EntityInstance(fn, values)
 
         # Handle SecurityEntityDefinition (from security.py with methods support)
         from ..security import EntityDefinition as SecurityEntityDef, EntityInstance as SecurityEntityInstance
+        from ..object import String
         if isinstance(fn, SecurityEntityDef):
             debug_log("  Creating entity instance (with methods)")
 
             values = {}
-            # Map positional arguments to property names, INCLUDING INHERITED PROPERTIES
-            # Use get_all_properties() to get the full property list in correct order
-            if hasattr(fn, 'get_all_properties'):
-                # Get all properties (parent + child) in correct order
-                all_props = fn.get_all_properties()
-                prop_names = list(all_props.keys())
-            else:
-                # Fallback for old-style properties
-                prop_names = list(fn.properties.keys()) if isinstance(fn.properties, dict) else [prop['name'] for prop in fn.properties]
 
-
-
-
+            # Special case: If single argument is a Map, use it as field values
+            # This handles Entity{field: value} syntax which becomes Entity(Map{...})
+            if len(args) == 1 and isinstance(args[0], Map):
+                debug_log("  Single Map argument detected - using as field values")
+                map_arg = args[0]
+                # Extract key-value pairs from the Map
+                for key, value in map_arg.pairs.items():
+                    # Convert key to string if it's a String object
+                    key_str = key.value if isinstance(key, String) else str(key)
+                    values[key_str] = value
+            else:
+                # Map positional arguments to property names, INCLUDING INHERITED PROPERTIES
+                # Use get_all_properties() to get the full property list in correct order
+                if hasattr(fn, 'get_all_properties'):
+                    # Get all properties (parent + child) in correct order
+                    all_props = fn.get_all_properties()
+                    prop_names = list(all_props.keys())
+                else:
+                    # Fallback for old-style properties
+                    prop_names = list(fn.properties.keys()) if isinstance(fn.properties, dict) else [prop['name'] for prop in fn.properties]
+
+                for i, arg in enumerate(args):
+                    if i < len(prop_names):
+                        values[prop_names[i]] = arg
 
         debug_log(f"  Entity instance created with {len(values)} properties: {list(values.keys())}")
         # Use create_instance to handle dependency injection

@@ -1977,6 +2002,8 @@ class FunctionEvaluatorMixin:
             return Integer(len(arg.value))
         if isinstance(arg, List):
             return Integer(len(arg.elements))
+        if isinstance(arg, Map):
+            return Integer(len(arg.pairs))
         # Handle Python list (shouldn't happen, but defensive)
         if isinstance(arg, list):
            return Integer(len(arg))
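
Both entity paths now accept a single Map argument as a bag of named field values, which is how the new Entity{field: value} sugar reaches the evaluator, and len() gains Map support. A small, hypothetical model of the mapping logic (the String/Map classes below are illustrative stand-ins for the interpreter's runtime wrapper types, not the real ones):

    # Stand-in runtime types, for illustration only
    class String:
        def __init__(self, value): self.value = value
    class Map:
        def __init__(self, pairs): self.pairs = pairs

    def build_field_values(args, prop_names):
        """Single Map argument -> named fields; otherwise positional mapping."""
        values = {}
        if len(args) == 1 and isinstance(args[0], Map):
            for key, value in args[0].pairs.items():
                values[key.value if isinstance(key, String) else str(key)] = value
        else:
            for name, arg in zip(prop_names, args):
                values[name] = arg
        return values

    # Person{name: "Alice", age: 30} behaves like:
    print(build_field_values([Map({String("name"): "Alice", String("age"): 30})], ["name", "age"]))
    # Person("Alice", 30) behaves like:
    print(build_field_values(["Alice", 30], ["name", "age"]))
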
package/src/zexus/evaluator/statements.py
CHANGED

@@ -221,7 +221,7 @@ class StatementEvaluatorMixin:
             return value
 
         # Set as const in environment
-        env.
+        env.set(node.name.value, value)
         return NULL
 
     def eval_data_statement(self, node, env, stack_trace):

@@ -337,6 +337,14 @@ class StatementEvaluatorMixin:
             instance.pairs[String("__immutable__")] = Boolean(is_immutable)
             instance.pairs[String("__verified__")] = Boolean(is_verified)
 
+            # Check if single argument is a Map (from MapLiteral syntax like Block{index: 42})
+            # If so, extract field values from the map instead of treating it as positional args
+            kwargs = None
+            if len(args) == 1 and isinstance(args[0], Map):
+                # Extract keyword arguments from the Map
+                kwargs = args[0].pairs
+                debug_log("dataclass_constructor", f"Extracted {len(kwargs)} kwargs from Map")
+
             # Process each field with validation (parent fields first, then child fields)
             arg_index = 0
             for field in all_fields:

@@ -348,8 +356,20 @@ class StatementEvaluatorMixin:
 
                 field_value = NULL
 
-                # Get value from
-                if
+                # Get value from keyword args (map syntax) or positional args
+                if kwargs is not None:
+                    # Try to get value from keyword arguments (map)
+                    field_value = kwargs.get(field_name, NULL)
+                    if field_value == NULL:
+                        # Try with String key
+                        field_value = kwargs.get(String(field_name), NULL)
+                    if field_value == NULL and field.default_value is not None:
+                        # Use default if not provided
+                        field_value = evaluator_self.eval_node(field.default_value, parent_env, stack_trace)
+                        if is_error(field_value):
+                            return field_value
+                elif arg_index < len(args):
+                    # Positional argument
                     field_value = args[arg_index]
                     arg_index += 1
                 elif field.default_value is not None:

@@ -702,10 +722,10 @@ class StatementEvaluatorMixin:
             "default": Builtin(default_static)
         }
 
-        # Register constructor in environment
+        # Register constructor in environment
         # For specialized generics (e.g., Box<number>), don't fail if already registered
         try:
-            env.
+            env.set(type_name, constructor)
         except ValueError as e:
             # If it's a specialized generic that's already registered, just return the existing one
             if '<' in type_name and '>' in type_name:

@@ -757,15 +777,23 @@ class StatementEvaluatorMixin:
         if is_error(obj):
             return obj
 
-        #
-        if hasattr(node.name
-
-        else:
-            # Evaluate property expression
+        # Determine property key based on whether it's computed (obj[expr]) or literal (obj.prop)
+        if hasattr(node.name, 'computed') and node.name.computed:
+            # Computed property (obj[expr]) - evaluate the expression
             prop_result = self.eval_node(node.name.property, env, stack_trace)
             if is_error(prop_result):
                 return prop_result
             prop_key = prop_result.value if hasattr(prop_result, 'value') else str(prop_result)
+        else:
+            # Literal property (obj.prop) - use the identifier name directly
+            if hasattr(node.name.property, 'value'):
+                prop_key = node.name.property.value
+            else:
+                # Fallback: evaluate it
+                prop_result = self.eval_node(node.name.property, env, stack_trace)
+                if is_error(prop_result):
+                    return prop_result
+                prop_key = prop_result.value if hasattr(prop_result, 'value') else str(prop_result)
 
         # Evaluate value first
         value = self.eval_node(node.value, env, stack_trace)

@@ -1708,11 +1736,10 @@ class StatementEvaluatorMixin:
 
         # Pass the AST nodes as storage_vars, not the storage dict
         contract = SmartContract(node.name.value, node.storage_vars, actions)
-
+        # Deploy with evaluated storage values to avoid storing AST nodes
+        contract.deploy(evaluated_storage_values=storage)
 
-        #
-        for var_name, init_val in storage.items():
-            contract.storage.set(var_name, init_val)
+        # Storage values are now set during deploy(), no need to set again
 
         # Check if contract has a constructor and execute it
        if 'constructor' in actions:
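
For data definitions the constructor now mirrors the entity path: a lone Map argument is unpacked into keyword-style field values, with positional arguments and declared defaults as fallbacks. A hypothetical, simplified model of that lookup order (plain string keys here; the real code also retries with the runtime String key type):

    # Hypothetical helper: resolve one field of Block{index: 42} or Block(42)
    def resolve_field(field_name, kwargs, args, arg_index, default=None):
        if kwargs is not None and field_name in kwargs:
            return kwargs[field_name], arg_index          # keyword value from the map
        if arg_index < len(args):
            return args[arg_index], arg_index + 1         # next positional argument
        return default, arg_index                         # declared default (or NULL)

    print(resolve_field("index", {"index": 42}, [], 0))   # (42, 0)
    print(resolve_field("index", None, [42], 0))          # (42, 1)
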
package/src/zexus/lexer.py
CHANGED

@@ -473,7 +473,7 @@ class Lexer:
     "break": BREAK,        # NEW: Break loop keyword
     "throw": THROW,        # NEW: Throw error keyword
     "external": EXTERNAL,  # NEW: External keyword
-    "from": FROM,  #
+    # "from": FROM,  # NOT a keyword - only recognized contextually in import statements
     "screen": SCREEN,      # NEW: renderer keyword
     "component": COMPONENT,  # NEW: renderer keyword
     "theme": THEME,        # NEW: renderer keyword

package/src/zexus/parser/parser.py
CHANGED

@@ -27,6 +27,7 @@ precedences = {
     SLASH: PRODUCT, STAR: PRODUCT, MOD: PRODUCT,
     LPAREN: CALL,
     LBRACKET: CALL,
+    LBRACE: CALL,  # Entity{...} constructor syntax
     DOT: CALL,
 }
 

@@ -103,6 +104,7 @@ class UltimateParser:
             ASSIGN: self.parse_assignment_expression,
             LAMBDA: self.parse_lambda_infix,  # support arrow-style lambdas: params => body
             LPAREN: self.parse_call_expression,
+            LBRACE: self.parse_constructor_call_expression,  # Entity{field: value} syntax
             LBRACKET: self.parse_index_expression,
             DOT: self.parse_method_call_expression,
         }

@@ -1506,7 +1508,7 @@ class UltimateParser:
             arguments = self.parse_expression_list(RPAREN)
             return MethodCallExpression(object=left, method=method, arguments=arguments)
         else:
-            return PropertyAccessExpression(object=left, property=method)
+            return PropertyAccessExpression(object=left, property=method, computed=False)
 
     def parse_export_statement(self):
         token = self.cur_token

@@ -1709,7 +1711,7 @@ class UltimateParser:
             return None
 
         field_name = Identifier(self.cur_token.literal)
-        target = PropertyAccessExpression(obj_name, field_name)
+        target = PropertyAccessExpression(obj_name, field_name, computed=False)
 
         # Expect assignment
         if not self.expect_peek(ASSIGN):

@@ -2664,6 +2666,43 @@ class UltimateParser:
                not self.peek_token_is(RBRACKET) and
                precedence <= self.peek_precedence()):
 
+            # CRITICAL FIX: Stop if next token is on a new line and could start a new statement
+            # This prevents expressions from spanning multiple logical lines
+            if self.cur_token.line < self.peek_token.line:
+                # Next token is on a new line - check if it could start a new statement
+                next_could_be_statement = (
+                    self.peek_token.type == IDENT or
+                    self.peek_token.type == LET or
+                    self.peek_token.type == CONST or
+                    self.peek_token.type == RETURN or
+                    self.peek_token.type == IF or
+                    self.peek_token.type == WHILE or
+                    self.peek_token.type == FOR
+                )
+                if next_could_be_statement:
+                    # Additional check: is the next token followed by [ or = ?
+                    # This would indicate it's an assignment/index expression starting
+                    if self.peek_token.type == IDENT:
+                        # Save current state to peek ahead
+                        saved_cur = self.cur_token
+                        saved_peek = self.peek_token
+                        saved_pos = self.cur_pos
+
+                        # Peek ahead one more token
+                        self.next_token()  # Now peek_token is what we want to check
+                        next_next = self.peek_token
+
+                        # Restore state
+                        self.cur_token = saved_cur
+                        self.peek_token = saved_peek
+                        self.cur_pos = saved_pos
+
+                        # If next token after IDENT is LBRACKET or ASSIGN, it's likely a new statement
+                        if next_next.type in (LBRACKET, ASSIGN, LPAREN):
+                            break
+                    else:
+                        break
+
             if self.peek_token.type not in self.infix_parse_fns:
                 return left_exp
 

@@ -2728,6 +2767,18 @@ class UltimateParser:
         exp.arguments = self.parse_expression_list(RPAREN)
         return exp
 
+    def parse_constructor_call_expression(self, function):
+        """Parse constructor call with map literal syntax: Entity{field: value, ...}
+
+        This converts Entity{a: 1, b: 2} into Entity({a: 1, b: 2})
+        """
+        # Current token is LBRACE, parse it as a map literal
+        map_literal = self.parse_map_literal()
+
+        # Create a call expression with the map as the single argument
+        exp = CallExpression(function=function, arguments=[map_literal])
+        return exp
+
     def parse_prefix_expression(self):
         expression = PrefixExpression(operator=self.cur_token.literal, right=None)
         self.next_token()

@@ -2800,7 +2851,7 @@ class UltimateParser:
         # Expect closing bracket
         if not self.expect_peek(RBRACKET):
             return None
-        return PropertyAccessExpression(object=left, property=index_expr)
+        return PropertyAccessExpression(object=left, property=index_expr, computed=True)
 
     def _lookahead_token_after_matching_paren(self):
        """Character-level lookahead: detect if the matching ')' is followed by '=>' (arrow).
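
Giving LBRACE call precedence and routing it to parse_constructor_call_expression is what turns Entity{field: value} into an ordinary call whose only argument is the map literal, so no new AST node type is needed. A hypothetical, stripped-down illustration of that desugaring (the stand-in classes are not the real parser types):

    # Illustrative stand-ins only
    class Identifier:
        def __init__(self, name): self.name = name
    class CallExpression:
        def __init__(self, function, arguments): self.function, self.arguments = function, arguments

    def constructor_call(callee, parse_map_literal):
        # Invoked when '{' follows a callee at call precedence:
        # Block{index: 42}  ==>  Block({index: 42})
        return CallExpression(callee, [parse_map_literal()])

    call = constructor_call(Identifier("Block"), lambda: {"index": 42})
    print(call.function.name, call.arguments)   # Block [{'index': 42}]

Downstream, the evaluator's single-Map-argument path (see the functions.py section above) unpacks that map into named fields.
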
package/src/zexus/parser/strategy_context.py
CHANGED

@@ -1533,9 +1533,40 @@ class ContextStackParser:
                 default_val = BooleanLiteral(True)
             elif val_token.type == FALSE:
                 default_val = BooleanLiteral(False)
+            elif val_token.type == LBRACE:
+                # Map literal: {}
+                # Find matching RBRACE
+                map_start = current_idx
+                depth = 1
+                current_idx += 1
+                while current_idx < brace_end and depth > 0:
+                    if tokens[current_idx].type == LBRACE:
+                        depth += 1
+                    elif tokens[current_idx].type == RBRACE:
+                        depth -= 1
+                    current_idx += 1
+                # Parse the map literal
+                map_tokens = tokens[map_start:current_idx]
+                default_val = self._parse_map_literal(map_tokens)
+            elif val_token.type == LBRACKET:
+                # List literal: []
+                # Find matching RBRACKET
+                list_start = current_idx
+                depth = 1
+                current_idx += 1
+                while current_idx < brace_end and depth > 0:
+                    if tokens[current_idx].type == LBRACKET:
+                        depth += 1
+                    elif tokens[current_idx].type == RBRACKET:
+                        depth -= 1
+                    current_idx += 1
+                # Parse the list literal
+                list_tokens = tokens[list_start:current_idx]
+                default_val = self._parse_list_literal(list_tokens)
             elif val_token.type == IDENT:
                 default_val = Identifier(val_token.literal)
-
+                current_idx += 1
+            # Note: current_idx already advanced for LBRACE and LBRACKET cases
 
             # Use AstNodeShim for compatibility with evaluator
             storage_vars.append(AstNodeShim(

@@ -3361,6 +3392,11 @@ class ContextStackParser:
             # E.g., after RPAREN (end of function call) or after a complete value
             prev_token = run_tokens[-1] if run_tokens else None
             if prev_token and prev_token.type not in {DOT, LPAREN, LBRACKET, LBRACE, ASSIGN}:
+                # CRITICAL: Also check for newline - new line + IDENT often indicates new statement
+                last_line = prev_token.line if hasattr(prev_token, 'line') else 0
+                current_line = t.line if hasattr(t, 'line') else 0
+                is_new_line = current_line > last_line
+
                 # Check if this starts a new statement (assignment or function call)
                 k = j + 1
                 is_new_statement_start = False

@@ -3373,6 +3409,22 @@ class ContextStackParser:
                 # Assignment: ident = or ident.prop =
                 elif next_tok.type == ASSIGN:
                     is_new_statement_start = True
+                # CRITICAL FIX: Indexed assignment: ident[...] =
+                elif next_tok.type == LBRACKET:
+                    # Scan for matching RBRACKET followed by ASSIGN
+                    bracket_depth = 1
+                    scan_idx = k + 1
+                    while scan_idx < len(tokens) and scan_idx < k + 20:
+                        if tokens[scan_idx].type == LBRACKET:
+                            bracket_depth += 1
+                        elif tokens[scan_idx].type == RBRACKET:
+                            bracket_depth -= 1
+                            if bracket_depth == 0:
+                                # Found matching closing bracket, check for ASSIGN
+                                if scan_idx + 1 < len(tokens) and tokens[scan_idx + 1].type == ASSIGN:
+                                    is_new_statement_start = True
+                                break
+                        scan_idx += 1
                 elif next_tok.type == DOT:
                     # Property assignment: scan for ASSIGN
                     while k < len(tokens) and k < j + 10:

@@ -3388,7 +3440,9 @@ class ContextStackParser:
                     else:
                         break
 
-                if
+                # Break if this is a new statement AND on a new line
+                # (or if we're sure it's a new statement regardless of line)
+                if is_new_statement_start and (is_new_line or prev_token.type == RPAREN):
                     break
 
             # update nesting for parentheses/brackets/braces

@@ -3932,6 +3986,24 @@ class ContextStackParser:
         if i < n and tokens[i].type == RPAREN:
             i += 1  # Skip RPAREN
             return CallExpression(Identifier(name), args, type_args=type_args)
+
+        # Check for constructor call with map literal: Entity{field: value, ...}
+        elif i < n and tokens[i].type == LBRACE:
+            # Parse the map literal as the single argument
+            start = i
+            depth = 1
+            i += 1  # Skip LBRACE
+            # Find matching RBRACE
+            while i < n and depth > 0:
+                if tokens[i].type == LBRACE:
+                    depth += 1
+                elif tokens[i].type == RBRACE:
+                    depth -= 1
+                i += 1
+            # Parse the map literal tokens (including braces)
+            map_literal = self._parse_map_literal(tokens[start:i])
+            return CallExpression(Identifier(name), [map_literal], type_args=type_args)
+
         else:
             return Identifier(name)
 

@@ -3997,7 +4069,8 @@ class ContextStackParser:
             # Property access: expr.name
             current_expr = PropertyAccessExpression(
                 object=current_expr,
-                property=Identifier(name_token.literal)
+                property=Identifier(name_token.literal),
+                computed=False
             )
             continue
 

@@ -4052,7 +4125,8 @@ class ContextStackParser:
             prop_expr = self._parse_expression(inner_tokens) if inner_tokens else Identifier('')
             current_expr = PropertyAccessExpression(
                 object=current_expr,
-                property=prop_expr
+                property=prop_expr,
+                computed=True
             )
            continue
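
Several of these changes share one idea: a line that begins with an identifier, a bracketed index, and then "=" (for example scores[i] = 0) must be treated as the start of a new statement rather than as a continuation of the previous expression. A hypothetical sketch of the bracket scan (token types shown as plain strings; the real code compares against token-type constants):

    from collections import namedtuple
    Token = namedtuple("Token", "type literal")

    def starts_indexed_assignment(tokens, k, max_lookahead=20):
        """tokens[k] is the '[' right after an identifier; True for ident[...] = ..."""
        depth = 0
        for i in range(k, min(len(tokens), k + max_lookahead)):
            if tokens[i].type == "LBRACKET":
                depth += 1
            elif tokens[i].type == "RBRACKET":
                depth -= 1
                if depth == 0:
                    return i + 1 < len(tokens) and tokens[i + 1].type == "ASSIGN"
        return False

    toks = [Token("IDENT", "scores"), Token("LBRACKET", "["), Token("IDENT", "i"),
            Token("RBRACKET", "]"), Token("ASSIGN", "=")]
    print(starts_indexed_assignment(toks, 1))   # True
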
package/src/zexus/parser/strategy_structural.py
CHANGED

@@ -728,9 +728,11 @@ class StructuralAnalyzer:
                 if tj.line > last_line:
                     # Check if we have balanced parens in run_tokens (statement is syntactically complete)
                     paren_count = sum(1 if tok.type == LPAREN else -1 if tok.type == RPAREN else 0 for tok in run_tokens)
-                    if
+                    bracket_count = sum(1 if tok.type == LBRACKET else -1 if tok.type == RBRACKET else 0 for tok in run_tokens)
+                    if paren_count == 0 and bracket_count == 0:
                         # Check if run_tokens contains an assignment (this is a complete assignment statement)
                         has_assign = any(tok.type == ASSIGN for tok in run_tokens)
+                        print(f"  has_assign={has_assign}, tj.type={tj.type}")
                         if has_assign:
                             # Current token is on a new line and could start a new statement
                             # Check if it's IDENT (could be method call, function call, or property access)

@@ -738,8 +740,10 @@ class StructuralAnalyzer:
                             # CRITICAL FIX: Don't break if the previous token was ASSIGN
                             # This means the IDENT is the RHS value, not a new statement
                             prev_tok = run_tokens[-1] if run_tokens else None
+                            print(f"  prev_tok={prev_tok.literal if prev_tok else None}, type={prev_tok.type if prev_tok else None}")
                             if prev_tok and prev_tok.type == ASSIGN:
                                 # This IDENT is the RHS of the assignment, not a new statement
+                                print(f"  -> Continuing (RHS of assignment)")
                                 pass  # Don't break, continue collecting
                             else:
                                 # This is likely a new statement on a new line

@@ -760,6 +764,26 @@ class StructuralAnalyzer:
                 # Look ahead: IDENT DOT IDENT ASSIGN is a property assignment
                 if j + 3 < n and tokens[j + 2].type == IDENT and tokens[j + 3].type == ASSIGN:
                     is_assignment_start = True
+            # Pattern 3: IDENT followed by LBRACKET could be indexed assignment (arr[i] = ...)
+            elif tj.type == IDENT and j + 1 < n and tokens[j + 1].type == LBRACKET:
+                # Look ahead to find matching RBRACKET and then ASSIGN
+                # This pattern is: IDENT [ ... ] ASSIGN
+                bracket_depth = 0
+                k = j + 1
+                found_assign_after_bracket = False
+                while k < n:
+                    if tokens[k].type == LBRACKET:
+                        bracket_depth += 1
+                    elif tokens[k].type == RBRACKET:
+                        bracket_depth -= 1
+                        if bracket_depth == 0:
+                            # Found matching closing bracket, check if next is ASSIGN
+                            if k + 1 < n and tokens[k + 1].type == ASSIGN:
+                                found_assign_after_bracket = True
+                            break
+                    k += 1
+                if found_assign_after_bracket:
+                    is_assignment_start = True
 
             is_new_statement = (
                 tj.type in stop_types or

@@ -993,11 +1017,12 @@ class StructuralAnalyzer:
                 continue
 
             # NEW: Check for line-based statement boundaries
-            # If we have balanced parens and the next token is on a new line and could start a new statement, create boundary
+            # If we have balanced parens/brackets and the next token is on a new line and could start a new statement, create boundary
            if cur:
-                # Check if parens are balanced
+                # Check if parens and brackets are balanced
                 paren_count = sum(1 if tok.type == LPAREN else -1 if tok.type == RPAREN else 0 for tok in cur)
-                if
+                bracket_count = sum(1 if tok.type == LBRACKET else -1 if tok.type == RBRACKET else 0 for tok in cur)
+                if paren_count == 0 and bracket_count == 0:
                     # Check if there's an ASSIGN in cur (this is a complete assignment statement)
                     has_assign = any(tok.type == ASSIGN for tok in cur)
                     if has_assign:

@@ -1005,7 +1030,7 @@ class StructuralAnalyzer:
                         last_line = cur[-1].line if cur else 0
                         if t.line > last_line:
                             # Check if current token could start a new statement
-                            # IDENT
+                            # IDENT could be a new statement (including indexed assignments like map[key] = val)
                             if t.type == IDENT:
                                 # This is likely a new statement on a new line
                                results.append(cur)
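
The structural analyzer's newline heuristic now requires brackets as well as parentheses to be balanced before a line break can close a statement. A tiny, hypothetical version of that balance test:

    def is_balanced(token_types):
        """True when every '(' and '[' seen so far has been closed."""
        paren = sum(1 if t == "LPAREN" else -1 if t == "RPAREN" else 0 for t in token_types)
        bracket = sum(1 if t == "LBRACKET" else -1 if t == "RBRACKET" else 0 for t in token_types)
        return paren == 0 and bracket == 0

    print(is_balanced(["IDENT", "LBRACKET", "IDENT", "RBRACKET", "ASSIGN", "INT"]))  # True
    print(is_balanced(["IDENT", "LPAREN", "IDENT"]))                                 # False
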
package/src/zexus/security.py
CHANGED

@@ -734,8 +734,10 @@ class StorageBackend:
 class InMemoryBackend(StorageBackend):
     def __init__(self):
         self.data = {}
-    def set(self, key, value):
-
+    def set(self, key, value):
+        self.data[key] = value
+    def get(self, key):
+        return self.data.get(key)
     def delete(self, key):
         if key in self.data: del self.data[key]
 

@@ -954,8 +956,24 @@ class SmartContract:
 
         print(f"  🔗 Contract Address: {new_address}")
 
-        #
-
+        # Copy initial storage values from the template contract
+        # This ensures instances get the evaluated initial values
+        initial_storage = {}
+        for var_node in self.storage_vars:
+            var_name = None
+            if hasattr(var_node, 'name'):
+                var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
+            elif isinstance(var_node, dict):
+                var_name = var_node.get("name")
+
+            if var_name:
+                # Get the initial value from the template contract's storage
+                value = self.storage.get(var_name)
+                if value is not None:
+                    initial_storage[var_name] = value
+
+        # Deploy the instance with the copied initial values
+        instance.deploy(evaluated_storage_values=initial_storage)
         instance.parent_contract = self
 
         print(f"   Available actions: {list(self.actions.keys())}")

@@ -964,32 +982,32 @@ class SmartContract:
     def __call__(self, *args):
         return self.instantiate(args)
 
-    def deploy(self):
-        """Deploy the contract and initialize persistent storage
+    def deploy(self, evaluated_storage_values=None):
+        """Deploy the contract and initialize persistent storage
+
+        Args:
+            evaluated_storage_values: Optional dict of evaluated initial values
+        """
         # Checks if we should reset storage or strictly load existing
         # For simplicity in this VM, subsequent runs act like "loading" if DB exists
         self.is_deployed = True
 
-        #
-
-        var_name
-        default_value = None
-
-        if hasattr(var_node, 'initial_value'):
-            var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
-            default_value = var_node.initial_value
-        elif isinstance(var_node, dict) and "initial_value" in var_node:
-            var_name = var_node.get("name")
-            default_value = var_node["initial_value"]
-
-        if var_name:
-            # ONLY set if not already in DB (Persistence Logic)
+        # If evaluated values are provided, use them (from evaluator)
+        if evaluated_storage_values:
+            for var_name, value in evaluated_storage_values.items():
                 if self.storage.get(var_name) is None:
-
-
-
-
-
+                    self.storage.set(var_name, value)
+        else:
+            # Fallback: Initialize storage with NULL for declared variables
+            for var_node in self.storage_vars:
+                var_name = None
+                if hasattr(var_node, 'name'):
+                    var_name = var_node.name.value if hasattr(var_node.name, 'value') else var_node.name
+                elif isinstance(var_node, dict):
+                    var_name = var_node.get("name")
+
+                if var_name and self.storage.get(var_name) is None:
+                    self.storage.set(var_name, Null)
 
     def call_method(self, action_name, args):
        """Call a contract action - similar to EntityInstance.call_method"""
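
The deploy() change moves storage initialization into one place: the evaluator passes already-evaluated initial values, instantiation copies the template contract's values into each instance, and nothing is overwritten once it exists in persistent storage. A hypothetical, trimmed-down model of that behaviour (a plain dict instead of the real storage backend, None instead of the Null object):

    class TinyContract:
        def __init__(self, storage_var_names):
            self.storage_var_names = storage_var_names
            self.storage = {}          # stands in for the persistent backend

        def deploy(self, evaluated_storage_values=None):
            if evaluated_storage_values:
                # Prefer values the interpreter already evaluated; never clobber persisted ones
                for name, value in evaluated_storage_values.items():
                    self.storage.setdefault(name, value)
            else:
                # Fallback: declared variables start out null
                for name in self.storage_var_names:
                    self.storage.setdefault(name, None)

    c = TinyContract(["owner", "total_supply"])
    c.deploy(evaluated_storage_values={"total_supply": 1000})
    print(c.storage)   # {'total_supply': 1000}
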
package/src/zexus/zexus_ast.py
CHANGED

@@ -592,12 +592,13 @@ class LiteralPattern:
         return f"LiteralPattern({self.value})"
 
 class PropertyAccessExpression(Expression):
-    def __init__(self, object, property):
+    def __init__(self, object, property, computed=False):
         self.object = object
         self.property = property
+        self.computed = computed  # True for obj[expr], False for obj.prop
 
     def __repr__(self):
-        return f"PropertyAccessExpression(object={self.object}, property={self.property})"
+        return f"PropertyAccessExpression(object={self.object}, property={self.property}, computed={self.computed})"
 
 class AssignmentExpression(Expression):
     def __init__(self, name, value):

package/src/zexus/zpm/package_manager.py
CHANGED

@@ -23,7 +23,7 @@ class PackageManager:
         self.installer = PackageInstaller(self.zpm_dir)
         self.publisher = PackagePublisher(self.registry)
 
-    def init(self, name: str = None, version: str = "1.6.3") -> Dict:
+    def init(self, name: str = None, version: str = "1.6.5") -> Dict:
         """Initialize a new Zexus project with package.json"""
         if self.config_file.exists():
             print(f"⚠️  {self.config_file} already exists")

package/src/zexus.egg-info/PKG-INFO
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: zexus
-Version: 1.6.3
+Version: 1.6.5
 Summary: A modern, security-first programming language with blockchain support
 Home-page: https://github.com/Zaidux/zexus-interpreter
 Author: Zaidux

@@ -50,7 +50,7 @@ Dynamic: requires-python
 
 <div align="center">
 
-
+
 [](LICENSE)
 [](https://python.org)
 [](https://github.com/Zaidux/zexus-interpreter)

package/src/zexus.egg-info/SOURCES.txt
CHANGED

@@ -1,6 +1,7 @@
 .gitattributes
 .gitignore
 CHANGELOG.md
+FIX_SUMMARY.md
 LICENSE
 PUBLISH_TO_NPM.md
 README.md

@@ -29,9 +30,7 @@ setup.cfg
 setup.py
 setup_stdlib.sh
 shared_config.json
-test_const_time_debug.zx
 test_data.json
-test_sqlite_python.py
 ultimate_test.zx
 zexus.json
 zpics

@@ -348,6 +347,8 @@ examples/test_postgres.zx
 examples/test_sqlite.zx
 examples/token_contract.zx
 examples/ziver_chain_test.zx
+issues/ISSUE2.md
+issues/ISSUSE1.md
 linguist-submission/SUBMISSION_INSTRUCTIONS.md
 linguist-submission/grammars.yml
 linguist-submission/languages.yml