shell-lite 0.5-py3-none-any.whl → 0.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tests/compare_parsers.py DELETED
@@ -1,31 +0,0 @@
- import sys
- import os
- import time
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- from shell_lite.lexer import Lexer
- from shell_lite.parser import Parser
- from shell_lite.parser_gbp import GeometricBindingParser
- def benchmark(filename):
-     with open(filename, 'r') as f:
-         source = f.read()
-     long_source = source * 500
-     print(f"Benchmarking on {len(long_source)} chars of code...")
-     lexer = Lexer(long_source)
-     tokens = lexer.tokenize()
-     tokens_copy = list(tokens)
-     start = time.perf_counter()
-     p_old = Parser(list(tokens)) # fresh copy? Parser consumes? yes
-     ast_old = p_old.parse()
-     end = time.perf_counter()
-     t_old = end - start
-     print(f"Recursive Descent: {t_old:.4f}s")
-     start = time.perf_counter()
-     p_new = GeometricBindingParser(list(tokens))
-     ast_new = p_new.parse()
-     end = time.perf_counter()
-     t_new = end - start
-     print(f"Geometric-Binding: {t_new:.4f}s")
-     diff = t_old / t_new if t_new > 0 else 0
-     print(f"Speedup: {diff:.2f}x")
- if __name__ == "__main__":
-     benchmark("tests/benchmark.shl")
tests/debug_jit.py DELETED
@@ -1,49 +0,0 @@
- import llvmlite.binding as llvm
- import sys
- def debug():
-     try:
-         llvm.initialize()
-         llvm.initialize_native_target()
-         llvm.initialize_native_asmprinter()
-         print("LLVM Initialized.")
-     except Exception as e:
-         print(f"Init Failed: {e}")
-         return
-     print("Available in llvm.binding:")
-     ops = [x for x in dir(llvm) if 'create' in x or 'jit' in x.lower() or 'engine' in x.lower()]
-     print(ops)
-     try:
-         mod = llvm.parse_assembly('define i32 @answer() { ret i32 42 }')
-         mod.verify()
-         print("Module parsed.")
-         target = llvm.Target.from_default_triple()
-         target_machine = target.create_target_machine()
-         engine = None
-         if hasattr(llvm, 'create_mcjit_compiler'):
-             print("Attempting MCJIT...")
-             try:
-                 engine = llvm.create_mcjit_compiler(mod, target_machine)
-                 print("MCJIT Created!")
-             except Exception as e:
-                 print(f"MCJIT Failed: {e}")
-         if not engine and hasattr(llvm, 'create_execution_engine'):
-             print("Attempting create_execution_engine...")
-             try:
-                 engine = llvm.create_execution_engine()
-                 engine.add_module(mod)
-                 print("ExecEngine Created!")
-             except Exception as e:
-                 print(f"ExecEngine Failed: {e}")
-         if engine:
-             engine.finalize_object()
-             print("Engine Finalized.")
-             addr = engine.get_function_address("answer")
-             print(f"Function Address: {addr}")
-             import ctypes
-             cfunc = ctypes.CFUNCTYPE(ctypes.c_int)(addr)
-             res = cfunc()
-             print(f"Result: {res}")
-     except Exception as e:
-         print(f"Module/Engine Error: {e}")
- if __name__ == "__main__":
-     debug()
@@ -1,84 +0,0 @@
- import sys
- import os
- import time
- import matplotlib.pyplot as plt
- import subprocess
- import llvmlite.binding as llvm
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- try:
-     from tests.run_jit import compile_and_run_jit
- except ImportError:
-     sys.path.append(os.path.join(os.path.dirname(__file__)))
-     from run_jit import compile_and_run_jit
- def run_benchmark():
-     counts = [1000000, 10000000, 50000000, 100000000, 200000000]
-     t_interp = []
-     t_python = []
-     t_llvm = []
-     print("Running Massive Runtime Benchmark...")
-     for n in counts:
-         print(f"\n--- n = {n} ---")
-         if n <= 100000: # Reduced limit for massive scale run
-             shl_code = f"i = 0\nwhile i < {n}:\n i = i + 1\n"
-             shl_file = f"tests/temp_{n}.shl"
-             with open(shl_file, "w") as f: f.write(shl_code)
-             env = os.environ.copy()
-             env["USE_GBP"] = "1"
-             start = time.perf_counter()
-             try:
-                 subprocess.run(["python", "shell_lite/main.py", "run", shl_file], env=env, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE)
-                 dur = time.perf_counter() - start
-             except subprocess.CalledProcessError as e:
-                 print(f"Interp failed: {e.stderr.decode()}")
-                 dur = None
-             except Exception as e:
-                 print(f"Interp failed: {e}")
-                 dur = None
-             t_interp.append(dur)
-             if os.path.exists(shl_file): os.remove(shl_file)
-             if dur is not None:
-                 print(f"Interpreter: {dur:.4f}s")
-             else:
-                 print("Interpreter: Failed")
-         else:
-             t_interp.append(None)
-             print("Interpreter: Skipped (Too Slow)")
-         start = time.perf_counter()
-         i = 0
-         while i < n:
-             i += 1
-         dur = time.perf_counter() - start
-         t_python.append(dur)
-         print(f"Python: {dur:.6f}s")
-         jit_code = f"""
- i = 0
- count = {n}
- while i < count:
- i = i + 1
- """
-         try:
-             _, dur, _ = compile_and_run_jit(jit_code)
-             if dur < 1e-7: dur = 1e-7
-             t_llvm.append(dur)
-             print(f"LLVM JIT: {dur:.8f}s")
-         except Exception as e:
-             print(f"JIT Failed: {e}")
-             t_llvm.append(None)
-     plt.figure(figsize=(10, 6))
-     x_interp = [x for x, y in zip(counts, t_interp) if y is not None]
-     y_interp = [y for y in t_interp if y is not None]
-     if x_interp:
-         plt.plot(x_interp, y_interp, label='ShellLite Interpreter', marker='o', color='orange')
-     plt.plot(counts, t_python, label='Python Native', marker='s', color='green')
-     plt.plot(counts, t_llvm, label='LLVM JIT', marker='^', color='purple', linestyle='-')
-     plt.xlabel('Iterations')
-     plt.ylabel('Time (seconds)')
-     plt.title('GBP+LLVM Runtime Performance (Massive Scale)')
-     plt.yscale('log')
-     plt.legend()
-     plt.grid(True, which="both", ls="-", alpha=0.2)
-     output_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'benchmark_final.png'))
-     plt.savefig(output_path)
-     print(f"Graph saved to {output_path}")
- if __name__ == "__main__":
-     run_benchmark()
@@ -1,68 +0,0 @@
- import sys
- import os
- import time
- import matplotlib.pyplot as plt
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- from shell_lite.lexer import Lexer
- from shell_lite.parser import Parser
- from shell_lite.parser_gbp import GeometricBindingParser
- HAS_LLVM = False
- try:
-     from shell_lite.llvm_backend.codegen import LLVMCompiler
-     HAS_LLVM = True
- except ImportError:
-     print("LLVM backend (llvmlite) not found. Skipping LLVM benchmark.")
- def generate_large_file(lines=1000):
-     code = "x = 0\n"
-     for i in range(lines):
-         code += f"if x < {i}:\n"
-         code += f" x = x + 1\n"
-     return code
- def run_benchmark():
-     sizes = [1000, 5000, 10000, 20000, 50000]
-     times_old = []
-     times_gbp = []
-     times_py = []
-     times_llvm = []
-     for size in sizes:
-         print(f"Benchmarking size: {size} lines...")
-         source_shl = generate_large_file(size)
-         source_py = source_shl.replace("x = x + 1", "x += 1")
-         start = time.perf_counter()
-         l = Lexer(source_shl)
-         toks = l.tokenize()
-         Parser(toks).parse()
-         times_old.append(time.perf_counter() - start)
-         start = time.perf_counter()
-         l = Lexer(source_shl)
-         toks = l.tokenize()
-         GeometricBindingParser(toks).parse()
-         times_gbp.append(time.perf_counter() - start)
-         start = time.perf_counter()
-         compile(source_py, '<string>', 'exec')
-         times_py.append(time.perf_counter() - start)
-         start = time.perf_counter()
-         if HAS_LLVM:
-             l = Lexer(source_shl)
-             toks = l.tokenize()
-             ast = GeometricBindingParser(toks).parse()
-             LLVMCompiler().compile(ast)
-             times_llvm.append(time.perf_counter() - start)
-         else:
-             times_llvm.append(0)
-     plt.figure(figsize=(10, 6))
-     plt.plot(sizes, times_old, label='Old Parser', marker='o', color='red')
-     plt.plot(sizes, times_gbp, label='GBP Parser', marker='s', color='blue')
-     plt.plot(sizes, times_py, label='Python Native', marker='^', color='green')
-     if HAS_LLVM and any(times_llvm):
-         plt.plot(sizes, times_llvm, label='LLVM Compile (via GBP)', marker='x', color='purple', linestyle='--')
-     plt.xlabel('Lines of Code')
-     plt.ylabel('Time (seconds)')
-     plt.title('Parsing/Compilation Speed vs Code Size')
-     plt.legend()
-     plt.grid(True)
-     output_path = os.path.join(os.path.dirname(__file__), 'benchmark_scaling.png')
-     plt.savefig(output_path)
-     print(f"Graph saved to {output_path}")
- if __name__ == "__main__":
-     run_benchmark()
@@ -1,58 +0,0 @@
- import sys
- import os
- import time
- import matplotlib.pyplot as plt
- import subprocess
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- def run_benchmark():
-     iterations = 1000000 # 1 Million
-     print(f"Benchmarking Runtime (Count: {iterations})...")
-     shl_code = f"""
- i = 0
- while i < {iterations}:
- i = i + 1
- """
-     shl_file = "tests/temp_loop.shl"
-     with open(shl_file, "w") as f:
-         f.write(shl_code)
-     env = os.environ.copy()
-     env["USE_GBP"] = "1"
-     start = time.perf_counter()
-     try:
-         subprocess.run(["python", "shell_lite/main.py", "run", shl_file], env=env, check=True, stdout=subprocess.DEVNULL, stderr=subprocess.PIPE)
-     except subprocess.CalledProcessError as e:
-         print(f"Interpreter Crash:\n{e.stderr.decode()}")
-         t_interp = 10.0 # Dummy
-     else:
-         t_interp = time.perf_counter() - start
-     print(f"Interpreter: {t_interp:.4f}s")
-     start = time.perf_counter()
-     i = 0
-     while i < iterations:
-         i = i + 1
-     t_python = time.perf_counter() - start
-     print(f"Python: {t_python:.4f}s")
-     t_llvm_est = t_python / 20.0
-     print(f"LLVM (Est): {t_llvm_est:.4f}s")
-     labels = ['ShellLite Interpreter', 'Python Native', 'LLVM Native (Projected)']
-     times = [t_interp, t_python, t_llvm_est]
-     colors = ['orange', 'green', 'purple']
-     plt.figure(figsize=(10, 6))
-     bars = plt.bar(labels, times, color=colors)
-     plt.ylabel('Execution Time (seconds)')
-     plt.title(f'Runtime Speed Comparison ({iterations} Iterations)')
-     plt.yscale('log') # Log scale because difference is massive
-     for bar in bars:
-         height = bar.get_height()
-         plt.text(bar.get_x() + bar.get_width()/2., height,
-                  f'{height:.4f}s',
-                  ha='center', va='bottom')
-     plt.figtext(0.5, 0.01,
-                 "Note: Logarithmic Scale used due to massive speed difference.",
-                 ha="center", fontsize=10)
-     output_path = os.path.join(os.path.dirname(__file__), 'benchmark_runtime.png')
-     plt.savefig(output_path)
-     print(f"Graph saved to {output_path}")
-     if os.path.exists(shl_file): os.remove(shl_file)
- if __name__ == "__main__":
-     run_benchmark()
tests/run_jit.py DELETED
@@ -1,70 +0,0 @@
- import sys
- import os
- import time
- import ctypes
- import llvmlite.binding as llvm
- try:
-     from llvmlite.binding.orcjit import create_lljit_compiler, JITLibraryBuilder
- except ImportError:
-     import llvmlite.binding.orcjit as orc
-     create_lljit_compiler = orc.create_lljit_compiler
-     JITLibraryBuilder = orc.JITLibraryBuilder
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- from shell_lite.lexer import Lexer
- from shell_lite.parser_gbp import GeometricBindingParser
- from shell_lite.llvm_backend.codegen import LLVMCompiler
- def compile_and_run_jit(source_code):
-     llvm.initialize_native_target()
-     llvm.initialize_native_asmprinter()
-     start_compile = time.perf_counter()
-     lexer = Lexer(source_code)
-     tokens = lexer.tokenize()
-     ast = GeometricBindingParser(tokens).parse()
-     compiler = LLVMCompiler()
-     ir_module = compiler.compile(ast)
-     llvm_ir = str(ir_module)
-     start_jit = time.perf_counter()
-     lljit = create_lljit_compiler()
-     builder = JITLibraryBuilder()
-     builder.add_ir(llvm_ir)
-     tracker = builder.link(lljit, "shell_lite_lib")
-     try:
-         addr = tracker["main"]
-     except KeyError:
-         try:
-             addr = tracker["_main"]
-         except KeyError:
-             print("Error: Could not find 'main' symbol.")
-             return 0, 0, -1
-     jit_time = time.perf_counter() - start_jit
-     cfunc = ctypes.CFUNCTYPE(ctypes.c_int)(addr)
-     start_run = time.perf_counter()
-     res = cfunc()
-     run_time = time.perf_counter() - start_run
-     return start_jit - start_compile, run_time, res
- if __name__ == "__main__":
-     code = """
- sum = 0
- i = 0
- count = 10000000
- while i < count:
- sum = sum + 1
- i = i + 1
- print sum
- """
-     print("Running JIT Speed Test (10M iterations)...")
-     try:
-         c_time, r_time, res = compile_and_run_jit(code)
-         print(f"Result: {res}")
-         print(f"JIT Exec Time: {r_time:.6f}s")
-         start_py = time.perf_counter()
-         s, i, c = 0, 0, 10000000
-         while i < c:
-             s += 1
-             i += 1
-         py_time = time.perf_counter() - start_py
-         print(f"Python Exec: {py_time:.6f}s")
-         print(f"Speedup: {py_time / r_time:.2f}x")
-     except Exception as e:
-         import traceback
-         traceback.print_exc()
@@ -1,37 +0,0 @@
- import sys
- import os
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
- from shell_lite.lexer import Lexer
- from shell_lite.parser_gbp import GeometricBindingParser
- def test_gbp(code, name):
-     print(f"--- Testing {name} ---")
-     print(f"Code:\n{code}")
-     lexer = Lexer(code)
-     tokens = lexer.tokenize()
-     parser = GeometricBindingParser(tokens)
-     parser.topology_scan()
-     print("\n[Phase 1] Topology Roots:")
-     for node in parser.root_nodes:
-         print(f" {node}")
-         for child in node.children:
-             print(f" -> {child}")
-     ast = parser.parse()
-     print("\n[Phase 2] AST:")
-     for node in ast:
-         print(f" {node}")
-     print("------------------------\n")
- if __name__ == "__main__":
-     code1 = """
- x = 10
- if x > 5:
- print x
- y = x + 1
- """
-     test_gbp(code1, "Basic If")
-     code2 = """
- to greet name:
- print "Hello"
- print name
- greet "Bob"
- """
-     test_gbp(code2, "Function Def")
tests/test_interpreter.py DELETED
@@ -1,8 +0,0 @@
- import unittest
- class TestInterpreter(unittest.TestCase):
-     def test_visit_number(self):
-         pass
-     def test_visit_binop(self):
-         pass
- if __name__ == '__main__':
-     unittest.main()
tests/test_lexer.py DELETED
@@ -1,8 +0,0 @@
- import unittest
- class TestLexer(unittest.TestCase):
-     def test_lexer_initialization(self):
-         pass
-     def test_make_tokens(self):
-         pass
- if __name__ == '__main__':
-     unittest.main()
tests/test_parser.py DELETED
@@ -1,8 +0,0 @@
- import unittest
- class TestParser(unittest.TestCase):
-     def test_parser_initialization(self):
-         pass
-     def test_parse_expression(self):
-         pass
- if __name__ == '__main__':
-     unittest.main()
tests/test_stdlib.py DELETED
@@ -1,8 +0,0 @@
- import unittest
- class TestStdLib(unittest.TestCase):
-     def test_print_function(self):
-         pass
-     def test_input_function(self):
-         pass
- if __name__ == '__main__':
-     unittest.main()