demathpy 0.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
demathpy-0.0.1/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Misekai
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,106 @@
1
+ Metadata-Version: 2.4
2
+ Name: demathpy
3
+ Version: 0.0.1
4
+ Summary: PDE/ODE math backend
5
+ Author: Misekai
6
+ Author-email: Misekai <mcore-us@misekai.net>
7
+ License-Expression: MIT
8
+ License-File: LICENSE
9
+ Requires-Dist: numpy>=1.24.0
10
+ Requires-Dist: sympy>=1.12.0
11
+ Requires-Dist: typing>=3.10.0.0
12
+ Requires-Python: >=3.10
13
+ Description-Content-Type: text/markdown
14
+
15
+ # Demathpy
16
+
17
+ A Python library for parsing and safely evaluating symbolic Ordinary and Partial Differential Equations (ODEs/PDEs) on numerical grids.
18
+
19
+ This repository provides:
20
+
21
+ - A lightweight **symbol normalizer** that converts human-readable mathematical notation into valid Python expressions.
22
+ - A **secure evaluation engine** that evaluates PDE/ODE right-hand sides on NumPy grids without using raw `eval` on untrusted input.
23
+ - Built-in support for common differential operators and vector calculus notation.
24
+
25
+ ### Key Features
26
+
27
+ #### 1. Symbol Normalization
28
+
29
+ The parser supports Unicode and mathematical shorthand, including:
30
+
31
+ - Greek letters: `α, β, γ, λ, ε, φ, θ` → `alpha, beta, gamma, lam, epsilon, phi, theta`
32
+ - Powers: `u², v³` → `u**2, v**3`
33
+ - Implicit multiplication:
34
+ - `αu` → `alpha*u`
35
+ - `2u` → `2*u`
36
+ - `(u+1)(v+1)` → `(u+1)*(v+1)`
37
+ - Absolute values: `|u|` → `abs(u)`
38
+ - Common functions:
39
+ `sin, cos, tan, exp, log, tanh, sech, sign, sqrt`
40
+
41
+ #### 2. Differential Operator Support
42
+
43
+ The parser recognizes and evaluates:
44
+
45
+ - First derivatives:
46
+ - `du/dt`, `u_t`
47
+ - Second derivatives:
48
+ - `dxx(u)`, `dzz(u)`
49
+ - Laplacian:
50
+ - `∇²u`, `lap(u)`
51
+ - Gradient:
52
+ - `∇u`, `grad(u)`
53
+ - Divergence:
54
+ - `∇·(A)`, `div(A)`
55
+
56
+ These are mapped to finite-difference operators implemented in NumPy.
57
+
58
+ #### 3. PDE / ODE Parsing
59
+
60
+ The library provides:
61
+
62
+ - `parse_pde(equation: str)`
63
+   Parses a PDE string and returns `(var, order, coeff_expr, rhs_expr)`.
64
+
65
+ - `evaluate_rhs(rhs: str, grids: dict, constants: dict, grid_dx: float)`
66
+ Safely evaluates the right-hand side on NumPy arrays representing fields and parameters.
67
+
68
+ Supports equations such as:
69
+
70
+ ```text
71
+ ∂T/∂t = α ∇²T - σ T
72
+ u_t = D dxx(u) + f(u)
73
+ ∂φ/∂t = ∇·((1 + T²) ∇φ)
74
+ ```
75
+ #### 4. Secure Evaluation Environment
76
+ Only a restricted set of functions and operators are exposed.
77
+ No access to Python builtins, file I/O, or unsafe functions.
78
+ All variables must come from:
79
+ grids: NumPy arrays for fields (u, T, phi, etc.)
80
+ constants: scalar parameters (alpha, beta, lambda, etc.)
81
+ #### 5. Coordinate-Aware PDEs
82
+ The evaluator automatically provides spatial coordinate grids:
83
+ x, z as NumPy arrays derived from grid shape and spacing.
84
+ This enables anisotropic and spatially varying coefficients such as:
85
+ ``` α x dxx(T) + α z dzz(T)```
86
+ Typical Use:
87
+
88
+ ```python
89
+ from demathpy import parse_pde, evaluate_rhs
90
+
91
+ var, order, coeff, rhs = parse_pde("∂T/∂t = α ∇²T - σ T")
92
+
93
+ result = evaluate_rhs(
94
+ rhs,
95
+ grids={"T": T_grid},
96
+ constants={"alpha": 0.1, "sigma": 0.01},
97
+ grid_dx=0.01
98
+ )
99
+ ```
100
+
101
+ #### 6. Purpose
102
+ This project is intended as a safe mathematical expression parser for:
103
+ Scientific computing
104
+ PDE/ODE solvers
105
+ Physics and engineering simulations
106
+ Educational or sandboxed equation evaluation
@@ -0,0 +1,92 @@
1
+ # Demathpy
2
+
3
+ A Python library for parsing and safely evaluating symbolic Ordinary and Partial Differential Equations (ODEs/PDEs) on numerical grids.
4
+
5
+ This repository provides:
6
+
7
+ - A lightweight **symbol normalizer** that converts human-readable mathematical notation into valid Python expressions.
8
+ - A **secure evaluation engine** that evaluates PDE/ODE right-hand sides on NumPy grids without using raw `eval` on untrusted input.
9
+ - Built-in support for common differential operators and vector calculus notation.
10
+
11
+ ### Key Features
12
+
13
+ #### 1. Symbol Normalization
14
+
15
+ The parser supports Unicode and mathematical shorthand, including:
16
+
17
+ - Greek letters: `α, β, γ, λ, ε, φ, θ` → `alpha, beta, gamma, lam, epsilon, phi, theta`
18
+ - Powers: `u², v³` → `u**2, v**3`
19
+ - Implicit multiplication:
20
+ - `αu` → `alpha*u`
21
+ - `2u` → `2*u`
22
+ - `(u+1)(v+1)` → `(u+1)*(v+1)`
23
+ - Absolute values: `|u|` → `abs(u)`
24
+ - Common functions:
25
+ `sin, cos, tan, exp, log, tanh, sech, sign, sqrt`
26
+
27
+ #### 2. Differential Operator Support
28
+
29
+ The parser recognizes and evaluates:
30
+
31
+ - First derivatives:
32
+ - `du/dt`, `u_t`
33
+ - Second derivatives:
34
+ - `dxx(u)`, `dzz(u)`
35
+ - Laplacian:
36
+ - `∇²u`, `lap(u)`
37
+ - Gradient:
38
+ - `∇u`, `grad(u)`
39
+ - Divergence:
40
+ - `∇·(A)`, `div(A)`
41
+
42
+ These are mapped to finite-difference operators implemented in NumPy.
43
+
44
+ #### 3. PDE / ODE Parsing
45
+
46
+ The library provides:
47
+
48
+ - `parse_pde(equation: str)`
49
+   Parses a PDE string and returns `(var, order, coeff_expr, rhs_expr)`.
50
+
51
+ - `evaluate_rhs(rhs: str, grids: dict, constants: dict, grid_dx: float)`
52
+ Safely evaluates the right-hand side on NumPy arrays representing fields and parameters.
53
+
54
+ Supports equations such as:
55
+
56
+ ```text
57
+ ∂T/∂t = α ∇²T - σ T
58
+ u_t = D dxx(u) + f(u)
59
+ ∂φ/∂t = ∇·((1 + T²) ∇φ)
60
+ ```
61
+ #### 4. Secure Evaluation Environment
62
+ Only a restricted set of functions and operators are exposed.
63
+ No access to Python builtins, file I/O, or unsafe functions.
64
+ All variables must come from:
65
+ grids: NumPy arrays for fields (u, T, phi, etc.)
66
+ constants: scalar parameters (alpha, beta, lambda, etc.)
67
+ #### 5. Coordinate-Aware PDEs
68
+ The evaluator automatically provides spatial coordinate grids:
69
+ x, z as NumPy arrays derived from grid shape and spacing.
70
+ This enables anisotropic and spatially varying coefficients such as:
71
+ ``` α x dxx(T) + α z dzz(T)```
72
+ Typical Use:
73
+
74
+ ```python
75
+ from demathpy import parse_pde, evaluate_rhs
76
+
77
+ var, order, coeff, rhs = parse_pde("∂T/∂t = α ∇²T - σ T")
78
+
79
+ result = evaluate_rhs(
80
+ rhs,
81
+ grids={"T": T_grid},
82
+ constants={"alpha": 0.1, "sigma": 0.01},
83
+ grid_dx=0.01
84
+ )
85
+ ```
86
+
87
+ #### 6. Purpose
88
+ This project is intended as a safe mathematical expression parser for:
89
+ Scientific computing
90
+ PDE/ODE solvers
91
+ Physics and engineering simulations
92
+ Educational or sandboxed equation evaluation
@@ -0,0 +1,24 @@
1
+
2
+ [project]
3
+ name = "demathpy"
4
+ version = "0.0.1"
5
+ authors = [
6
+ { name="Misekai", email="mcore-us@misekai.net" },
7
+ ]
8
+ description = "PDE/ODE math backend"
9
+ readme = "README.md"
10
+ requires-python = ">=3.10"
11
+ dependencies = [
12
+ "numpy>=1.24.0",
13
+ "sympy>=1.12.0",
14
+     # "typing>=3.10.0.0",  # removed: the PyPI "typing" backport shadows the stdlib module and breaks on Python >= 3.10
15
+ ]
16
+
17
+
18
+ license = "MIT"
19
+ license-files = ["LICEN[CS]E*"]
20
+
21
+ [build-system]
22
+ requires = ["uv_build >= 0.9.26, <0.10.0"]
23
+ build-backend = "uv_build"
24
+
@@ -0,0 +1,33 @@
1
"""demathpy: PDE/ODE math backend for rde-core."""

# Re-export the public API of the submodules so callers can simply write
# `from demathpy import parse_pde, evaluate_rhs, ...`.
from .symbols import normalize_symbols, normalize_lhs
from .pde import (
    normalize_pde,
    init_grid,
    sample_gradient,
    parse_pde,
    evaluate_rhs,
    evaluate_scalar,
    step_pdes,
    evaluate_rhs_compiled,
    evaluate_scalar_compiled,
    step_compiled_pdes,
)
from .ode import robust_parse, parse_odes_to_function

# Explicit public surface; keep in sync with the imports above.
__all__ = [
    "normalize_symbols",
    "normalize_lhs",
    "normalize_pde",
    "init_grid",
    "sample_gradient",
    "parse_pde",
    "evaluate_rhs",
    "evaluate_scalar",
    "step_pdes",
    "evaluate_rhs_compiled",
    "evaluate_scalar_compiled",
    "step_compiled_pdes",
    "robust_parse",
    "parse_odes_to_function",
]
@@ -0,0 +1,131 @@
1
+ import json
2
+ import re
3
+ import sympy
4
+ import numpy as np
5
+ from sympy.parsing.sympy_parser import parse_expr, standard_transformations, implicit_multiplication_application, convert_xor
6
+
7
+
8
+ def _convert_ternary(expr: str) -> str:
9
+ """
10
+ Convert a single C-style ternary (cond ? a : b) into SymPy Piecewise.
11
+ Supports one level (no nesting).
12
+ """
13
+ if "?" not in expr or ":" not in expr:
14
+ return expr
15
+
16
+ # naive split for single ternary
17
+ # pattern: <cond> ? <a> : <b>
18
+ parts = expr.split("?")
19
+ if len(parts) != 2:
20
+ return expr
21
+ cond = parts[0].strip()
22
+ rest = parts[1]
23
+ if ":" not in rest:
24
+ return expr
25
+ a, b = rest.split(":", 1)
26
+ a = a.strip()
27
+ b = b.strip()
28
+ return f"Piecewise(({a}, {cond}), ({b}, True))"
29
+
30
+
31
def robust_parse(expr_str):
    """
    Parse a string into a SymPy expression with relaxed syntax rules.

    Accepted conveniences:
      - implicit multiplication (``5x`` -> ``5*x``)
      - caret power notation (``x^2`` -> ``x**2``)
      - ``y`` aliased to ``z`` for 2D convenience
      - a single C-style ternary (rewritten via ``_convert_ternary``)

    Non-string inputs are handed directly to ``sympy.sympify``.
    """
    if not isinstance(expr_str, str):
        return sympy.sympify(expr_str)

    xforms = standard_transformations + (implicit_multiplication_application, convert_xor)

    # Canonical symbols; 'y' deliberately resolves to z, 'id' to a plain symbol.
    x, z, vx, vz, t, pid = sympy.symbols('x z vx vz t id')
    names = {
        'x': x, 'z': z, 'y': z, 'vx': vx, 'vz': vz, 't': t, 'id': pid,
        'pi': sympy.pi, 'e': sympy.E,
        # SymPy's absolute value is capitalized; expose both spellings.
        'sin': sympy.sin,
        'cos': sympy.cos,
        'tan': sympy.tan,
        'exp': sympy.exp,
        'sqrt': sympy.sqrt,
        'log': sympy.log,
        'abs': sympy.Abs,
        'Abs': sympy.Abs,
        'Piecewise': sympy.Piecewise,
    }

    try:
        return parse_expr(_convert_ternary(expr_str),
                          transformations=xforms,
                          local_dict=names)
    except Exception:
        # Last resort: let sympify try with the same name bindings.
        return sympy.sympify(expr_str, locals=names)
69
+
70
+
71
def parse_odes_to_function(ode_json_str):
    """
    Build a per-particle Euler update function from a JSON ODE description.

    Args:
        ode_json_str: JSON text (or an already-decoded dict) with optional
            keys 'dx', 'dz', 'dvx', 'dvz', each holding a derivative
            expression in x, z, vx, vz, t. Missing keys default to "0".

    Returns:
        A ``dynamics(particle, dt)`` closure that mutates
        ``particle.x/z/vx/vz`` in place (and ``particle.time`` if present),
        or None when the JSON or any expression fails to parse.
    """
    try:
        if isinstance(ode_json_str, str):
            odes = json.loads(ode_json_str)
        else:
            odes = ode_json_str
    except json.JSONDecodeError as e:
        print(f"Failed to decode JSON from LLM: {e}")
        return None

    # Define standard symbols
    x, z, vx, vz, t = sympy.symbols('x z vx vz t')

    deriv_map = {}
    keys = ['dx', 'dz', 'dvx', 'dvz']

    for key in keys:
        expr_str = odes.get(key, "0")
        try:
            # Parse the expression safely using robust parser
            expr = robust_parse(str(expr_str))

            # Create a localized function
            # Arguments match the order we will call them
            func = sympy.lambdify((x, z, vx, vz, t), expr, modules=['numpy', 'math'])
            deriv_map[key] = func
        except Exception as e:
            # One bad expression invalidates the whole system.
            print(f"Error parsing expression for {key}: {e}")
            return None

    def dynamics(particle, dt):
        # Current state
        cx, cz, cvx, cvz = particle.x, particle.z, particle.vx, particle.vz
        # We assume particle might track time, or we just pass 0 if autonomous
        ct = getattr(particle, 'time', 0.0)

        try:
            # Calculate derivatives
            val_dx = deriv_map['dx'](cx, cz, cvx, cvz, ct)
            val_dz = deriv_map['dz'](cx, cz, cvx, cvz, ct)
            val_dvx = deriv_map['dvx'](cx, cz, cvx, cvz, ct)
            val_dvz = deriv_map['dvz'](cx, cz, cvx, cvz, ct)

            # Simple Euler Integration
            particle.x += float(val_dx) * dt
            particle.z += float(val_dz) * dt
            particle.vx += float(val_dvx) * dt
            particle.vz += float(val_dvz) * dt

            # Update time if tracked
            if hasattr(particle, 'time'):
                particle.time += dt

        except Exception as e:
            # Prevent crashing the renderer on math errors (e.g. div by zero)
            print(f"Runtime error in dynamics: {e}")

    return dynamics
@@ -0,0 +1,422 @@
1
+ """
2
+ Field math utilities (PDE/field-space helpers).
3
+
4
+ Provides a grid-based PDE solver and sampling utilities for projecting
5
+ field PDEs into particle motion without hardcoded dynamics.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ from typing import Dict, List, Tuple
11
+
12
+ import re
13
+ import numpy as np
14
+
15
+ from .symbols import normalize_symbols, normalize_lhs
16
+
17
+
18
def normalize_pde(pde: str) -> str:
    """Trim surrounding whitespace; None/empty input becomes ''."""
    if not pde:
        return ""
    return pde.strip()
20
+
21
+
22
def init_grid(width: int, height: int, dx: float) -> Tuple[np.ndarray, float]:
    """
    Allocate a zeroed (nz, nx) float grid covering width x height at spacing dx.

    Each dimension is clamped to at least 2 cells so finite-difference
    stencils always have neighbors.
    """
    cols = max(2, int(width / dx))
    rows = max(2, int(height / dx))
    return np.zeros((rows, cols), dtype=float), dx
27
+
28
+
29
def _preprocess_expr(expr: str) -> str:
    """
    Clean an RHS fragment before evaluation.

    Strips annotation tokens like '(approx)' that LLM output sometimes
    contains, keeps only the rightmost piece of a multi-'=' annotated
    expansion, and normalizes unicode math symbols to Python-callable form.
    """
    text = (expr or "").strip()
    for pattern in (r"\(\s*approx\s*\)", r"\bapprox\b"):
        text = re.sub(pattern, "", text, flags=re.IGNORECASE)
    # For 'a = b = c' style annotations, the final piece is the most explicit.
    text = text.rsplit("=", 1)[-1]
    return normalize_symbols(text).strip()
40
+
41
+
42
def parse_pde(pde: str) -> Tuple[str, int, str, str]:
    """
    Split a PDE string into ``(var, order, lhs_coeff_expr, rhs_expr)``.

    Supported left-hand-side forms:
        ∂u/∂t = RHS
        ∂²u/∂t² = RHS        (unicode ² is normalized to ^2 by normalize_lhs)
        ∂^n u / ∂t^n = RHS
        rho(∂²u/∂t²) = RHS   (leading coefficient expression)

    When no '=' or no recognizable time derivative is present, the whole
    input is treated as the RHS of a first-order equation in 'u'.
    """
    pde = normalize_pde(pde)
    if "=" not in pde:
        # No LHS at all: assume du/dt = <expr>.
        return "u", 1, "1", pde

    lhs, rhs = pde.split("=", 1)
    lhs = normalize_lhs(lhs.strip())
    rhs = rhs.strip()

    def _extract_coeff(lhs_expr: str, deriv_expr: str) -> str:
        # Whatever remains of the LHS once the derivative term is removed is
        # an optional multiplicative coefficient, e.g. rho in rho*∂²u/∂t².
        coeff = lhs_expr.replace(deriv_expr, "").strip()
        if not coeff:
            return "1"
        # Remove leading or trailing multiplication symbols
        coeff = coeff.strip("*")
        return _preprocess_expr(coeff) or "1"

    # N-th order time derivative: ∂^n v / ∂t^n
    # Matches patterns like: ∂^2 u / ∂t^2, ∂3u/∂t3, ∂ u / ∂t
    # We normalized unicode `²` to `^2` in normalize_lhs.

    # Regex for derivative term
    # breakdown:
    #   ∂                  literal partial
    #   \s*                whitespace
    #   (?:\^?(\d+))?      optional order: ^2, 2, or nothing (order 1)
    #   \s*                whitespace
    #   ([a-zA-Z_]\w*)     variable name (group 2)
    #   \s*                whitespace
    #   /                  division
    #   \s*                whitespace
    #   ∂t                 literal ∂t
    #   (?:\^?(\d+))?      optional order at bottom: ^2, 2, or nothing

    pattern = r"∂\s*(?:\^?(\d+))?\s*([a-zA-Z_]\w*)\s*/\s*∂t\s*(?:\^?(\d+))?"
    m = re.search(pattern, lhs)

    if m:
        # group 1: order on top, group 2: variable, group 3: order on bottom.
        # The two orders usually match; trust whichever is present and
        # default to first order when both are omitted.
        ord1 = m.group(1)
        var = m.group(2)
        ord2 = m.group(3)

        order = 1
        if ord1:
            order = int(ord1)
        elif ord2:
            order = int(ord2)

        coeff = _extract_coeff(lhs, m.group(0))
        return var, order, coeff, _preprocess_expr(rhs)

    # Fallback: LHS had '=' but no parseable time derivative.
    return "u", 1, "1", _preprocess_expr(rhs)
105
+
106
+
107
def evolve_pde(grid: np.ndarray, dt: float, constants: dict) -> np.ndarray:
    """
    Placeholder PDE evolution step.

    NOTE(review): this currently performs no update at all — the grid is
    returned unchanged regardless of `dt` or `constants` (the mentioned
    'alpha' diffusion coefficient is never read). Actual time stepping is
    done by `step_pdes` / `step_compiled_pdes`; this stub exists only for
    interface compatibility.
    """
    return grid
116
+
117
+
118
def sample_gradient(grid: np.ndarray, x: float, z: float, dx: float) -> Tuple[float, float]:
    """
    Central-difference gradient of `grid` at world position (x, z).

    The position is snapped to the nearest interior cell so the stencil
    never reads outside the array. Returns (du/dx, du/dz); (0.0, 0.0) when
    no grid is available.
    """
    if grid is None:
        return 0.0, 0.0
    rows, cols = grid.shape
    # Clamp indices to [1, n-2] so both stencil neighbors exist.
    col = int(np.clip(x / dx, 1, cols - 2))
    row = int(np.clip(z / dx, 1, rows - 2))
    gx = (grid[row, col + 1] - grid[row, col - 1]) / (2.0 * dx)
    gz = (grid[row + 1, col] - grid[row - 1, col]) / (2.0 * dx)
    return gx, gz
127
+
128
+
129
def _build_eval_env(vars_grid: Dict[str, np.ndarray], constants: Dict[str, float], grid_dx: float) -> Dict[str, object]:
    """
    Assemble the evaluation namespace for PDE right-hand sides.

    The namespace contains numpy math functions, finite-difference operators
    (built on np.roll, so boundaries wrap periodically), the user constants
    plus common aliases, a time symbol `t`, and every field grid.
    """
    def lap(u):
        # Five-point Laplacian: sum of the two second differences.
        return dxx(u) + dzz(u)

    def dx(u):
        # Central first difference along x (axis 1), periodic wrap.
        return (np.roll(u, -1, axis=1) - np.roll(u, 1, axis=1)) / (2 * grid_dx)

    def dz(u):
        # Central first difference along z (axis 0), periodic wrap.
        return (np.roll(u, -1, axis=0) - np.roll(u, 1, axis=0)) / (2 * grid_dx)

    def dxx(u):
        # Second difference along x.
        return (np.roll(u, -1, axis=1) - 2 * u + np.roll(u, 1, axis=1)) / (grid_dx ** 2)

    def dzz(u):
        # Second difference along z.
        return (np.roll(u, -1, axis=0) - 2 * u + np.roll(u, 1, axis=0)) / (grid_dx ** 2)

    def gradmag(u):
        # Squared gradient magnitude |∇u|² (no square root is taken).
        return dx(u) ** 2 + dz(u) ** 2

    def gradl1(u):
        # L1 gradient norm |u_x| + |u_z|.
        return np.abs(dx(u)) + np.abs(dz(u))

    def grad(u):
        # Return a 2x(HxW) array so scalar multiplication broadcasts naturally
        # and div(...) can consume it.
        return np.stack((dx(u), dz(u)), axis=0)

    def div(v):
        # Accept either a tuple/list (vx, vz) or a stacked array with shape (2, ...).
        if isinstance(v, (tuple, list)) and len(v) == 2:
            return dx(v[0]) + dz(v[1])
        if isinstance(v, np.ndarray) and v.ndim >= 3 and v.shape[0] == 2:
            return dx(v[0]) + dz(v[1])
        # Unrecognized operand: degrade to a zero grid rather than raising.
        return np.zeros_like(next(iter(vars_grid.values())))

    def sech(u):
        return 1.0 / np.cosh(u)

    def sign(u):
        return np.sign(u)

    def pos(u):
        # Positive part max(u, 0); backs the '⁺' notation.
        return np.maximum(u, 0.0)

    env = {
        "np": np,
        "sin": np.sin,
        "cos": np.cos,
        "tan": np.tan,
        "sinh": np.sinh,
        "cosh": np.cosh,
        "tanh": np.tanh,
        "arcsin": np.arcsin,
        "arccos": np.arccos,
        "arctan": np.arctan,
        "log": np.log,
        "log10": np.log10,
        "log2": np.log2,
        "exp": np.exp,
        "sqrt": np.sqrt,
        "abs": np.abs,
        "pi": np.pi,
        "inf": np.inf,
        "lap": lap,
        "dx": dx,
        "dz": dz,
        "dxx": dxx,
        "dzz": dzz,
        "gradmag": gradmag,
        "gradl1": gradl1,
        "grad": grad,
        "div": div,
        "pos": pos,
        "sech": sech,
        "sign": sign,
    }

    # Add constants (may shadow the math names above if the user insists).
    for k, v in (constants or {}).items():
        env[k] = v

    # Euler's number under both spellings, unless the user defined their own.
    if "E" not in env:
        env["E"] = np.e
    if "e" not in env:
        env["e"] = np.e

    # Normalize common constant aliases used in test corpora / LLM outputs.
    # - Users often provide 'lambda' and 'eps' in JSON to avoid Unicode / reserved keywords.
    if "lambda" in env and "lam" not in env:
        env["lam"] = env["lambda"]
    if "eps" in env and "epsilon" not in env:
        env["epsilon"] = env["eps"]

    # Common aliasing for constants
    if "w" in env and "omega" not in env:
        env["omega"] = env["w"]
    if "w2" in env and "omega2" not in env:
        env["omega2"] = env["w2"]
    if "ax" in env and "alphax" not in env:
        env["alphax"] = env["ax"]
    if "az" in env and "alphaz" not in env:
        env["alphaz"] = env["az"]
    if "th" in env and "theta" not in env:
        env["theta"] = env["th"]
    if "damp" in env and "zeta" not in env:
        env["zeta"] = env["damp"]

    # Provide time symbol if present
    if "t" not in env:
        env["t"] = float((constants or {}).get("t", 0.0))

    # Add variable grids last so field names win over everything else.
    for k, v in vars_grid.items():
        env[k] = v

    return env
245
+
246
+
247
def evaluate_rhs(rhs_expr: str, vars_grid: Dict[str, np.ndarray], constants: Dict[str, float], grid_dx: float) -> np.ndarray:
    """
    Evaluate an RHS expression on the grid using numpy operations.

    The expression is first normalized (`_preprocess_expr`) and then
    evaluated inside a restricted namespace from `_build_eval_env`.
    Unknown identifiers default to zero grids so a typo degrades gracefully
    instead of raising NameError.

    Raises:
        RuntimeError: when evaluation of the expression fails.
    """
    rhs_expr = _preprocess_expr(rhs_expr)
    env = _build_eval_env(vars_grid, constants, grid_dx)

    # Provide default zero grids for missing symbols referenced in RHS.
    if vars_grid:
        zero_grid = np.zeros_like(next(iter(vars_grid.values())))
        identifiers = set(re.findall(r"\b([A-Za-z_][A-Za-z0-9_]*)\b", rhs_expr))
        reserved = {
            "np",
            "sin", "cos", "tan", "sinh", "cosh", "tanh",
            "arcsin", "arccos", "arctan",
            "exp", "sqrt", "abs", "log", "log10", "log2",
            "sech", "sign",
            "pi", "inf", "E", "e",
            "lap", "dx", "dz", "dxx", "dzz", "grad", "div", "gradmag", "gradl1", "pos", "t",
        }
        for name in identifiers:
            if name in reserved:
                continue
            if name not in env:
                env[name] = zero_grid

    if not vars_grid:
        return np.zeros((2, 2), dtype=float)

    try:
        # Security fix: with plain `{}` globals, eval() injects the real
        # __builtins__ module, defeating the sandbox. An explicit empty
        # __builtins__ mapping restricts the expression to `env` names only.
        return eval(rhs_expr, {"__builtins__": {}}, env)
    except Exception as exc:
        raise RuntimeError(f"PDE RHS eval failed for '{rhs_expr}': {exc}") from exc
280
+
281
+
282
def evaluate_rhs_compiled(rhs_expr: str, vars_grid: Dict[str, np.ndarray], constants: Dict[str, float], grid_dx: float) -> np.ndarray:
    """
    Evaluate an already-normalized RHS expression (no preprocessing).

    Same semantics as `evaluate_rhs` but `rhs_expr` is assumed to have been
    produced by a prior `_preprocess_expr` pass (e.g. stored by a compiler
    step), so no symbol normalization is repeated here.

    Raises:
        RuntimeError: when evaluation of the expression fails.
    """
    env = _build_eval_env(vars_grid, constants, grid_dx)

    # Unknown identifiers default to zero grids so a typo degrades gracefully.
    if vars_grid:
        zero_grid = np.zeros_like(next(iter(vars_grid.values())))
        identifiers = set(re.findall(r"\b([A-Za-z_][A-Za-z0-9_]*)\b", rhs_expr))
        reserved = {
            "np",
            "sin", "cos", "tan", "sinh", "cosh", "tanh",
            "arcsin", "arccos", "arctan",
            "exp", "sqrt", "abs", "log", "log10", "log2",
            "sech", "sign",
            "pi", "inf", "E", "e",
            "lap", "dx", "dz", "dxx", "dzz", "grad", "div", "gradmag", "gradl1", "pos", "t",
        }
        for name in identifiers:
            if name in reserved:
                continue
            if name not in env:
                env[name] = zero_grid

    if not vars_grid:
        return np.zeros((2, 2), dtype=float)

    try:
        # Security fix: with plain `{}` globals, eval() injects the real
        # __builtins__ module, defeating the sandbox. An explicit empty
        # __builtins__ mapping restricts the expression to `env` names only.
        return eval(rhs_expr, {"__builtins__": {}}, env)
    except Exception as exc:
        raise RuntimeError(f"PDE RHS eval failed for '{rhs_expr}': {exc}") from exc
311
+
312
+
313
def evaluate_scalar(expr: str, vars_grid: Dict[str, np.ndarray], constants: Dict[str, float], grid_dx: float) -> float:
    """
    Evaluate a scalar coefficient expression with the same env as the RHS.

    The trivial coefficients '' and '1' short-circuit to 1.0; grid-valued
    results are collapsed to their spatial mean.
    """
    if expr == "" or expr == "1":
        return 1.0
    value = evaluate_rhs(expr, vars_grid, constants, grid_dx)
    if np.isscalar(value):
        return float(value)
    try:
        return float(np.mean(value))
    except Exception as exc:
        raise RuntimeError(f"PDE coeff eval failed for '{expr}': {exc}")
324
+
325
+
326
def evaluate_scalar_compiled(expr: str, vars_grid: Dict[str, np.ndarray], constants: Dict[str, float], grid_dx: float) -> float:
    """
    Evaluate a precompiled scalar coefficient expression (no preprocessing).

    The trivial coefficients '' and '1' short-circuit to 1.0; grid-valued
    results are collapsed to their spatial mean.
    """
    if expr == "" or expr == "1":
        return 1.0
    value = evaluate_rhs_compiled(expr, vars_grid, constants, grid_dx)
    if np.isscalar(value):
        return float(value)
    try:
        return float(np.mean(value))
    except Exception as exc:
        raise RuntimeError(f"PDE coeff eval failed for '{expr}': {exc}")
337
+
338
+
339
def step_pdes(
    field_pdes: List[str],
    grids: Dict[str, np.ndarray],
    constants: Dict[str, float],
    grid_dx: float,
    dt: float,
    external_grids: Dict[str, np.ndarray] | None = None,
) -> List[str]:
    """
    Advance every PDE in `field_pdes` by one explicit Euler step of size `dt`.

    Mutates `grids` in place; `external_grids` are readable inside
    expressions but never updated. Returns a list of error strings — one per
    failed PDE — so a single bad equation cannot abort the whole step.
    """
    errors: List[str] = []
    if not field_pdes or not grids:
        return errors

    # Expressions may reference any grid, including read-only external ones.
    vars_grid = dict(grids)
    if external_grids:
        vars_grid.update(external_grids)

    for pde in field_pdes:
        try:
            var, order, coeff_expr, rhs_expr = parse_pde(pde)
            if var not in grids:
                # Lazily materialize unseen variables with the shared shape.
                grids[var] = np.zeros_like(next(iter(grids.values())))

            rhs = evaluate_rhs(rhs_expr, vars_grid, constants, grid_dx)
            coeff = evaluate_scalar(coeff_expr, vars_grid, constants, grid_dx)
            # `coeff or 1.0` guards against dividing by a zero coefficient.
            rhs = rhs / float(coeff or 1.0)

            if order == 2:
                # Second order: integrate an auxiliary "velocity" grid <var>_t,
                # then the field itself (semi-implicit Euler in time).
                vname = f"{var}_t"
                if vname not in grids:
                    grids[vname] = np.zeros_like(grids[var])
                grids[vname] = grids[vname] + dt * rhs
                grids[var] = grids[var] + dt * grids[vname]
            else:
                grids[var] = grids[var] + dt * rhs
        except Exception as exc:
            errors.append(f"PDE eval failed for '{pde}': {exc}")

    return errors
378
+
379
+
380
def step_compiled_pdes(
    compiled_pdes: List[Dict[str, object]],
    grids: Dict[str, np.ndarray],
    constants: Dict[str, float],
    grid_dx: float,
    dt: float,
    external_grids: Dict[str, np.ndarray] | None = None,
) -> List[str]:
    """
    Advance already-compiled PDEs by one explicit Euler step of size `dt`.

    Each entry is a dict with keys 'var', 'order', 'coeff_expr', 'rhs_expr'
    (typically produced by a prior parse_pde pass). Mutates `grids` in
    place; `external_grids` are readable but never updated. Returns a list
    of error strings so one bad entry cannot abort the whole step.
    """
    errors: List[str] = []
    if not compiled_pdes or not grids:
        return errors

    # Expressions may reference any grid, including read-only external ones.
    vars_grid = dict(grids)
    if external_grids:
        vars_grid.update(external_grids)

    for entry in compiled_pdes:
        try:
            # Defensive defaults mirror parse_pde's fallback ("u", 1, "1", "0").
            var = entry.get("var", "u")
            order = int(entry.get("order", 1))
            coeff_expr = str(entry.get("coeff_expr", "1"))
            rhs_expr = str(entry.get("rhs_expr", "0"))

            if var not in grids:
                # Lazily materialize unseen variables with the shared shape.
                grids[var] = np.zeros_like(next(iter(grids.values())))

            rhs = evaluate_rhs_compiled(rhs_expr, vars_grid, constants, grid_dx)
            coeff = evaluate_scalar_compiled(coeff_expr, vars_grid, constants, grid_dx)
            # `coeff or 1.0` guards against dividing by a zero coefficient.
            rhs = rhs / float(coeff or 1.0)

            if order == 2:
                # Second order: integrate an auxiliary "velocity" grid <var>_t.
                vname = f"{var}_t"
                if vname not in grids:
                    grids[vname] = np.zeros_like(grids[var])
                grids[vname] = grids[vname] + dt * rhs
                grids[var] = grids[var] + dt * grids[vname]
            else:
                grids[var] = grids[var] + dt * rhs
        except Exception as exc:
            errors.append(f"PDE eval failed for '{entry}': {exc}")

    return errors
@@ -0,0 +1,379 @@
1
+ """
2
+ Symbol normalization utilities for PDE parsing.
3
+ Converts Unicode math symbols and common LaTeX-like tokens to Python-callable forms.
4
+ """
5
+ from __future__ import annotations
6
+
7
+ import re
8
+ from typing import Dict
9
+
10
# One-pass token map applied by normalize_symbols(). An earlier revision
# listed "π"/"Π" twice and gave "Δ" both "delta" and "lap"; in a dict literal
# the last duplicate silently wins, so the effective values — kept here
# exactly once each — are π/Π -> "pi" and Δ -> "lap" (capital delta is the
# Laplacian operator, not the Greek letter).
SYMBOL_MAP: Dict[str, str] = {
    # Basic operators
    "−": "-",
    "×": "*",
    "·": "*",
    "÷": "/",
    "^": "**",
    "√": "sqrt",
    "∞": "inf",
    "≈": "~",
    "≠": "!=",
    "≤": "<=",
    "≥": ">=",
    # Superscripts ("⁺" marks the positive-part operator)
    "²": "**2",
    "³": "**3",
    "⁴": "**4",
    "⁵": "**5",
    "⁶": "**6",
    "⁷": "**7",
    "⁸": "**8",
    "⁹": "**9",
    "⁰": "**0",
    "¹": "**1",
    "⁺": "POSITIVE_PART",
    # Subscripts flatten to plain digits
    "₀": "0",
    "₁": "1",
    "₂": "2",
    "₃": "3",
    "₄": "4",
    "₅": "5",
    "₆": "6",
    "₇": "7",
    "₈": "8",
    "₉": "9",
    # Greek letters (λ -> "lam" to avoid the Python keyword 'lambda')
    "α": "alpha", "β": "beta", "γ": "gamma", "δ": "delta", "ε": "epsilon",
    "ζ": "zeta", "η": "eta", "θ": "theta", "ι": "iota", "κ": "kappa",
    "λ": "lam", "μ": "mu", "ν": "nu", "ξ": "xi", "ο": "omicron",
    "π": "pi", "ρ": "rho", "σ": "sigma", "τ": "tau", "υ": "upsilon",
    "φ": "phi", "χ": "chi", "ψ": "psi", "ω": "omega",
    "Α": "alpha", "Β": "beta", "Γ": "gamma", "Ε": "epsilon",
    "Ζ": "zeta", "Η": "eta", "Θ": "theta", "Ι": "iota", "Κ": "kappa",
    "Λ": "lam", "Μ": "mu", "Ν": "nu", "Ξ": "xi", "Ο": "omicron",
    "Π": "pi", "Ρ": "rho", "Σ": "sigma", "Τ": "tau", "Υ": "upsilon",
    "Φ": "phi", "Χ": "chi", "Ψ": "psi", "Ω": "omega",
    # Structural symbols
    "∂": "",        # partial symbol is stripped on the RHS
    "Δ": "lap",     # capital delta = Laplacian operator
}
101
+
102
# Greek names in alphabet order; λ/Λ map to 'lam' (not the reserved word
# 'lambda').
_LHS_GREEK_NAMES = [
    "alpha", "beta", "gamma", "delta", "epsilon", "zeta", "eta", "theta",
    "iota", "kappa", "lam", "mu", "nu", "xi", "omicron", "pi", "rho",
    "sigma", "tau", "upsilon", "phi", "chi", "psi", "omega",
]

# LHS normalization keeps '∂' intact (the derivative parser needs it) and
# only rewrites Greek letters plus superscript exponents (as '^n', not '**n').
LHS_SYMBOL_MAP: Dict[str, str] = {}
for _lo, _up, _name in zip("αβγδεζηθικλμνξοπρστυφχψω",
                           "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ",
                           _LHS_GREEK_NAMES):
    LHS_SYMBOL_MAP[_lo] = _name
    LHS_SYMBOL_MAP[_up] = _name
for _exp, _sup in enumerate("²³⁴⁵⁶⁷⁸⁹", start=2):
    LHS_SYMBOL_MAP[_sup] = f"^{_exp}"
162
+
163
# Regex replacements for structured ∇-operators, applied in order.
# Order matters: the divergence forms (∇· / ∇*) and parenthesized /
# |∇u|² forms must be tried before the bare-identifier gradient fallback,
# which would otherwise swallow the variable name.
REGEX_RULES = [
    # divergence operator: ∇·u or ∇·(u)
    (r"∇\s*·\s*\(([^\)]*)\)", r"div(\1)"),
    (r"∇\s*·\s*([a-zA-Z_][a-zA-Z0-9_]*)", r"div(\1)"),
    # divergence shorthand: ∇ * u or ∇ * (u)
    (r"∇\s*\*\s*\(([^\)]*)\)", r"div(\1)"),
    (r"∇\s*\*\s*([a-zA-Z_][a-zA-Z0-9_]*)", r"div(\1)"),
    # gradient: ∇(u)
    (r"∇\s*\(([^\)]*)\)", r"grad(\1)"),
    # gradient magnitude |∇u|^2
    (r"\|\s*∇\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\|\^?2", r"gradmag(\1)"),
    # Laplacian: ∇²u
    (r"∇²\s*([a-zA-Z_][a-zA-Z0-9_]*)", r"lap(\1)"),
    # Gradient on a bare identifier: ∇u (must stay last)
    (r"∇\s*([a-zA-Z_][a-zA-Z0-9_]*)", r"grad(\1)"),
]
180
+
181
+
182
def normalize_symbols(expr: str) -> str:
    """Rewrite a unicode math expression (a PDE/ODE right-hand side) into a
    plain-ASCII form using named operators: div(), grad(), lap(), gradmag(),
    dx()/dz()/dxx()/dzz(), pos(), u_t/u_tt, etc.

    The passes below are ORDER-SENSITIVE: greek letters first (so later
    regexes see ASCII names), then nabla/partial-derivative operators, then
    the module-level SYMBOL_MAP and REGEX_RULES tables, and finally
    implicit-multiplication fix-ups.  Reordering the steps changes output.

    Args:
        expr: raw expression string (may contain unicode math symbols).

    Returns:
        The normalized expression, or "" for falsy input.
    """
    if not expr:
        return ""

    # --- Helpers ---------------------------------------------------------
    def _replace_balanced_paren_call(s: str, start_idx: int, fn_name: str, op_len: int) -> tuple[str, int] | None:
        """Replace an operator call like '∇·( ... )' (possibly nested) with 'div( ... )'.

        Unlike the regexes in REGEX_RULES, this handles NESTED parentheses by
        scanning for the matching close paren with a depth counter.

        Args:
            s: input string
            start_idx: index where the operator starts (the '∇')
            fn_name: replacement function name (e.g. 'div' or 'grad')
            op_len: number of characters to skip from start_idx to the '(' (inclusive of operator symbols)

        Returns:
            (new_string, next_scan_index) or None if no balanced paren call found.
        """
        # Find the '(' that starts the argument list
        i = start_idx + op_len
        while i < len(s) and s[i].isspace():
            i += 1
        if i >= len(s) or s[i] != "(":
            return None

        # Find matching ')' by tracking nesting depth
        depth = 0
        j = i
        while j < len(s):
            ch = s[j]
            if ch == "(":
                depth += 1
            elif ch == ")":
                depth -= 1
                if depth == 0:
                    inner = s[i + 1 : j]
                    replaced = f"{fn_name}({inner})"
                    new_s = s[:start_idx] + replaced + s[j + 1 :]
                    # Resume scanning just past the replacement so the inner
                    # text is not re-processed by the caller's loop.
                    return new_s, start_idx + len(replaced)
            j += 1
        # Unbalanced parentheses: leave the string untouched.
        return None

    # Normalize greek letters early so time-derivative regex can match.
    # Uppercase greek letters are folded to the same lowercase names;
    # λ/Λ map to "lam" (avoids Python's `lambda` keyword).
    greek_map = {
        "α": "alpha", "β": "beta", "γ": "gamma", "δ": "delta", "ε": "epsilon",
        "ζ": "zeta", "η": "eta", "θ": "theta", "ι": "iota", "κ": "kappa",
        "λ": "lam", "μ": "mu", "ν": "nu", "ξ": "xi", "ο": "omicron",
        "π": "pi", "ρ": "rho", "σ": "sigma", "τ": "tau", "υ": "upsilon",
        "φ": "phi", "χ": "chi", "ψ": "psi", "ω": "omega",
        "Α": "alpha", "Β": "beta", "Γ": "gamma", "Δ": "delta", "Ε": "epsilon",
        "Ζ": "zeta", "Η": "eta", "Θ": "theta", "Ι": "iota", "Κ": "kappa",
        "Λ": "lam", "Μ": "mu", "Ν": "nu", "Ξ": "xi", "Ο": "omicron",
        "Π": "pi", "Ρ": "rho", "Σ": "sigma", "Τ": "tau", "Υ": "upsilon",
        "Φ": "phi", "Χ": "chi", "Ψ": "psi", "Ω": "omega",
    }
    for k, v in greek_map.items():
        expr = expr.replace(k, v)

    # Strip stray question marks sometimes added by LLM output
    expr = expr.replace("?", "")

    # Normalize common whitespace variants around nabla operators so we can
    # safely do balanced-parenthesis rewrites.
    expr = re.sub(r"∇\s*[·\*]\s*", "∇·", expr)
    expr = re.sub(r"∇\s*\(", "∇(", expr)

    # Balanced-parenthesis rewrites for divergence / gradient:
    #   ∇·( ... ) -> div( ... )   (handles nested parentheses)
    #   ∇( ... )  -> grad( ... )
    # Manual index loop because each replacement changes the string length.
    i = 0
    while i < len(expr):
        if expr.startswith("∇·", i):
            out = _replace_balanced_paren_call(expr, i, "div", op_len=2)
            if out is not None:
                expr, i = out
                continue
        if expr.startswith("∇(", i):
            out = _replace_balanced_paren_call(expr, i, "grad", op_len=1)
            if out is not None:
                expr, i = out
                continue
        i += 1

    # Remove primes like t' -> t
    expr = re.sub(r"([a-zA-Z_][a-zA-Z0-9_]*)'", r"\1", expr)

    # Normalize partial derivatives BEFORE stripping ∂.
    # Time derivatives in RHS: ∂u/∂t -> u_t, ∂²u/∂t² -> u_tt
    # (second order first so the first-order rule cannot partially match it)
    expr = re.sub(r"∂\s*\^?2\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*/\s*∂t\s*\^?2", r"\1_tt", expr)
    expr = re.sub(r"∂\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*/\s*∂t", r"\1_t", expr)
    # Nested spatial derivatives collapse to second-order operators.
    expr = re.sub(r"∂x\s*\(\s*∂x\s*\(([^\)]*)\)\s*\)", r"dxx(\1)", expr)
    expr = re.sub(r"∂z\s*\(\s*∂z\s*\(([^\)]*)\)\s*\)", r"dzz(\1)", expr)
    expr = re.sub(r"∂x\s*\(", "dx(", expr)
    expr = re.sub(r"∂z\s*\(", "dz(", expr)

    # Positive part: (x)⁺ or x⁺ -> pos(x)
    expr = re.sub(r"\(([^\)]+)\)\s*⁺", r"pos(\1)", expr)
    expr = re.sub(r"([a-zA-Z_][a-zA-Z0-9_]*)\s*⁺", r"pos(\1)", expr)

    # Apply direct symbol replacement (SYMBOL_MAP is defined at module level)
    for k, v in SYMBOL_MAP.items():
        expr = expr.replace(k, v)

    # Fix concatenated greek-constant prefixes such as "alphasin(u)" or "betau**3".
    greek_words = (
        "alpha|beta|gamma|delta|epsilon|zeta|eta|theta|iota|kappa|lam|mu|nu|xi|"
        "omicron|pi|rho|sigma|tau|upsilon|phi|chi|psi|omega"
    )
    fn_words = "sin|cos|tan|sinh|cosh|tanh|exp|log|log10|log2|sqrt|abs|sech|sign"
    # Greek word immediately followed by a known function name: insert '*'.
    expr = re.sub(rf"\b({greek_words})(?=({fn_words})\b)", r"\1*", expr)
    # Greek word directly followed by a single letter (e.g. "betau") also gets
    # a '*' — BUT axis-suffixed coefficients like alphax/alphaz (common in
    # anisotropic diffusion terms) must stay intact.
    _axis_coeffs = {
        "alphax", "alphaz", "betax", "betaz", "gammax", "gammaz", "deltax", "deltaz",
        "sigmax", "sigmaz", "thetax", "thetaz", "kappax", "kappaz", "lambdax", "lambdaz",
        "rhox", "rhoz", "mux", "muz", "nux", "nuz",
    }

    def _split_greek_letter_var(m: re.Match) -> str:
        # Keep "alphax" whole; split "betau" into "beta*u".
        greek = m.group(1)
        letter = m.group(2)
        combined = f"{greek}{letter}"
        if combined in _axis_coeffs:
            return combined
        return f"{greek}*{letter}"

    expr = re.sub(rf"({greek_words})([A-Za-z])", _split_greek_letter_var, expr)

    # Absolute value for variables/parentheses
    expr = re.sub(r"\|\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\|", r"abs(\1)", expr)
    expr = re.sub(r"\|\s*\(([^\)]*)\)\s*\|", r"abs(\1)", expr)

    # |∇u| -> sqrt(gradmag(u))  (gradmag is the SQUARED magnitude)
    expr = re.sub(r"\|\s*∇\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\|", r"sqrt(gradmag(\1))", expr)

    # Catch-all for remaining absolute-value bars like |lap(u)|
    expr = re.sub(r"\|([^|]+)\|", r"abs(\1)", expr)

    # Apply regex-based transformations (REGEX_RULES is defined at module level)
    for pattern, repl in REGEX_RULES:
        expr = re.sub(pattern, repl, expr)

    # Handle ∇**2 and nested ∇**2; loop to a fixed point so nested forms
    # like ∇**2(∇**2(u)) fully collapse.
    prev = None
    while prev != expr:
        prev = expr
        expr = re.sub(r"∇\s*\*\*\s*2\s*\(([^\)]*)\)", r"lap(\1)", expr)
        expr = re.sub(r"∇\s*\*\*\s*2\s*([a-zA-Z_][a-zA-Z0-9_]*)", r"lap(\1)", expr)

    # Interpret ∇²u³ as lap(u**3)
    expr = re.sub(r"∇²\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\*\*\s*3", r"lap((\1)**3)", expr)
    expr = re.sub(r"∇²\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*³", r"lap((\1)**3)", expr)
    # If lap(u)**3 appears, interpret as lap(u**3)
    expr = re.sub(r"lap\(([^\)]+)\)\s*\*\*\s*3", r"lap((\1)**3)", expr)

    # Fix nested laplacian call pattern: lap(lap)(u) -> lap(lap(u))
    expr = re.sub(r"lap\(\s*lap\s*\)\s*\(([^\)]*)\)", r"lap(lap(\1))", expr)

    # Implicit multiplication: 2mu -> 2*mu, )u -> )*u, u( -> u*(
    # The negative lookahead avoids breaking scientific notation like 1e-9.
    expr = re.sub(r"(\d)\s*(?![eE][+-]?\d)([a-zA-Z_])", r"\1*\2", expr)
    expr = re.sub(r"(\d)\s*(\()", r"\1*\2", expr)
    expr = re.sub(r"(\))\s*([a-zA-Z_])", r"\1*\2", expr)
    # Symbol followed by operator function: eta lap(u) -> eta*lap(u)
    expr = re.sub(
        r"([a-zA-Z_][a-zA-Z0-9_]*)\s*(lap|grad|div|dx|dz|dxx|dzz|pos|gradl1|gradmag)\s*\(",
        r"\1*\2(",
        expr,
    )

    # Fix accidental insertions like gradl1*(u) -> gradl1(u)
    expr = re.sub(r"\b(gradl1|gradmag|lap|dx|dz|dxx|dzz|grad|div|pos)\s*\*\s*\(", r"\1(", expr)
    # Avoid inserting * for known functions
    def _fn_mul(match: re.Match) -> str:
        # "name(" stays a call if name is a known math/operator function;
        # otherwise it is implicit multiplication: "name*(".
        name = match.group(1)
        if name in {
            "sin", "cos", "tan",
            "sinh", "cosh", "tanh",
            "arcsin", "arccos", "arctan",
            "exp", "sqrt",
            "log", "log10", "log2",
            "abs", "sech", "sign",
            "lap", "dx", "dz", "dxx", "dzz", "grad", "div", "gradmag", "gradl1", "pos",
        }:
            return f"{name}("
        return f"{name}*("

    expr = re.sub(r"([a-zA-Z_][a-zA-Z0-9_]*)\(", _fn_mul, expr)

    return expr
372
+
373
+
374
def normalize_lhs(expr: str) -> str:
    """Normalize a left-hand-side expression via plain string substitution.

    Every key of the module-level LHS_SYMBOL_MAP found in *expr* is replaced
    by its mapped value.  Falsy input yields "".
    """
    if not expr:
        return ""
    normalized = expr
    for symbol, replacement in LHS_SYMBOL_MAP.items():
        normalized = normalized.replace(symbol, replacement)
    return normalized