openscvx 0.3.2.dev170__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (79)
  1. openscvx/__init__.py +123 -0
  2. openscvx/_version.py +34 -0
  3. openscvx/algorithms/__init__.py +92 -0
  4. openscvx/algorithms/autotuning.py +24 -0
  5. openscvx/algorithms/base.py +351 -0
  6. openscvx/algorithms/optimization_results.py +215 -0
  7. openscvx/algorithms/penalized_trust_region.py +384 -0
  8. openscvx/config.py +437 -0
  9. openscvx/discretization/__init__.py +47 -0
  10. openscvx/discretization/discretization.py +236 -0
  11. openscvx/expert/__init__.py +23 -0
  12. openscvx/expert/byof.py +326 -0
  13. openscvx/expert/lowering.py +419 -0
  14. openscvx/expert/validation.py +357 -0
  15. openscvx/integrators/__init__.py +48 -0
  16. openscvx/integrators/runge_kutta.py +281 -0
  17. openscvx/lowered/__init__.py +30 -0
  18. openscvx/lowered/cvxpy_constraints.py +23 -0
  19. openscvx/lowered/cvxpy_variables.py +124 -0
  20. openscvx/lowered/dynamics.py +34 -0
  21. openscvx/lowered/jax_constraints.py +133 -0
  22. openscvx/lowered/parameters.py +54 -0
  23. openscvx/lowered/problem.py +70 -0
  24. openscvx/lowered/unified.py +718 -0
  25. openscvx/plotting/__init__.py +63 -0
  26. openscvx/plotting/plotting.py +756 -0
  27. openscvx/plotting/scp_iteration.py +299 -0
  28. openscvx/plotting/viser/__init__.py +126 -0
  29. openscvx/plotting/viser/animated.py +605 -0
  30. openscvx/plotting/viser/plotly_integration.py +333 -0
  31. openscvx/plotting/viser/primitives.py +355 -0
  32. openscvx/plotting/viser/scp.py +459 -0
  33. openscvx/plotting/viser/server.py +112 -0
  34. openscvx/problem.py +734 -0
  35. openscvx/propagation/__init__.py +60 -0
  36. openscvx/propagation/post_processing.py +104 -0
  37. openscvx/propagation/propagation.py +248 -0
  38. openscvx/solvers/__init__.py +51 -0
  39. openscvx/solvers/cvxpy.py +226 -0
  40. openscvx/symbolic/__init__.py +9 -0
  41. openscvx/symbolic/augmentation.py +630 -0
  42. openscvx/symbolic/builder.py +492 -0
  43. openscvx/symbolic/constraint_set.py +92 -0
  44. openscvx/symbolic/expr/__init__.py +222 -0
  45. openscvx/symbolic/expr/arithmetic.py +517 -0
  46. openscvx/symbolic/expr/array.py +632 -0
  47. openscvx/symbolic/expr/constraint.py +796 -0
  48. openscvx/symbolic/expr/control.py +135 -0
  49. openscvx/symbolic/expr/expr.py +720 -0
  50. openscvx/symbolic/expr/lie/__init__.py +87 -0
  51. openscvx/symbolic/expr/lie/adjoint.py +357 -0
  52. openscvx/symbolic/expr/lie/se3.py +172 -0
  53. openscvx/symbolic/expr/lie/so3.py +138 -0
  54. openscvx/symbolic/expr/linalg.py +279 -0
  55. openscvx/symbolic/expr/math.py +699 -0
  56. openscvx/symbolic/expr/spatial.py +209 -0
  57. openscvx/symbolic/expr/state.py +607 -0
  58. openscvx/symbolic/expr/stl.py +136 -0
  59. openscvx/symbolic/expr/variable.py +321 -0
  60. openscvx/symbolic/hashing.py +112 -0
  61. openscvx/symbolic/lower.py +760 -0
  62. openscvx/symbolic/lowerers/__init__.py +106 -0
  63. openscvx/symbolic/lowerers/cvxpy.py +1302 -0
  64. openscvx/symbolic/lowerers/jax.py +1382 -0
  65. openscvx/symbolic/preprocessing.py +757 -0
  66. openscvx/symbolic/problem.py +110 -0
  67. openscvx/symbolic/time.py +116 -0
  68. openscvx/symbolic/unified.py +420 -0
  69. openscvx/utils/__init__.py +20 -0
  70. openscvx/utils/cache.py +131 -0
  71. openscvx/utils/caching.py +210 -0
  72. openscvx/utils/printing.py +301 -0
  73. openscvx/utils/profiling.py +37 -0
  74. openscvx/utils/utils.py +100 -0
  75. openscvx-0.3.2.dev170.dist-info/METADATA +350 -0
  76. openscvx-0.3.2.dev170.dist-info/RECORD +79 -0
  77. openscvx-0.3.2.dev170.dist-info/WHEEL +5 -0
  78. openscvx-0.3.2.dev170.dist-info/licenses/LICENSE +201 -0
  79. openscvx-0.3.2.dev170.dist-info/top_level.txt +1 -0
openscvx/symbolic/expr/stl.py
@@ -0,0 +1,136 @@
+ """Signal Temporal Logic (STL) operations for trajectory optimization.
+
+ This module provides symbolic expression nodes for Signal Temporal Logic (STL)
+ operations, enabling the specification of complex temporal and logical constraints
+ in optimization problems. STL is particularly useful for robotics and autonomous
+ systems where tasks involve temporal reasoning.
+ """
+
+ from typing import Tuple
+
+ import numpy as np
+
+ from .expr import Expr, to_expr
+
+
+ class Or(Expr):
+     """Logical OR operation for disjunctive constraints.
+
+     Represents a logical disjunction (OR) between multiple constraint expressions.
+     This is particularly useful in STL-based trajectory optimization for expressing
+     choices or alternatives in task specifications. The Or operation is typically
+     relaxed using smooth approximations (e.g., LogSumExp) during optimization.
+
+     The Or operation allows expressing constraints like:
+
+     - "Reach either goal A OR goal B"
+     - "Avoid obstacle 1 OR obstacle 2" (at least one must be satisfied)
+     - "Use path 1 OR path 2 OR path 3"
+
+     During optimization, the disjunction is typically approximated using:
+
+         Or(φ₁, φ₂, ..., φₙ) ≈ LSE(φ₁, φ₂, ..., φₙ) ≥ 0
+
+     where LSE is the LogSumExp (smooth maximum) function.
+
+     Attributes:
+         operands: List of expressions representing the disjunctive clauses
+
+     Example:
+         Use the Or STL operator to enforce that the robot must reach either of two goal regions:
+
+         import openscvx as ox
+         x = ox.State("x", shape=(2,))
+         goal_a = ox.Parameter("goal_a", shape=(2,), value=[1.0, 1.0])
+         goal_b = ox.Parameter("goal_b", shape=(2,), value=[-1.0, -1.0])
+         # Robot is within 0.5 units of either goal
+         reach_a = 0.25 - ox.Norm(x - goal_a)**2
+         reach_b = 0.25 - ox.Norm(x - goal_b)**2
+         reach_either = ox.Or(reach_a, reach_b)
+
+     Note:
+         The Or operation produces a scalar result even when operands are vector
+         expressions, as it represents a single logical proposition.
+
+     See Also:
+         LogSumExp: Common smooth approximation for OR operations
+         Max: Hard maximum (non-smooth alternative)
+     """
+
+     def __init__(self, *operands):
+         """Initialize a logical OR operation.
+
+         Args:
+             *operands: Two or more expressions to combine with logical OR.
+                 Each operand typically represents a constraint or condition.
+
+         Raises:
+             ValueError: If fewer than two operands are provided
+         """
+         if len(operands) < 2:
+             raise ValueError("Or requires at least two operands")
+         self.operands = [to_expr(op) for op in operands]
+
+     def children(self):
+         return self.operands
+
+     def canonicalize(self) -> "Expr":
+         """Canonicalize by flattening nested Or expressions.
+
+         Flattens nested Or operations into a single flat Or with all clauses
+         at the same level. For example: Or(a, Or(b, c)) → Or(a, b, c).
+         Also canonicalizes all operands recursively.
+
+         Returns:
+             Expr: Canonical form of the Or expression. If only one operand
+                 remains after canonicalization, returns that operand directly.
+         """
+         operands = []
+
+         for operand in self.operands:
+             canonicalized = operand.canonicalize()
+             if isinstance(canonicalized, Or):
+                 # Flatten nested Or: Or(a, Or(b, c)) -> Or(a, b, c)
+                 operands.extend(canonicalized.operands)
+             else:
+                 operands.append(canonicalized)
+
+         # Return simplified Or expression
+         if len(operands) == 1:
+             return operands[0]
+         return Or(*operands)
+
+     def check_shape(self) -> Tuple[int, ...]:
+         """Validate operand shapes and return result shape.
+
+         Checks that all operands have compatible (broadcastable) shapes. The Or
+         operation supports broadcasting, allowing mixing of scalars and vectors.
+
+         Returns:
+             tuple: Empty tuple () indicating a scalar result, as Or represents
+                 a single logical proposition
+
+         Raises:
+             ValueError: If fewer than two operands exist
+             ValueError: If operand shapes are not broadcastable
+         """
+         if len(self.operands) < 2:
+             raise ValueError("Or requires at least two operands")
+
+         # Validate all operands and get their shapes
+         operand_shapes = [operand.check_shape() for operand in self.operands]
+
+         # For logical operations, all operands should be broadcastable
+         # This allows mixing scalars with vectors for element-wise operations
+         try:
+             result_shape = operand_shapes[0]
+             for shape in operand_shapes[1:]:
+                 result_shape = np.broadcast_shapes(result_shape, shape)
+         except ValueError as e:
+             raise ValueError(f"Or operands not broadcastable: {operand_shapes}") from e
+
+         # Or produces a scalar result (like constraints)
+         return ()
+
+     def __repr__(self):
+         operands_repr = " | ".join(repr(op) for op in self.operands)
+         return f"Or({operands_repr})"
openscvx/symbolic/expr/variable.py
@@ -0,0 +1,321 @@
+ import hashlib
+
+ import numpy as np
+
+ from .expr import Leaf
+
+
+ class Variable(Leaf):
+     """Base class for decision variables in optimization problems.
+
+     Variable represents decision variables (free parameters) in an optimization problem.
+     These are values that the optimizer can adjust to minimize the objective function
+     while satisfying constraints. Variables can have bounds (min/max) and initial guesses
+     to guide the optimization process.
+
+     Unlike Parameters (which are fixed values that can be changed between solves),
+     Variables are optimized by the solver. In trajectory optimization, Variables typically
+     represent discretized state or control trajectories.
+
+     Note:
+         Variable is typically not instantiated directly. Instead, use the specialized
+         subclasses State (for state variables with boundary conditions) or Control
+         (for control inputs). These provide additional functionality specific to
+         trajectory optimization.
+
+     Attributes:
+         name (str): Name identifier for the variable
+         _shape (tuple[int, ...]): Shape of the variable as a tuple (typically 1D)
+         _slice (slice | None): Internal slice information for variable indexing
+         _min (np.ndarray | None): Minimum bounds for each element of the variable
+         _max (np.ndarray | None): Maximum bounds for each element of the variable
+         _guess (np.ndarray | None): Initial guess for the variable trajectory (n_points, n_vars)
+
+     Example:
+         # Typically, use State or Control instead of Variable directly:
+         pos = openscvx.State("pos", shape=(3,))
+         u = openscvx.Control("u", shape=(2,))
+     """
+
+     def __init__(self, name, shape):
+         """Initialize a Variable object.
+
+         Args:
+             name: Name identifier for the variable
+             shape: Shape of the variable as a tuple (typically 1D like (3,) for a 3D vector)
+         """
+         super().__init__(name, shape)
+         self._slice = None
+         self._min = None
+         self._max = None
+         self._guess = None
+
+     def __repr__(self):
+         return f"Var({self.name!r})"
+
+     def _hash_into(self, hasher: "hashlib._Hash") -> None:
+         """Hash Variable using its slice (canonical position, name-invariant).
+
+         Instead of hashing the variable name, we hash the _slice attribute
+         which represents the variable's canonical position in the unified
+         state/control vector. This ensures that two problems with the same
+         structure but different variable names produce the same hash.
+
+         Args:
+             hasher: A hashlib hash object to update
+
+         Raises:
+             RuntimeError: If the Variable has not been assigned a canonical
+                 _slice (i.e., the problem has not been preprocessed).
+         """
+         hasher.update(self.__class__.__name__.encode())
+         hasher.update(str(self._shape).encode())
+         # Hash the slice (canonical position) - this is name-invariant
+         if self._slice is not None:
+             hasher.update(f"slice:{self._slice.start}:{self._slice.stop}".encode())
+         else:
+             raise RuntimeError(
+                 f"Cannot hash Variable '{self.name}' without _slice attribute. "
+                 "Hashing should only be called on preprocessed problems where "
+                 "all Variables have been assigned canonical slice positions."
+             )
+
+     @property
+     def min(self):
+         """Get the minimum bounds (lower bounds) for the variable.
+
+         Returns:
+             Array of minimum values for each element of the variable, or None if unbounded.
+
+         Example:
+             pos = Variable("pos", shape=(3,))
+             pos.min = [-10, -10, 0]
+             print(pos.min)  # [-10., -10., 0.]
+         """
+         return self._min
+
+     @min.setter
+     def min(self, arr):
+         """Set the minimum bounds (lower bounds) for the variable.
+
+         The bounds are applied element-wise to each component of the variable
+         and must be given as a 1D array with one entry per element.
+
+         Args:
+             arr: 1D array of minimum values with shape (n,),
+                 where n is the variable dimension
+
+         Raises:
+             ValueError: If arr is not 1D with shape matching the variable dimension
+
+         Example:
+             pos = Variable("pos", shape=(3,))
+             pos.min = [-10, -10, -10]  # One bound per element
+             pos.min = [-5, -10, 0]  # Element-wise bounds
+         """
+         arr = np.asarray(arr, dtype=float)
+         if arr.ndim != 1 or arr.shape[0] != self.shape[0]:
+             raise ValueError(
+                 f"{self.__class__.__name__} min must be 1D with shape ({self.shape[0]},), got"
+                 f" {arr.shape}"
+             )
+         self._min = arr
+
+     @property
+     def max(self):
+         """Get the maximum bounds (upper bounds) for the variable.
+
+         Returns:
+             Array of maximum values for each element of the variable, or None if unbounded.
+
+         Example:
+             vel = Variable("vel", shape=(3,))
+             vel.max = [10, 10, 5]
+             print(vel.max)  # [10., 10., 5.]
+         """
+         return self._max
+
+     @max.setter
+     def max(self, arr):
+         """Set the maximum bounds (upper bounds) for the variable.
+
+         The bounds are applied element-wise to each component of the variable
+         and must be given as a 1D array with one entry per element.
+
+         Args:
+             arr: 1D array of maximum values with shape (n,),
+                 where n is the variable dimension
+
+         Raises:
+             ValueError: If arr is not 1D with shape matching the variable dimension
+
+         Example:
+             vel = Variable("vel", shape=(3,))
+             vel.max = [10, 10, 10]  # One bound per element
+             vel.max = [15, 10, 5]  # Element-wise bounds
+         """
+         arr = np.asarray(arr, dtype=float)
+         if arr.ndim != 1 or arr.shape[0] != self.shape[0]:
+             raise ValueError(
+                 f"{self.__class__.__name__} max must be 1D with shape ({self.shape[0]},), got"
+                 f" {arr.shape}"
+             )
+         self._max = arr
+
+     @property
+     def slice(self):
+         """Get the slice indexing this variable in the unified state/control vector.
+
+         After preprocessing, each variable is assigned a canonical position in the
+         unified optimization vector. This property returns the slice object that
+         extracts this variable's values from the unified vector.
+
+         This is particularly useful for expert users working with byof (bring-your-own
+         functions) who need to manually index into the unified x and u vectors.
+
+         Returns:
+             slice: Slice object for indexing into the unified vector, or None if the
+                 variable hasn't been preprocessed yet.
+
+         Example:
+             velocity = ox.State("velocity", shape=(3,))
+             # ... after Problem construction ...
+             print(velocity.slice)  # slice(2, 5) (for example)
+
+             # Use in byof functions
+             def my_constraint(x, u, node, params):
+                 vel = x[velocity.slice]  # Extract velocity from unified state
+                 return jnp.sum(vel**2) - 100  # |v|^2 <= 100
+         """
+         return self._slice
+
+     @property
+     def guess(self):
+         """Get the initial guess for the variable trajectory.
+
+         The guess provides a starting point for the optimizer. A good initial guess
+         can significantly improve convergence speed and help avoid local minima.
+
+         Returns:
+             2D array of shape (n_points, n_vars) representing the variable trajectory
+             over time, or None if no guess is provided.
+
+         Example:
+             x = Variable("x", shape=(2,))
+             # Linear interpolation from [0,0] to [10,10] over 50 points
+             x.guess = np.linspace([0, 0], [10, 10], 50)
+             print(x.guess.shape)  # (50, 2)
+         """
+         return self._guess
+
+     @guess.setter
+     def guess(self, arr):
+         """Set the initial guess for the variable trajectory.
+
+         The guess should be a 2D array where each row represents the variable value
+         at a particular time point or trajectory node.
+
+         Args:
+             arr: 2D array of shape (n_points, n_vars) where n_vars matches the
+                 variable dimension. Can have fewer points than the final trajectory -
+                 the solver will interpolate as needed.
+
+         Raises:
+             ValueError: If the array is not 2D or if the second dimension doesn't
+                 match the variable dimension
+
+         Example:
+             pos = Variable("pos", shape=(3,))
+             # Create a straight-line trajectory from origin to target
+             n_points = 50
+             pos.guess = np.linspace([0, 0, 0], [10, 5, 3], n_points)
+         """
+         arr = np.asarray(arr, dtype=float)
+         if arr.ndim != 2:
+             raise ValueError(
+                 f"Guess must be a 2D array of shape (n_guess_points, {self.shape[0]}), got shape"
+                 f" {arr.shape}"
+             )
+         if arr.shape[1] != self.shape[0]:
+             raise ValueError(
+                 f"Guess must have second dimension equal to variable dimension {self.shape[0]}, got"
+                 f" {arr.shape[1]}"
+             )
+         self._guess = arr
+
+     def append(self, other=None, *, min=-np.inf, max=np.inf, guess=0.0):
+         """Append a new dimension to this variable or merge with another variable.
+
+         This method extends the variable's dimension by either:
+         1. Appending another Variable object (concatenating their dimensions)
+         2. Adding a single new scalar dimension with specified bounds and guess
+
+         The bounds and guesses of both variables are concatenated appropriately.
+
+         Args:
+             other: Another Variable object to append. If None, adds a single scalar
+                 dimension with the specified min/max/guess values.
+             min: Minimum bound for the new dimension (only used if other is None).
+                 Defaults to -np.inf (unbounded below).
+             max: Maximum bound for the new dimension (only used if other is None).
+                 Defaults to np.inf (unbounded above).
+             guess: Initial guess value for the new dimension (only used if other is None).
+                 Defaults to 0.0.
+
+         Example:
+             Create a 2D variable and extend it to 3D:
+
+             pos_xy = Variable("pos", shape=(2,))
+             pos_xy.min = [-10, -10]
+             pos_xy.max = [10, 10]
+             pos_xy.append(min=0, max=100)  # Add z dimension
+             print(pos_xy.shape)  # (3,)
+             print(pos_xy.min)  # [-10., -10., 0.]
+             print(pos_xy.max)  # [10., 10., 100.]
+
+             Merge two variables:
+
+             pos = Variable("pos", shape=(3,))
+             vel = Variable("vel", shape=(3,))
+             pos.append(vel)  # Now pos has shape (6,)
+         """
+
+         def process_array(val, is_guess=False):
+             """Process input array to ensure correct shape and type.
+
+             Args:
+                 val: Input value to process
+                 is_guess: Whether the value is a guess array
+
+             Returns:
+                 Processed array with correct shape and type
+             """
+             arr = np.asarray(val, dtype=float)
+             if is_guess:
+                 return np.atleast_2d(arr)
+             return np.atleast_1d(arr)
+
+         if isinstance(other, Variable):
+             self._shape = (self.shape[0] + other.shape[0],)
+
+             if self._min is not None and other._min is not None:
+                 self._min = np.concatenate([self._min, process_array(other._min)], axis=0)
+
+             if self._max is not None and other._max is not None:
+                 self._max = np.concatenate([self._max, process_array(other._max)], axis=0)
+
+             if self._guess is not None and other._guess is not None:
+                 self._guess = np.concatenate(
+                     [self._guess, process_array(other._guess, is_guess=True)], axis=1
+                 )
+
+         else:
+             self._shape = (self.shape[0] + 1,)
+
+             if self._min is not None:
+                 self._min = np.concatenate([self._min, process_array(min)], axis=0)
+
+             if self._max is not None:
+                 self._max = np.concatenate([self._max, process_array(max)], axis=0)
+
+             if self._guess is not None:
+                 guess_arr = process_array(guess, is_guess=True)
+                 if guess_arr.shape[1] != 1:
+                     guess_arr = guess_arr.T
+                 self._guess = np.concatenate([self._guess, guess_arr], axis=1)
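As a concrete illustration of the bound, guess, and append semantics implemented above, the following sketch constructs a Variable directly (in practice the State/Control subclasses would be used, per the class docstring). The import path follows the file layout shown in this diff and is an assumption; the numbers are arbitrary, and the appended guess is given one value per existing guess point so that the column-wise concatenation in append lines up.

    import numpy as np
    from openscvx.symbolic.expr.variable import Variable

    pos = Variable("pos", shape=(2,))
    pos.min = [-10.0, -10.0]  # 1D array, one lower bound per element
    pos.max = [10.0, 10.0]
    pos.guess = np.linspace([0.0, 0.0], [1.0, 1.0], 5)  # (n_points, n_vars) = (5, 2)

    # Append a scalar altitude dimension with its own bounds and per-point guess
    pos.append(min=0.0, max=100.0, guess=[0.0, 0.25, 0.5, 0.75, 1.0])

    print(pos.shape)        # (3,)
    print(pos.min)          # [-10. -10.   0.]
    print(pos.guess.shape)  # (5, 3)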
openscvx/symbolic/hashing.py
@@ -0,0 +1,112 @@
+ """Structural hashing for symbolic problems.
+
+ This module provides name-invariant hashing for symbolic optimization problems.
+ Two problems with the same mathematical structure will produce the same hash,
+ regardless of the variable names used.
+
+ This enables efficient caching: if a problem has already been compiled with
+ the same structure, the cached compiled artifacts can be reused.
+ """
+
+ import hashlib
+ from typing import TYPE_CHECKING
+
+ import numpy as np
+
+ from openscvx._version import __version__
+
+ if TYPE_CHECKING:
+     from openscvx.symbolic.problem import SymbolicProblem
+
+
+ def hash_symbolic_problem(problem: "SymbolicProblem") -> str:
+     """Compute a structural hash of a symbolic optimization problem.
+
+     This function computes a hash that depends only on the mathematical structure
+     of the problem, not on variable names or runtime values. Two problems with the same:
+
+     - Dynamics expressions (using _slice for canonical variable positions)
+     - Constraints
+     - State/control shapes and boundary condition types
+     - Parameter shapes
+     - Configuration (N, etc.)
+
+     will produce the same hash, regardless of what names are used for variables.
+
+     Notably, the following are NOT included in the hash (allowing solver reuse):
+
+     - Boundary condition values (initial/final state values)
+     - Bound values (min/max for states and controls)
+     - Parameter values (only shapes are hashed)
+
+     Args:
+         problem: A SymbolicProblem (should be preprocessed for best results,
+             so that _slice attributes are set on states/controls)
+
+     Returns:
+         A hex string representing the SHA-256 hash of the problem structure
+     """
+     hasher = hashlib.sha256()
+
+     # Include library version to invalidate cache on version changes
+     hasher.update(f"openscvx:{__version__}:".encode())
+
+     # Hash the dynamics
+     hasher.update(b"dynamics:")
+     problem.dynamics._hash_into(hasher)
+
+     # Hash propagation dynamics if present
+     if problem.dynamics_prop is not None:
+         hasher.update(b"dynamics_prop:")
+         problem.dynamics_prop._hash_into(hasher)
+
+     # Hash all constraints (order-invariant within each category)
+     # We compute individual hashes and sort them so that the same set of
+     # constraints produces the same hash regardless of definition order.
+     hasher.update(b"constraints:")
+     for constraint_list in [
+         problem.constraints.ctcs,
+         problem.constraints.nodal,
+         problem.constraints.nodal_convex,
+         problem.constraints.cross_node,
+         problem.constraints.cross_node_convex,
+     ]:
+         # Compute individual hashes for each constraint
+         constraint_hashes = sorted(c.structural_hash() for c in constraint_list)
+         # Hash the count and sorted hashes
+         hasher.update(len(constraint_hashes).to_bytes(4, "big"))
+         for h in constraint_hashes:
+             hasher.update(h)
+
+     # Hash all states and controls explicitly to capture metadata (boundary
+     # condition types) that may not appear in expressions. For example, a state
+     # with dynamics dx/dt = 1.0 doesn't appear in the expression tree, but its
+     # boundary condition types still affect the compiled problem structure.
+     hasher.update(b"states:")
+     for state in problem.states:
+         state._hash_into(hasher)
+
+     hasher.update(b"controls:")
+     for control in problem.controls:
+         control._hash_into(hasher)
+
+     # Hash parameter shapes (not values) from the problem's parameter dict.
+     # This allows the same compiled solver to be reused across parameter sweeps -
+     # only the structure matters for compilation, not the actual values.
+     hasher.update(b"parameters:")
+     hasher.update(str(len(problem.parameters)).encode())  # Hash count for structure
+     for name in sorted(problem.parameters.keys()):
+         value = problem.parameters[name]
+         # Only hash shape, not name - maintains name-invariance
+         if isinstance(value, np.ndarray):
+             hasher.update(str(value.shape).encode())
+         else:
+             hasher.update(b"scalar")
+
+     # Hash configuration
+     hasher.update(f"N:{problem.N}".encode())
+
+     # Hash node intervals for CTCS
+     hasher.update(b"node_intervals:")
+     for interval in problem.node_intervals:
+         hasher.update(f"{interval}".encode())
+
+     return hasher.hexdigest()
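One way this hash could back a compile cache, consistent with the module docstring above. hash_symbolic_problem is the function defined in this file; the cache dictionary and the compile_fn callable are hypothetical stand-ins for whatever compilation step a caller performs, not part of the openscvx API.

    from openscvx.symbolic.hashing import hash_symbolic_problem

    _compiled_cache = {}  # structural hash (hex string) -> compiled artifact (hypothetical)

    def get_or_compile(problem, compile_fn):
        # The problem should already be preprocessed so each Variable carries
        # its canonical _slice; otherwise Variable._hash_into raises RuntimeError.
        key = hash_symbolic_problem(problem)
        if key not in _compiled_cache:
            _compiled_cache[key] = compile_fn(problem)
        return _compiled_cache[key]

Because bound values, boundary condition values, and parameter values are excluded from the hash, a solver compiled once can be reused across parameter sweeps that keep the problem structure fixed.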