pydasa 0.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. pydasa/__init__.py +103 -0
  2. pydasa/_version.py +6 -0
  3. pydasa/analysis/__init__.py +0 -0
  4. pydasa/analysis/scenario.py +584 -0
  5. pydasa/analysis/simulation.py +1158 -0
  6. pydasa/context/__init__.py +0 -0
  7. pydasa/context/conversion.py +11 -0
  8. pydasa/context/system.py +17 -0
  9. pydasa/context/units.py +15 -0
  10. pydasa/core/__init__.py +15 -0
  11. pydasa/core/basic.py +287 -0
  12. pydasa/core/cfg/default.json +136 -0
  13. pydasa/core/constants.py +27 -0
  14. pydasa/core/io.py +102 -0
  15. pydasa/core/setup.py +269 -0
  16. pydasa/dimensional/__init__.py +0 -0
  17. pydasa/dimensional/buckingham.py +728 -0
  18. pydasa/dimensional/fundamental.py +146 -0
  19. pydasa/dimensional/model.py +1077 -0
  20. pydasa/dimensional/vaschy.py +633 -0
  21. pydasa/elements/__init__.py +19 -0
  22. pydasa/elements/parameter.py +218 -0
  23. pydasa/elements/specs/__init__.py +22 -0
  24. pydasa/elements/specs/conceptual.py +161 -0
  25. pydasa/elements/specs/numerical.py +469 -0
  26. pydasa/elements/specs/statistical.py +229 -0
  27. pydasa/elements/specs/symbolic.py +394 -0
  28. pydasa/serialization/__init__.py +27 -0
  29. pydasa/serialization/parser.py +133 -0
  30. pydasa/structs/__init__.py +0 -0
  31. pydasa/structs/lists/__init__.py +0 -0
  32. pydasa/structs/lists/arlt.py +578 -0
  33. pydasa/structs/lists/dllt.py +18 -0
  34. pydasa/structs/lists/ndlt.py +262 -0
  35. pydasa/structs/lists/sllt.py +746 -0
  36. pydasa/structs/tables/__init__.py +0 -0
  37. pydasa/structs/tables/htme.py +182 -0
  38. pydasa/structs/tables/scht.py +774 -0
  39. pydasa/structs/tools/__init__.py +0 -0
  40. pydasa/structs/tools/hashing.py +53 -0
  41. pydasa/structs/tools/math.py +149 -0
  42. pydasa/structs/tools/memory.py +54 -0
  43. pydasa/structs/types/__init__.py +0 -0
  44. pydasa/structs/types/functions.py +131 -0
  45. pydasa/structs/types/generics.py +54 -0
  46. pydasa/validations/__init__.py +0 -0
  47. pydasa/validations/decorators.py +510 -0
  48. pydasa/validations/error.py +100 -0
  49. pydasa/validations/patterns.py +32 -0
  50. pydasa/workflows/__init__.py +1 -0
  51. pydasa/workflows/influence.py +497 -0
  52. pydasa/workflows/phenomena.py +529 -0
  53. pydasa/workflows/practical.py +765 -0
  54. pydasa-0.4.7.dist-info/METADATA +320 -0
  55. pydasa-0.4.7.dist-info/RECORD +58 -0
  56. pydasa-0.4.7.dist-info/WHEEL +5 -0
  57. pydasa-0.4.7.dist-info/licenses/LICENSE +674 -0
  58. pydasa-0.4.7.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1077 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Module model.py
4
+ ============================================
5
+
6
+ Module for **Matrix** to perform Dimensional Analysis in *PyDASA*.
7
+
8
+ This module provides the Matrix class which implements matrix-based dimensional analysis following the Buckingham Pi theorem methodology.
9
+
10
+ Classes:
11
+ **Matrix**: Represents a dimensional matrix for performing dimensional analysis, including methods for matrix creation, solving, and coefficient generation.
12
+
13
+ *IMPORTANT:* Based on the theory from:
14
+ H. Görtler, *Dimensionsanalyse: Eine Theorie der physikalischen Dimensionen mit Anwendungen*
15
+ """
16
+
17
+ # native python modules
18
+ # forward references + postpone eval type hints
19
+ from __future__ import annotations
20
+ from dataclasses import dataclass, field, fields
21
+ from typing import List, Dict, Optional, Any, Union
22
+ import re
23
+
24
+ # numeric and symbolic computation
25
+ import numpy as np
26
+ import sympy as sp
27
+ from numpy.typing import NDArray
28
+
29
+ # basic-core and dimensional analysis imports
30
+ from pydasa.core.basic import Foundation
31
+ from pydasa.elements.parameter import Variable
32
+ from pydasa.dimensional.vaschy import Schema
33
+ from pydasa.dimensional.buckingham import Coefficient
34
+ # import global variables
35
+ from pydasa.core.setup import Frameworks
36
+ from pydasa.core.setup import VarCardinality
37
+ from pydasa.core.setup import PYDASA_CFG
38
+
39
+
40
+ # Global constants
41
+ MAX_OUT: int = 1
42
+ """Maximum number of output variables allowed."""
43
+
44
+ MAX_IN: int = 10
45
+ """Maximum number of input variables allowed."""
46
+
47
+
48
+ @dataclass
49
+ class Matrix(Foundation):
50
+ """**Matrix** for Dimensional Analysis in *PyDASA*. Manages the dimensional matrix for performing analysis using the Buckingham Pi theorem methodology.
51
+
52
+ Inherits:
53
+ Foundation: Foundation class for validation of symbols and frameworks.
54
+
55
+ Attributes:
56
+ # Core Identification
57
+ name (str): User-friendly name of the dimensional model.
58
+ description (str): Brief summary of the dimensional model.
59
+ _idx (int): Index/precedence of the dimensional model.
60
+ _sym (str): Symbol representation (LaTeX or alphanumeric).
61
+ _alias (str): Python-compatible alias for use in code.
62
+ _fwk (str): Frameworks context (PHYSICAL, COMPUTATION, SOFTWARE, CUSTOM).
63
+
64
+ # FDU Schema Management
65
+ _schema (Schema): Dimensional framework managing FDUs.
66
+ working_fdus (List[str]): Active FDUs used in current analysis.
67
+
68
+ # Variable Management
69
+ _variables (Dict[str, Variable]): All variables in the model.
70
+ _relevant_lt (Dict[str, Variable]): Relevant variables for analysis.
71
+ _output (Optional[Variable]): Output variable for analysis.
72
+
73
+ # Variable Statistics
74
+ _n_var (int): Total number of variables.
75
+ _n_relevant (int): Number of relevant variables.
76
+ _n_in (int): Number of input variables.
77
+ _n_out (int): Number of output variables.
78
+ _n_ctrl (int): Number of control variables.
79
+
80
+ # Matrix Representations
81
+ _dim_mtx (Optional[NDArray[np.float64]]): Dimensional matrix (FDUs × Variables).
82
+ _dim_mtx_trans (Optional[NDArray[np.float64]]): Transposed dimensional matrix.
83
+ _sym_mtx (Optional[sp.Matrix]): SymPy matrix for symbolic computation.
84
+ _rref_mtx (Optional[NDArray[np.float64]]): Row-Reduced Echelon Form matrix.
85
+
86
+ # Analysis Results
87
+ _pivot_cols (List[int]): Pivot columns in the RREF matrix.
88
+ _coefficients (Dict[str, Coefficient]): Dimensionless Pi coefficients.
89
+ """
90
+
91
+ # ========================================================================
92
+ # Core Identification
93
+ # ========================================================================
94
+ # TODO: maybe this attribute is not needed
95
+ # :attr: _name
96
+ _name: str = "Dimensional Matrix"
97
+ """User-friendly name of the dimensional matrix."""
98
+
99
+ # :attr: description
100
+ description: str = ""
101
+ """Brief summary of the dimensional matrix and its purpose."""
102
+
103
+ # :attr: _idx
104
+ _idx: int = -1
105
+ """Index/precedence of the dimensional model."""
106
+
107
+ # :attr: _sym
108
+ _sym: str = ""
109
+ """Symbol representation (LaTeX or alphanumeric)."""
110
+
111
+ # :attr: _alias
112
+ _alias: str = ""
113
+ """Python-compatible alias for use in code."""
114
+
115
+ # :attr: _fwk
116
+ _fwk: str = Frameworks.PHYSICAL.value
117
+ """Frameworks context (PHYSICAL, COMPUTATION, SOFTWARE, CUSTOM)."""
118
+
119
+ # ========================================================================
120
+ # Frameworks Management
121
+ # ========================================================================
122
+
123
+ # :attr: _schema
124
+ _schema: Schema = field(default_factory=Schema)
125
+ """Dimensional framework managing Fundamental Dimensional Units (FDUs)."""
126
+
127
+ # :attr: working_fdus
128
+ working_fdus: List[str] = field(default_factory=list)
129
+ """List of active FDU symbols used in the current analysis."""
130
+
131
+ # ========================================================================
132
+ # Variable Management
133
+ # ========================================================================
134
+
135
+ # :attr: _variables
136
+ _variables: Dict[str, Variable] = field(default_factory=dict)
137
+ """Dictionary of all parameters/variables in the model.
138
+
139
+ Keys are variable symbols (str), values are Variable instances.
140
+ """
141
+
142
+ # :attr: _relevant_lt
143
+ _relevant_lt: Dict[str, Variable] = field(default_factory=dict)
144
+ """Dictionary of relevant parameters/variables for dimensional analysis.
145
+
146
+ Filtered subset of _variables where Variable.relevant == True. Keys are variable symbols (str), values are Variable instances.
147
+
148
+ NOTE: called 'relevant list' by convention.
149
+ """
150
+
151
+ # :attr: _output
152
+ _output: Optional[Variable] = None
153
+ """The single output variable for the dimensional analysis.
154
+
155
+ Must be a variable with cat == "OUT".
156
+ """
157
+
158
+ # ========================================================================
159
+ # Variable Statistics
160
+ # ========================================================================
161
+
162
+ # :attr: _n_var
163
+ _n_var: int = 0
164
+ """Total number of variables in the model."""
165
+
166
+ # :attr: _n_relevant
167
+ _n_relevant: int = 0
168
+ """Number of variables marked as relevant for analysis."""
169
+
170
+ # :attr: _n_in
171
+ _n_in: int = 0
172
+ """Number of input variables (cat == "IN" and relevant == True)."""
173
+
174
+ # :attr: _n_out
175
+ _n_out: int = 0
176
+ """Number of output variables (cat == "OUT" and relevant == True)."""
177
+
178
+ # :attr: _n_ctrl
179
+ _n_ctrl: int = 0
180
+ """Number of control variables (cat == "CTRL" and relevant == True)."""
181
+
182
+ # ========================================================================
183
+ # Matrix Representations
184
+ # ========================================================================
185
+
186
+ # :attr: _dim_mtx
187
+ # _dim_mtx: Optional[np.ndarray] = field(default_factory=lambda: np.array([]))
188
+ _dim_mtx: Optional[NDArray[np.float64]] = None
189
+ """Dimensional matrix as NumPy array.
190
+
191
+ Shape: (n_fdus, n_relevant_vars)
192
+ Each column represents a variable's dimensional formula.
193
+ Each row represents an FDU's exponent across all variables.
194
+ """
195
+
196
+ # :attr: _dim_mtx_trans
197
+ _dim_mtx_trans: Optional[NDArray[np.float64]] = None
198
+ """Transposed dimensional matrix.
199
+
200
+ Shape: (n_relevant_vars, n_fdus)
201
+ Transpose of _dim_mtx for alternative operations.
202
+ """
203
+
204
+ # :attr: _sym_mtx
205
+ _sym_mtx: Optional[sp.Matrix] = field(default_factory=lambda: sp.Matrix([]))
206
+ """SymPy Matrix representation for symbolic computation.
207
+
208
+ Used for RREF calculation and nullspace computation.
209
+ Equivalent to _dim_mtx but in SymPy format.
210
+ """
211
+
212
+ # :attr: _rref_mtx
213
+ _rref_mtx: Optional[NDArray[np.float64]] = None
214
+ """Row-Reduced Echelon Form (RREF) of the dimensional matrix.
215
+
216
+ Result of Gaussian elimination on _sym_mtx.
217
+ Used to identify pivot columns and compute nullspace.
218
+ """
219
+
220
+ # :attr: _nullspace
221
+ _nullspace: List[Union[np.ndarray, sp.Matrix]] = field(default_factory=list)
222
+ """List of nullspace vectors of the dimensional matrix.
223
+
224
+ Can be a list of NumPy arrays or a list of SymPy column vectors."""
225
+
226
+ # ========================================================================
227
+ # Analysis Results
228
+ # ========================================================================
229
+
230
+ # :attr: _pivot_cols
231
+ _pivot_cols: List[int] = field(default_factory=list)
232
+ """Indices of pivot columns in the RREF matrix.
233
+
234
+ Identifies which variables are dependent (pivot) vs. independent (free).
235
+ """
236
+
237
+ # :attr: _coefficients
238
+ _coefficients: Dict[str, Coefficient] = field(default_factory=dict)
239
+ """Dictionary of dimensionless Pi coefficients.
240
+
241
+ Keys are coefficient symbols (e.g., "\\Pi_{0}"), values are Coefficient instances.
242
+ Generated from the nullspace of the dimensional matrix.
243
+ """
244
+
245
+ # ========================================================================
246
+ # Initialization
247
+ # ========================================================================
248
+
249
+ def __post_init__(self) -> None:
250
+ """*__post_init__()* Initialize the dimensional matrix.
251
+
252
+ Validates variables, sets up the framework, identifies relevant variables, and prepares for dimensional analysis.
253
+ """
254
+ # Initialize base class
255
+ super().__post_init__()
256
+
257
+ # # Update global configuration from framework
258
+ # if self._schema:
259
+ # self._schema.update_global_config()
260
+
261
+ # Process variables if provided
262
+ if self._variables:
263
+ self._prepare_analysis()
264
+
265
+ # Ensure proper types
266
+ if not isinstance(self._dim_mtx, np.ndarray):
267
+ self._dim_mtx = np.array([], dtype=float)
268
+
269
+ if not isinstance(self._sym_mtx, sp.Matrix):
270
+ self._sym_mtx = sp.Matrix([])
271
+
272
+ # ========================================================================
273
+ # Preparation Methods
274
+ # ========================================================================
275
+
276
+ def _prepare_analysis(self) -> None:
277
+ """*_prepare_analysis()* Prepare the model for dimensional analysis.
278
+
279
+ Sets up relevant variables, computes model statistics, identifies the output variable, and extracts working FDUs.
280
+
281
+ Raises:
282
+ ValueError: If variable configuration is invalid.
283
+ """
284
+ # Update variable statistics
285
+ self._update_variable_stats()
286
+
287
+ # Identify and sort relevant variables
288
+ self._relevant_lt = {
289
+ k: v for k, v in self._variables.items() if v.relevant
290
+ }
291
+ self._relevant_lt = self._sort_by_category(self._relevant_lt)
292
+
293
+ # Find the output variable
294
+ self._find_output_variable()
295
+
296
+ # Extract working FDUs from relevant variables
297
+ self.working_fdus = self._extract_fdus()
298
+
299
+ # Handle CUSTOM framework
300
+ if self._fwk == Frameworks.CUSTOM.value and self.working_fdus:
301
+ _fwk = self._schema
302
+ _w_fdus = self.working_fdus
303
+ if not all(fdu in _fwk.fdu_symbols for fdu in _w_fdus):
304
+ _msg = f"Invalid CUSTOM FDUs: {_w_fdus}. "
305
+ _msg += f"Must be subset of: {_fwk.fdu_symbols}."
306
+ raise ValueError(_msg)
307
+
308
+ def _update_variable_stats(self) -> None:
309
+ """*_update_variable_stats()* Update variable statistics.
310
+
311
+ Computes the number of variables, inputs, outputs, and control variables. Validates the model constraints.
312
+
313
+ Raises:
314
+ ValueError: If model has invalid variable counts.
315
+ """
316
+ _vars = self._variables.values()
317
+
318
+ # Count all variables
319
+ self._n_var = len(_vars)
320
+ self._n_relevant = len([v for v in _vars if v.relevant])
321
+
322
+ # Count by category (only relevant ones)
323
+ IN = VarCardinality.IN.value
324
+ OUT = VarCardinality.OUT.value
325
+ self._n_in = len([v for v in _vars if v.cat == IN and v.relevant])
326
+ self._n_out = len([v for v in _vars if v.cat == OUT and v.relevant])
327
+ self._n_ctrl = self._n_relevant - self._n_in - self._n_out
328
+
329
+ # Validate output count
330
+ if self._n_out == 0:
331
+ _msg = "No output variable defined. At least one output variable"
332
+ _msg += " (cat='OUT', relevant=True) is required."
333
+ raise ValueError(_msg)
334
+
335
+ if self._n_out > MAX_OUT:
336
+ _msg = f"Invalid number of outputs: {self._n_out}. "
337
+ _msg += f"Maximum allowed: {MAX_OUT}."
338
+ raise ValueError(_msg)
339
+
340
+ # Validate input count
341
+ if self._n_in == 0:
342
+ _msg = "No input variables defined. "
343
+ _msg += "At least one input variable is required."
344
+ raise ValueError(_msg)
345
+
346
+ max_inputs = len(self._schema.fdu_symbols)
347
+ if self._n_in > max_inputs:
348
+ _msg = f"Too many input variables: {self._n_in}. "
349
+ _msg += f"Maximum allowed: {max_inputs} (number of FDUs)."
350
+ raise ValueError(_msg)
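+
+ # Illustrative bounds (hedged: the exact limit depends on the configured
+ # Schema): assuming a PHYSICAL schema with the seven SI base dimensions,
+ # the checks above allow exactly one relevant output variable and between
+ # one and seven relevant input variables; any remaining relevant
+ # variables are counted as control variables.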
351
+
352
+ def _sort_by_category(self,
353
+ vars_lt: Dict[str, Variable]) -> Dict[str, Variable]:
354
+ """*_sort_by_category()* Sorts variables by category.
355
+
356
+ Sorts variables in order: OUT → IN → CTRL. Updates variable indices to reflect sorted order.
357
+
358
+ Args:
359
+ vars_lt (Dict[str, Variable]): Dictionary of variables to sort.
360
+
361
+ Returns:
362
+ Dict[str, Variable]: Sorted dictionary of variables.
363
+ """
364
+ # Get category order from global config
365
+ # cat_order = list(PYDASA_CFG.parameter_cardinality)
366
+ cat_order = [c.value for c in PYDASA_CFG.parameter_cardinality]
367
+
368
+ # Sort by category precedence
369
+ sorted_items = sorted(vars_lt.items(),
370
+ key=lambda v: cat_order.index(v[1].cat))
371
+
372
+ # # FIXME IA weird lambda function, check later!!!
373
+ # sorted_items = sorted(vars_lt.items(),
374
+ # key=lambda v: cat_order.index(v[1].cat) if v[1].cat in cat_order else len(cat_order))
375
+
376
+ # Update indices and rebuild dictionary
377
+ sorted_dict = {}
378
+ for i, (k, v) in enumerate(sorted_items):
379
+ v._idx = i
380
+ sorted_dict[k] = v
381
+
382
+ return sorted_dict
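+
+ # Illustrative sketch (hypothetical variables, not part of the released
+ # module): with the OUT -> IN -> CTRL precedence described above, a
+ # relevance list such as {"v": <IN>, "F": <OUT>, "T": <CTRL>} is reordered
+ # to {"F": <OUT>, "v": <IN>, "T": <CTRL>} and the variables' _idx fields
+ # are reset to 0, 1, 2 in that order.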
383
+
384
+ def _find_output_variable(self) -> None:
385
+ """*_find_output_variable()* Identifies the output variable.
386
+
387
+ Finds the first variable with cat == "OUT" in the relevant list.
388
+ """
389
+ values = self._relevant_lt.values()
390
+ self._output = next((v for v in values if v.cat == VarCardinality.OUT.value), None)
391
+
392
+ def _extract_fdus(self) -> List[str]:
393
+ """*_extract_fdus()* Extracts FDUs from relevant variables.
394
+
395
+ Scans all relevant variables' dimension strings to find which FDUs are actually used.
396
+
397
+ Returns:
398
+ List[str]: List of unique FDU symbols used, in precedence order.
399
+ """
400
+ # Collect all dimension strings
401
+ var_dims = [v.std_dims for v in self._relevant_lt.values()]
402
+
403
+ # Extract FDU symbols using regex
404
+ fdus = [
405
+ d for d in re.findall(self._schema.fdu_sym_regex, str(var_dims))
406
+ ]
407
+
408
+ # Remove duplicates while preserving order
409
+ unique_fdus = list(dict.fromkeys(fdus))
410
+
411
+ return unique_fdus
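+
+ # Order-preserving deduplication sketch (illustrative values only):
+ # dict.fromkeys(["L", "M", "L", "T"]) keeps first occurrences, so
+ # list(dict.fromkeys(["L", "M", "L", "T"])) == ["L", "M", "T"],
+ # whereas a plain set would not guarantee this ordering.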
412
+
413
+ # ========================================================================
414
+ # Matrix Operations
415
+ # ========================================================================
416
+
417
+ def create_matrix(self) -> None:
418
+ """*create_matrix()* Builds the dimensional matrix.
419
+
420
+ Creates the dimensional matrix by arranging variable dimensions as columns. Each row represents an FDU, each column a variable.
421
+
422
+ Raises:
423
+ ValueError: If no relevant variables exist.
424
+ ValueError: If variables have invalid or missing dimensional columns.
425
+ """
426
+ if not self._relevant_lt:
427
+ raise ValueError("No relevant variables to create matrix from.")
428
+
429
+ # Validate that all variables have dimensional columns
430
+ invalid_vars = []
431
+ for var in self._relevant_lt.values():
432
+ if not var._dim_col or len(var._dim_col) == 0:
433
+ invalid_vars.append(f"{var._sym} (dims='{var._dims}')")
434
+
435
+ if invalid_vars:
436
+ _msg = "Variables with missing or empty dimensional columns detected:\n"
437
+ _msg += "\n".join(f" - {v}" for v in invalid_vars)
438
+ _msg += "\n\nEnsure all relevant variables have valid '_dims' "
439
+ _msg += "properties (e.g., 'L', 'M*L*T^-2', etc.) and not 'n.a.'"
440
+ raise ValueError(_msg)
441
+
442
+ # Get dimensions
443
+ n_fdu = len(self._schema.fdu_symbols)
444
+ n_var = len(self._relevant_lt)
445
+
446
+ # Initialize empty matrix
447
+ self._dim_mtx = np.zeros((n_fdu, n_var), dtype=float)
448
+
449
+ # Fill matrix with dimension columns
450
+ for var in self._relevant_lt.values():
451
+ # Ensure dimension column has correct length
452
+ dim_col = var._dim_col
453
+
454
+ # Pad or truncate to match FDU count
455
+ if len(dim_col) < n_fdu:
456
+ dim_col = dim_col + [0] * (n_fdu - len(dim_col))
457
+ elif len(dim_col) > n_fdu:
458
+ dim_col = dim_col[:n_fdu]
459
+
460
+ # Set column in matrix
461
+ self._dim_mtx[:, var._idx] = dim_col
462
+
463
+ # Create transposed version
464
+ self._dim_mtx_trans = self._dim_mtx.T
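+
+ # Worked sketch (hypothetical variables, not part of the released module):
+ # for F = m * a with FDUs (M, L, T) and columns ordered F, m, a, the
+ # matrix built above would be
+ #
+ #         F   m   a
+ #   M  [  1,  1,  0 ]
+ #   L  [  1,  0,  1 ]
+ #   T  [ -2,  0, -2 ]
+ #
+ # i.e. each column holds one variable's FDU exponents ([M L T^-2] for F,
+ # [M] for m, [L T^-2] for a).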
465
+
466
+ def solve_matrix(self) -> None:
467
+ """*solve_matrix()* Solves the dimensional matrix.
468
+
469
+ Computes the Row-Reduced Echelon Form (RREF) of the matrix, identifies pivot columns, and generates dimensionless coefficients from the nullspace.
470
+
471
+ Raises:
472
+ ValueError: If matrix hasn't been created yet.
473
+ """
474
+ # Ensure matrix exists
475
+ if not isinstance(self._dim_mtx, np.ndarray) or self._dim_mtx.size == 0:
476
+ self.create_matrix()
477
+
478
+ # Convert to SymPy for symbolic computation
479
+ self._sym_mtx = sp.Matrix(self._dim_mtx)
480
+
481
+ # Compute RREF and pivot columns
482
+ rref_result, pivot_cols = self._sym_mtx.rref()
483
+
484
+ # Store results
485
+ self._rref_mtx = np.array(rref_result).astype(float)
486
+ self._pivot_cols = list(pivot_cols)
487
+
488
+ # Generate coefficients from nullspace
489
+ self._generate_coefficients()
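+
+ # Minimal sketch of the underlying SymPy calls (standalone, illustrative
+ # values from the F = m * a example above; not part of the released module):
+ #
+ #   >>> import sympy as sp
+ #   >>> m = sp.Matrix([[1, 1, 0], [1, 0, 1], [-2, 0, -2]])
+ #   >>> rref, pivots = m.rref()
+ #   >>> pivots
+ #   (0, 1)
+ #   >>> m.nullspace()[0].T
+ #   Matrix([[-1, 1, 1]])
+ #
+ # The nullspace vector [-1, 1, 1] corresponds to the dimensionless group
+ # Pi_0 = F^-1 * m * a (equivalently m * a / F).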
490
+
491
+ def _generate_coefficients(self) -> None:
492
+ """*_generate_coefficients()* Generates dimensionless coefficients.
493
+
494
+ Creates Coefficient objects from each nullspace vector of the dimensional matrix. Each nullspace vector represents a dimensionless group (Pi coefficient).
495
+ """
496
+ if self._sym_mtx is None:
497
+ _msg = "Symbolic matrix not created. Call solve_matrix() first."
498
+ raise ValueError(_msg)
499
+
500
+ # Compute nullspace vectors
501
+ self._nullspace = self._sym_mtx.nullspace()
502
+
503
+ # Clear existing coefficients
504
+ self._coefficients.clear()
505
+
506
+ # Get variable symbols in order
507
+ var_syms = list(self._relevant_lt.keys())
508
+
509
+ # Create coefficient for each nullspace vector
510
+ for i, vector in enumerate(self._nullspace):
511
+ # Convert to numpy array
512
+ vector_np = np.array(vector).flatten().astype(float)
513
+
514
+ # Create variable dictionary for this coefficient
515
+ # TODO: coef_vars is built but not used below; possibly redundant.
516
+ coef_vars = {}
517
+ for j, val in enumerate(vector_np):
518
+ if j < len(var_syms) and isinstance(val, (int, float)):
519
+ coef_vars[var_syms[j]] = self._relevant_lt[var_syms[j]]
520
+
521
+ # Create Pi coefficient
522
+ pi_sym = f"\\Pi_{{{i}}}"
523
+ coef = Coefficient(
524
+ _idx=i,
525
+ _sym=pi_sym,
526
+ _alias=f"Pi_{i}",
527
+ _fwk=self._fwk,
528
+ _cat="COMPUTED",
529
+ _variables=self._relevant_lt,
530
+ _dim_col=vector_np.tolist(),
531
+ _pivot_lt=self._pivot_cols,
532
+ _name=f"Pi-{i}",
533
+ description=f"Dimensionless coefficient {i} from nullspace"
534
+ )
535
+
536
+ self._coefficients[pi_sym] = coef
537
+
538
+ # ========================================================================
539
+ # Coefficient Derivation
540
+ # ========================================================================
541
+
542
+ def derive_coefficient(self,
543
+ expr: str,
544
+ name: str = "",
545
+ description: str = "",
546
+ idx: int = -1) -> Coefficient:
547
+ """*derive_coefficient()* Creates a new coefficient derived from existing ones.
548
+
549
+ Combines existing dimensionless coefficients using a mathematical expression. The new coefficient is marked as "DERIVED".
550
+
551
+ Args:
552
+ expr (str): Mathematical expression using existing coefficients.
553
+ Examples: "\\Pi_{0} * \\Pi_{1}", "\\Pi_{0} / \\Pi_{2}^2"
554
+ name (str, optional): Name for the derived coefficient. Defaults to "Derived-Pi-{idx}".
555
+ description (str, optional): Description of the coefficient. Defaults to "Derived from: {expr}".
556
+ idx (int, optional): Index for the coefficient. If -1, the next available index is used.
557
+
558
+ Returns:
559
+ Coefficient: The newly created derived coefficient.
560
+
561
+ Raises:
562
+ ValueError: If expression is invalid or references non-existent coefficients.
563
+ ValueError: If expression creates dimensionally inconsistent result.
564
+
565
+ Example:
566
+ >>> # Create Reynolds number as ratio of two Pi groups
567
+ >>> Re = model.derive_coefficient(
568
+ ... expr="\\Pi_{0} / \\Pi_{1}",
569
+ ... name="Reynolds Number",
570
+ ... description="Ratio of inertial to viscous forces"
571
+ ... )
572
+ """
573
+ # Validate coefficients exist
574
+ if not self._coefficients:
575
+ _msg = "Cannot derive coefficients. No base coefficients exist yet."
576
+ raise ValueError(_msg)
577
+
578
+ # Extract coefficient symbols from expression
579
+ coef_pattern = r"\\Pi_\{\d+\}"
580
+ coef_symbols = re.findall(coef_pattern, expr)
581
+
582
+ if not coef_symbols:
583
+ _msg = f"Expression '{expr}' does not contain any valid "
584
+ _msg += "coefficient references (format: \\Pi_{{n}})."
585
+ raise ValueError(_msg)
586
+
587
+ # Validate all referenced coefficients exist
588
+ for sym in coef_symbols:
589
+ if sym not in self._coefficients:
590
+ _msg = f"Referenced coefficient {sym} does not exist."
591
+ raise ValueError(_msg)
592
+
593
+ # Determine next available index
594
+ if idx == -1:
595
+ existing_indices = [c._idx for c in self._coefficients.values()]
596
+ idx = max(existing_indices) + 1 if existing_indices else 0
597
+
598
+ # Generate defaults
599
+ if name == "":
600
+ name = f"Derived-Pi-{idx}"
601
+
602
+ if description == "":
603
+ description = f"Derived from: {expr}"
604
+
605
+ # Get base coefficient for structure
606
+ base_coef = self._coefficients[coef_symbols[0]]
607
+ new_variables = base_coef._variables.copy()
608
+ # Start from zero exponents; the parsing loop below accumulates every
+ # referenced coefficient, including the first one.
+ new_dim_col = [0.0] * len(base_coef._dim_col)
609
+
610
+ # Validate all coefficients use same variables
611
+ for sym in coef_symbols[1:]:
612
+ coef = self._coefficients[sym]
613
+ if set(coef._variables.keys()) != set(new_variables.keys()):
614
+ _msg = f"Coefficient {sym} uses different variables. "
615
+ _msg += "Cannot derive new coefficient."
616
+ raise ValueError(_msg)
617
+
618
+ # Parse expression for operations
+ parts = re.split(r"(\*|/|\^)", expr)
+ current_op = "*"
+ last_term: List[float] = []
+
+ for part in parts:
+ part = part.strip()
+
+ if part in ("*", "/", "^"):
+ current_op = part
+ elif re.match(coef_pattern, part):
+ coef = self._coefficients[part]
+ # Multiplication adds the exponents, division subtracts them
+ sign = 1.0 if current_op == "*" else -1.0
+ last_term = [sign * b for b in coef._dim_col]
+ new_dim_col = [a + b for a, b in zip(new_dim_col, last_term)]
+ elif current_op == "^" and re.fullmatch(r"-?\d+(\.\d+)?", part):
+ # Exponentiation (e.g. "^2"): rescale the previous term's contribution
+ exp = float(part)
+ new_dim_col = [a + (exp - 1.0) * b for a, b in zip(new_dim_col, last_term)]
+ last_term = [exp * b for b in last_term]
636
+
637
+ # Create derived coefficient
638
+ new_sym = f"\\Pi_{{{idx}}}"
639
+ derived_coef = Coefficient(
640
+ _idx=idx,
641
+ _sym=new_sym,
642
+ _alias=f"Pi_{idx}",
643
+ _fwk=self._fwk,
644
+ _cat="DERIVED",
645
+ _name=name,
646
+ description=description,
647
+ _variables=new_variables,
648
+ _dim_col=new_dim_col,
649
+ _pivot_lt=self._pivot_cols
650
+ )
651
+
652
+ # Add to coefficients dictionary
653
+ self._coefficients[new_sym] = derived_coef
654
+
655
+ return derived_coef
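+
+ # Exponent-arithmetic sketch (illustrative values only): deriving
+ # "\\Pi_{0} / \\Pi_{1}" with _dim_col [1, -1, 0] for Pi_0 and [0, 1, -1]
+ # for Pi_1 yields a new _dim_col of [1, -2, 1] (entry-wise subtraction),
+ # stored under the next free symbol, e.g. "\\Pi_{2}".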
656
+
657
+ # ========================================================================
658
+ # High-Level Methods
659
+ # ========================================================================
660
+
661
+ def analyze(self) -> None:
662
+ """*analyze()* Performs complete dimensional analysis
663
+
664
+ Executes the full analysis workflow:
665
+ 1. Prepare analysis (validate variables, identify output)
666
+ 2. Create dimensional matrix
667
+ 3. Solve matrix (compute RREF and nullspace)
668
+ 4. Generate dimensionless coefficients
669
+
670
+ This is the main entry point for dimensional analysis.
671
+ """
672
+ self._prepare_analysis()
673
+ self.create_matrix()
674
+ self.solve_matrix()
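+
+ # Usage sketch (hedged: variable construction is elided and the Variable
+ # instances shown are hypothetical; keyword names follow the dataclass
+ # fields defined above):
+ #
+ #   >>> model = Matrix(_variables={"F": force, "m": mass, "a": accel},
+ #   ...                _schema=Schema())
+ #   >>> model.analyze()
+ #   >>> list(model.coefficients)
+ #   ['\\Pi_{0}']
+ #
+ # analyze() chains _prepare_analysis(), create_matrix() and
+ # solve_matrix(), so the coefficients property is populated afterwards.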
675
+
676
+ def clear(self) -> None:
677
+ """*clear()* Resets all dimensional matrix and analysis data.
678
+
679
+ Clears all computed results while preserving the framework.
680
+
681
+ NOTE: Numpy arrays don't have .clear() method, so we reassign. Lists have .clear() method.
682
+ """
683
+ # Clear variables
684
+ # Reassign numpy arrays (no .clear() method)
685
+ self._dim_mtx = np.array([], dtype=float)
686
+
687
+ # Reassign sympy matrices
688
+ self._sym_mtx = sp.Matrix([])
689
+
690
+ # Clear lists (these have .clear() method)
691
+ self._variables.clear()
692
+ self._relevant_lt.clear()
693
+ self._output = None
694
+
695
+ # Reset scalars
696
+ self._idx = -1
697
+ self._sym = ""
698
+ self._alias = ""
699
+ self.name = ""
700
+ self.description = ""
701
+
702
+ # Reset statistics
703
+ self._n_var = 0
704
+ self._n_relevant = 0
705
+ self._n_in = 0
706
+ self._n_out = 0
707
+ self._n_ctrl = 0
708
+
709
+ # Clear matrices
710
+ self._dim_mtx = None
711
+ self._dim_mtx_trans = None
712
+ self._sym_mtx = None
713
+ self._rref_mtx = None
714
+ self._pivot_cols.clear()
715
+
716
+ # Clear results
717
+ self._coefficients.clear()
718
+ self.working_fdus.clear()
719
+
720
+ # ========================================================================
721
+ # Properties
722
+ # ========================================================================
723
+
724
+ @property
725
+ def variables(self) -> Dict[str, Variable]:
726
+ """*variables* Get the dictionary of variables.
727
+
728
+ Returns:
729
+ Dict[str, Variable]: The variables dictionary.
730
+ """
731
+ return self._variables
732
+
733
+ @variables.setter
734
+ def variables(self, val: Dict[str, Variable]) -> None:
735
+ """*variables* Set the dictionary of variables.
736
+
737
+ Args:
738
+ val (Dict[str, Variable]): Dictionary of variables.
739
+
740
+ Raises:
741
+ ValueError: If input is not a non-empty dictionary.
742
+ ValueError: If any value is not a Variable instance.
743
+ """
744
+ if not val or not isinstance(val, dict):
745
+ _msg = "Variables must be in non-empty dictionary. "
746
+ _msg += f"Provided input: {type(val).__name__}"
747
+ raise ValueError(_msg)
748
+
749
+ if not all(isinstance(v, Variable) for v in val.values()):
750
+ _msg = "All elements must be Variable instances"
751
+ _msg += f", got: {[type(v).__name__ for v in val.values()]}"
752
+ raise ValueError(_msg)
753
+
754
+ self._variables = val
755
+
756
+ # Update relevant variables and prepare for analysis
757
+ self._prepare_analysis()
758
+
759
+ @property
760
+ def framework(self) -> Schema:
761
+ """*framework* Get the dimensional framework.
762
+
763
+ Returns:
764
+ Schema: Current dimensional framework.
765
+ """
766
+ return self._schema
767
+
768
+ @framework.setter
769
+ def framework(self, val: Schema) -> None:
770
+ """Set the dimensional framework.
771
+
772
+ Args:
773
+ val (Schema): New dimensional framework.
774
+
775
+ Raises:
776
+ ValueError: If input is not a Schema instance.
777
+ """
778
+ if not isinstance(val, Schema):
779
+ _msg = "Schema must be a Schema instance. "
780
+ _msg += f"Got: {type(val).__name__}"
781
+ raise ValueError(_msg)
782
+
783
+ # Update framework and global configuration
784
+ self._schema = val
785
+
786
+ # Prepare for analysis with new framework
787
+ if self._variables:
788
+ self._prepare_analysis()
789
+
790
+ @property
791
+ def relevant_lt(self) -> Dict[str, Variable]:
792
+ """*relevant_lt* Get dictionary of relevant variables.
793
+
794
+ Returns:
795
+ Dict[str, Variable]: Dictionary of relevant variables.
796
+ """
797
+ return self._relevant_lt
798
+
799
+ @relevant_lt.setter
800
+ def relevant_lt(self, val: Dict[str, Variable]) -> None:
801
+ """*relevant_lt* Set the dictionary of relevant variables, otherwise known as 'relevance list'.
802
+
803
+ Args:
804
+ val (Dict[str, Variable]): Dictionary of relevant variables.
805
+
806
+ Raises:
807
+ ValueError: If the relevant variable dictionary is invalid.
808
+ ValueError: If any of the dictionary variables are invalid.
809
+ """
810
+ if not val or not isinstance(val, dict):
811
+ raise ValueError("Variables must be in non-empty dictionary.")
812
+
813
+ if not all(isinstance(v, Variable) for v in val.values()):
814
+ raise ValueError("All elements must be Variable instances")
815
+
816
+ # Set relevant variables and prepare for analysis
817
+ # self._relevant_lt = [p for p in val if p.relevant]
818
+ self._relevant_lt = {
819
+ k: v for k, v in val.items() if v.relevant
820
+ }
821
+
822
+ # Update relevant variables and prepare for analysis
823
+ self._prepare_analysis()
824
+
825
+ @property
826
+ def coefficients(self) -> Dict[str, Coefficient]:
827
+ """*coefficients* Get dictionary of dimensionless coefficients.
828
+
829
+ Returns:
830
+ Dict[str, Coefficient]: Dictionary of dimensionless coefficients.
831
+ """
832
+ return self._coefficients
833
+
834
+ @property
835
+ def output(self) -> Optional[Variable]:
836
+ """*output* Get the output variable.
837
+
838
+ Returns:
839
+ Optional[Variable]: The output variable, or None if not set.
840
+ """
841
+ return self._output
842
+
843
+ @property
844
+ def dim_mtx(self) -> Optional[NDArray[np.float64]]:
845
+ """*dim_mtx* Get the dimensional matrix.
846
+
847
+ Returns:
848
+ Optional[NDArray[np.float64]]: Dimensional matrix, or None.
849
+ """
850
+ return self._dim_mtx
851
+
852
+ @property
853
+ def rref_mtx(self) -> Optional[NDArray[np.float64]]:
854
+ """*rref_mtx* Get the RREF matrix.
855
+
856
+ Returns:
857
+ Optional[NDArray[np.float64]]: RREF matrix, or None.
858
+ """
859
+ return self._rref_mtx
860
+
861
+ @property
862
+ def pivot_cols(self) -> List[int]:
863
+ """*pivot_cols* Get pivot column indices.
864
+
865
+ Returns:
866
+ List[int]: Pivot column list.
867
+ """
868
+ return self._pivot_cols
869
+
870
+ # ========================================================================
871
+ # Serialization
872
+ # ========================================================================
873
+
874
+ def to_dict(self) -> Dict[str, Any]:
875
+ """*to_dict()* Convert model to dictionary representation.
876
+
877
+ Returns:
878
+ Dict[str, Any]: Dictionary representation of the model.
879
+ """
880
+ result = {}
881
+
882
+ # Get all dataclass fields
883
+ for f in fields(self):
884
+ attr_name = f.name
885
+ attr_value = getattr(self, attr_name)
886
+
887
+ # Skip None values for optional fields
888
+ if attr_value is None:
889
+ continue
890
+
891
+ # Convert based on type
892
+ converted_value = self._convert_value(attr_value)
893
+
894
+ # Remove leading underscore from private attributes
895
+ clean_name = attr_name[1:] if attr_name.startswith("_") else attr_name
896
+ result[clean_name] = converted_value
897
+
898
+ return result
899
+
900
+ def _convert_value(self, value: Any) -> Any:
901
+ """Convert a value to JSON-serializable format.
902
+
903
+ Args:
904
+ value (Any): Value to convert.
905
+
906
+ Returns:
907
+ Any: Converted value.
908
+ """
909
+ # Handle None
910
+ if value is None:
911
+ return None
912
+
913
+ # Handle numpy arrays
914
+ if isinstance(value, np.ndarray):
915
+ return value.tolist()
916
+
917
+ # Handle sympy matrices
918
+ if isinstance(value, sp.Matrix):
919
+ return [[float(val) for val in row] for row in value.tolist()]
920
+
921
+ # Handle objects with to_dict method
922
+ if isinstance(value, (Schema, Variable, Coefficient)):
923
+ return value.to_dict()
924
+
925
+ # Handle dictionaries
926
+ if isinstance(value, dict):
927
+ if not value: # Empty dict
928
+ return {}
929
+
930
+ # Dict of Variable/Coefficient objects (checked via the first value)
931
+ first_val = next(iter(value.values()))
932
+ if isinstance(first_val, (Variable, Coefficient)):
933
+ return {k: v.to_dict() for k, v in value.items()}
934
+
935
+ # Regular dict
936
+ return value
937
+
938
+ # Handle lists
939
+ if isinstance(value, list):
940
+ return [self._convert_value(item) for item in value]
941
+
942
+ # Default: return as-is
943
+ return value
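+
+ # Conversion sketch (illustrative values): np.array([[1.0, 0.0]]) becomes
+ # [[1.0, 0.0]], sp.Matrix([[1, 2]]) becomes [[1.0, 2.0]], and Schema,
+ # Variable or Coefficient values are delegated to their own to_dict().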
944
+
945
+ # def to_dict(self) -> Dict[str, Any]:
946
+ # """*to_dict()* Convert model to dictionary representation.
947
+
948
+ # Returns:
949
+ # Dict[str, Any]: Dictionary representation of the model.
950
+ # """
951
+ # result = {}
952
+
953
+ # # Get all dataclass fields
954
+ # for f in fields(self):
955
+ # attr_name = f.name
956
+ # attr_value = getattr(self, attr_name)
957
+
958
+ # # Skip numpy arrays (convert to list for JSON compatibility)
959
+ # if isinstance(attr_value, np.ndarray):
960
+ # attr_value = attr_value.tolist()
961
+
962
+ # # Skip sympy matrices (convert to list)
963
+ # if isinstance(attr_value, sp.Matrix):
964
+ # attr_value = [[float(val) for val in row] for row in attr_value.tolist()]
965
+
966
+ # # Handle Schema framework (convert to dict)
967
+ # if isinstance(attr_value, Schema):
968
+ # attr_value = attr_value.to_dict()
969
+
970
+ # # Handle Variable dictionaries (convert each Variable)
971
+ # if isinstance(attr_value, dict) and all(isinstance(v, Variable) for v in attr_value.values()):
972
+ # attr_value = {k: v.to_dict() for k, v in attr_value.items()}
973
+
974
+ # # Handle Coefficient dictionaries (convert each Coefficient)
975
+ # if isinstance(attr_value, dict) and all(isinstance(c, Coefficient) for c in attr_value.values()):
976
+ # attr_value = {k: c.to_dict() for k, c in attr_value.items()}
977
+
978
+ # # Handle Variable instance (output variable)
979
+ # if isinstance(attr_value, Variable):
980
+ # attr_value = attr_value.to_dict()
981
+
982
+ # # Skip None values for optional fields
983
+ # if attr_value is None:
984
+ # continue
985
+
986
+ # # Remove leading underscore from private attributes
987
+ # if attr_name.startswith("_"):
988
+ # clean_name = attr_name[1:] # Remove first character
989
+ # else:
990
+ # clean_name = attr_name
991
+
992
+ # result[clean_name] = attr_value
993
+
994
+ # return result
995
+
996
+ @classmethod
997
+ def from_dict(cls, data: Dict[str, Any]) -> "Matrix":
998
+ """*from_dict()* Create model from dictionary representation.
999
+
1000
+ Args:
1001
+ data (Dict[str, Any]): Dictionary representation of the model.
1002
+
1003
+ Returns:
1004
+ Matrix: New Matrix instance.
1005
+ """
1006
+ # Get all valid field names from the dataclass
1007
+ field_names = {f.name for f in fields(cls)}
1008
+
1009
+ # Map keys without underscores to keys with underscores
1010
+ mapped_data = {}
1011
+
1012
+ # Define conversion rules
1013
+ object_converters = {
1014
+ "schema": Schema,
1015
+ "variables": Variable,
1016
+ "relevant_lt": Variable,
1017
+ "output": Variable,
1018
+ "coefficients": Coefficient
1019
+ }
1020
+
1021
+ array_fields = ["dim_mtx", "dim_mtx_trans", "rref_mtx"]
1022
+ matrix_fields = ["sym_mtx"]
1023
+
1024
+ # Computed fields that should not be passed to constructor
1025
+ computed_fields = {
1026
+ "n_var", "n_relevant", "n_in", "n_out", "n_ctrl",
1027
+ "relevant_lt", "output", "coefficients",
1028
+ "dim_mtx", "dim_mtx_trans", "sym_mtx", "rref_mtx", "pivot_cols"
1029
+ }
1030
+
1031
+ for key, value in data.items():
1032
+ # Skip computed fields
1033
+ clean_key = key[1:] if key.startswith("_") else key
1034
+ if clean_key in computed_fields:
1035
+ continue
1036
+
1037
+ # Map key to field name
1038
+ field_key = None
1039
+ if key in field_names:
1040
+ field_key = key
1041
+ elif f"_{key}" in field_names:
1042
+ field_key = f"_{key}"
1043
+ elif key.startswith("_") and key[1:] in field_names:
1044
+ field_key = key[1:]
1045
+
1046
+ if field_key is None:
1047
+ continue
1048
+
1049
+ # Convert objects
1050
+ if clean_key in object_converters:
1051
+ converter = object_converters[clean_key]
1052
+
1053
+ if isinstance(value, dict):
1054
+ if clean_key in ["variables", "relevant_lt", "coefficients"]:
1055
+ # Dictionary of objects
1056
+ mapped_data[field_key] = {
1057
+ k: converter.from_dict(v) if isinstance(v, dict) else v
1058
+ for k, v in value.items()
1059
+ }
1060
+ else:
1061
+ # Single object
1062
+ mapped_data[field_key] = converter.from_dict(value)
1063
+
1064
+ # Convert arrays
1065
+ elif clean_key in array_fields and isinstance(value, list):
1066
+ mapped_data[field_key] = np.array(value)
1067
+
1068
+ # Convert matrices
1069
+ elif clean_key in matrix_fields and isinstance(value, list):
1070
+ mapped_data[field_key] = sp.Matrix(value)
1071
+
1072
+ # Default: use as-is
1073
+ else:
1074
+ mapped_data[field_key] = value
1075
+
1076
+ # Create model instance
1077
+ return cls(**mapped_data)
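+
+ # Round-trip sketch (hedged: assumes Schema, Variable and Coefficient
+ # implement the to_dict()/from_dict() pair used above):
+ #
+ #   >>> data = model.to_dict()          # plain dict, JSON-serializable
+ #   >>> clone = Matrix.from_dict(data)  # computed fields are skipped and
+ #   ...                                 # re-derived via __post_init__
+ #   >>> clone.analyze()                 # re-run the analysis if needed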