pydasa 0.4.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pydasa/__init__.py +103 -0
- pydasa/_version.py +6 -0
- pydasa/analysis/__init__.py +0 -0
- pydasa/analysis/scenario.py +584 -0
- pydasa/analysis/simulation.py +1158 -0
- pydasa/context/__init__.py +0 -0
- pydasa/context/conversion.py +11 -0
- pydasa/context/system.py +17 -0
- pydasa/context/units.py +15 -0
- pydasa/core/__init__.py +15 -0
- pydasa/core/basic.py +287 -0
- pydasa/core/cfg/default.json +136 -0
- pydasa/core/constants.py +27 -0
- pydasa/core/io.py +102 -0
- pydasa/core/setup.py +269 -0
- pydasa/dimensional/__init__.py +0 -0
- pydasa/dimensional/buckingham.py +728 -0
- pydasa/dimensional/fundamental.py +146 -0
- pydasa/dimensional/model.py +1077 -0
- pydasa/dimensional/vaschy.py +633 -0
- pydasa/elements/__init__.py +19 -0
- pydasa/elements/parameter.py +218 -0
- pydasa/elements/specs/__init__.py +22 -0
- pydasa/elements/specs/conceptual.py +161 -0
- pydasa/elements/specs/numerical.py +469 -0
- pydasa/elements/specs/statistical.py +229 -0
- pydasa/elements/specs/symbolic.py +394 -0
- pydasa/serialization/__init__.py +27 -0
- pydasa/serialization/parser.py +133 -0
- pydasa/structs/__init__.py +0 -0
- pydasa/structs/lists/__init__.py +0 -0
- pydasa/structs/lists/arlt.py +578 -0
- pydasa/structs/lists/dllt.py +18 -0
- pydasa/structs/lists/ndlt.py +262 -0
- pydasa/structs/lists/sllt.py +746 -0
- pydasa/structs/tables/__init__.py +0 -0
- pydasa/structs/tables/htme.py +182 -0
- pydasa/structs/tables/scht.py +774 -0
- pydasa/structs/tools/__init__.py +0 -0
- pydasa/structs/tools/hashing.py +53 -0
- pydasa/structs/tools/math.py +149 -0
- pydasa/structs/tools/memory.py +54 -0
- pydasa/structs/types/__init__.py +0 -0
- pydasa/structs/types/functions.py +131 -0
- pydasa/structs/types/generics.py +54 -0
- pydasa/validations/__init__.py +0 -0
- pydasa/validations/decorators.py +510 -0
- pydasa/validations/error.py +100 -0
- pydasa/validations/patterns.py +32 -0
- pydasa/workflows/__init__.py +1 -0
- pydasa/workflows/influence.py +497 -0
- pydasa/workflows/phenomena.py +529 -0
- pydasa/workflows/practical.py +765 -0
- pydasa-0.4.7.dist-info/METADATA +320 -0
- pydasa-0.4.7.dist-info/RECORD +58 -0
- pydasa-0.4.7.dist-info/WHEEL +5 -0
- pydasa-0.4.7.dist-info/licenses/LICENSE +674 -0
- pydasa-0.4.7.dist-info/top_level.txt +1 -0
pydasa/workflows/influence.py
@@ -0,0 +1,497 @@
# -*- coding: utf-8 -*-
"""
Module influence.py
===========================================

Module for **SensitivityAnalysis** to manage sensitivity analysis in *PyDASA*

This module provides the SensitivityAnalysis class for coordinating multiple sensitivity analyses and generating reports on which variables have the most significant impact on dimensionless coefficients.

Classes:
    **SensitivityAnalysis**: Manages sensitivity analyses for multiple coefficients, processes results, and generates reports on variable impacts.

*IMPORTANT:* Based on the theory from:

    # H.Gorter, *Dimensionalanalyse: Eine Theorie der physikalischen Dimensionen mit Anwendungen*
"""

from __future__ import annotations
from dataclasses import dataclass, field
from typing import List, Dict, Any, Union, Tuple
# import re

# Import validation base classes
from pydasa.core.basic import Foundation

# Import related classes
from pydasa.elements.parameter import Variable
from pydasa.dimensional.buckingham import Coefficient
from pydasa.analysis.scenario import Sensitivity

# Import utils
from pydasa.validations.error import inspect_var
from pydasa.serialization.parser import latex_to_python

# Import validation decorators
from pydasa.validations.decorators import validate_type
from pydasa.validations.decorators import validate_choices
from pydasa.validations.decorators import validate_emptiness
# Import global configuration
from pydasa.core.setup import Frameworks
from pydasa.core.setup import AnaliticMode
from pydasa.core.setup import PYDASA_CFG
# from pydasa.validations.patterns import LATEX_RE

@dataclass
class SensitivityAnalysis(Foundation):
    """**SensitivityAnalysis** class for managing multiple sensitivity analyses in *PyDASA*.

    Coordinates sensitivity analyses for multiple coefficients, processes their results, and generates comprehensive reports on variable impacts.

    Attributes:
        # Identification and Classification
        name (str): User-friendly name of the sensitivity handler.
        description (str): Brief summary of the sensitivity handler.
        _idx (int): Index/precedence of the sensitivity handler.
        _sym (str): Symbol representation (LaTeX or alphanumeric).
        _alias (str): Python-compatible alias for use in code.
        _fwk (str): Frameworks context (PHYSICAL, COMPUTATION, SOFTWARE, CUSTOM).
        _cat (str): Category of analysis (SYM, NUM, HYB).

        # Analysis Components
        _variables (Dict[str, Variable]): all available parameters/variables in the model (*Variable*).
        _coefficients (Dict[str, Coefficient]): all available coefficients in the model (*Coefficient*).

        # Analysis Results
        _analyses (Dict[str, Sensitivity]): all sensitivity analyses performed.
        _results (Dict[str, Dict[str, Any]]): all consolidated results of analyses.
    """
    # Category attribute
    # :attr: _cat
    _cat: str = AnaliticMode.SYM.value
    """Category of sensitivity analysis (SYM, NUM)."""

    # Variable management
    # :attr: _variables
    _variables: Dict[str, Variable] = field(default_factory=dict)
    """Dictionary of all parameters/variables in the model (*Variable*)."""

    # :attr: _coefficients
    _coefficients: Dict[str, Coefficient] = field(default_factory=dict)
    """Dictionary of all coefficients in the model (*Coefficient*)."""

    # Analysis results
    # :attr: _analyses
    _analyses: Dict[str, Sensitivity] = field(default_factory=dict)
    """Dictionary of sensitivity analyses performed."""

    # :attr: _results
    _results: Dict[str, Dict[str, Any]] = field(default_factory=dict)
    """Consolidated results of analyses."""
    def __post_init__(self) -> None:
        """*__post_init__()* Initializes the sensitivity handler.

        Validates basic properties and sets up component maps.
        """
        # Initialize from base class
        super().__post_init__()

        # Set default symbol if not specified
        if not self._sym:
            self._sym = f"SH_{{\\Pi_{{{self._idx}}}}}" if self._idx >= 0 else "SH_\\Pi_{-1}"

        if not self._alias:
            self._alias = latex_to_python(self._sym)

        # Set name and description if not already set
        if not self.name:
            self.name = f"Sensitivity Analysis Handler {self._idx}"

        if not self.description:
            self.description = f"Manages sensitivity analyses for {list(self._coefficients.keys())} coefficients."
    def _validate_dict(self, dt: dict,
                       exp_type: Union[type, List[type], Tuple[type, ...]]) -> bool:
        """*_validate_dict()* Validates a dictionary with expected value types.

        Args:
            dt (dict): Dictionary to validate.
            exp_type (Union[type, List[type], Tuple[type, ...]]): Expected type(s) for dictionary values.

        Raises:
            ValueError: If the object is not a dictionary.
            ValueError: If the dictionary is empty.
            ValueError: If the dictionary contains values of unexpected types.

        Returns:
            bool: True if the dictionary is valid.
        """
        if not isinstance(dt, dict):
            _msg = f"{inspect_var(dt)} must be a dictionary. "
            _msg += f"Provided: {type(dt)}"
            raise ValueError(_msg)

        if len(dt) == 0:
            _msg = f"{inspect_var(dt)} cannot be empty. "
            _msg += f"Provided: {dt}"
            raise ValueError(_msg)

        # Convert exp_type to tuple for isinstance()
        if isinstance(exp_type, (list, tuple)):
            type_tuple = tuple(exp_type)
        else:
            type_tuple = (exp_type,)

        if not all(isinstance(v, type_tuple) for v in dt.values()):
            _msg = f"{inspect_var(dt)} must contain {exp_type} values."
            _msg += f" Provided: {[type(v).__name__ for v in dt.values()]}"
            raise ValueError(_msg)

        return True
    def _create_analyses(self) -> None:
        """*_create_analyses()* Creates sensitivity analyses for each coefficient.

        Sets up Sensitivity objects for each coefficient to be analyzed.
        """
        self._analyses.clear()

        for i, (pi, coef) in enumerate(self._coefficients.items()):
            # Create sensitivity analysis
            analysis = Sensitivity(
                _idx=i,
                _sym=f"SEN_{{{coef.sym}}}",
                _fwk=self._fwk,
                _cat=self._cat,
                _name=f"Sensitivity for {coef.name}",
                description=f"Sensitivity analysis for {coef.sym}",
                # _pi_expr=coef._pi_expr
            )

            # Configure with coefficient
            analysis.set_coefficient(coef)

            # Add to the analyses map
            self._analyses[pi] = analysis
    def _get_variable_value(self,
                            var_sym: str,
                            val_type: str = "mean") -> float:
        """*_get_variable_value()* Gets a value for a variable based on value type.

        Args:
            var_sym (str): Symbol of the variable.
            val_type (str, optional): Type of value to return (mean, min, max). Defaults to "mean".

        Returns:
            float: Variable value.

        Raises:
            ValueError: If the variable is not found.
            ValueError: If the value type is invalid.
        """
        # Check if the variable symbol exists in our variable map
        if var_sym not in self._variables:
            _msg = f"Variable '{var_sym}' not found in variables."
            _msg += f" Available variables: {list(self._variables.keys())}"
            raise ValueError(_msg)

        # Get the Variable object from the map
        var = self._variables[var_sym]

        # CASE 1: Return average value
        if val_type == "mean":
            # First check if standardized average exists
            if var.std_mean is None:
                # If no standardized average, try regular average
                # If that is also None, use default value -1.0
                return var.mean if var.mean is not None else -1.0
            # Return standardized average if it exists
            return var.std_mean

        # CASE 2: Return minimum value
        elif val_type == "min":
            # First check if standardized minimum exists
            if var.std_min is None:
                # If no standardized minimum, try regular minimum
                # If that is also None, use default value -0.1
                return var.min if var.min is not None else -0.1
            # Return standardized minimum if it exists
            return var.std_min

        # CASE 3: Return maximum value
        elif val_type == "max":
            # First check if standardized maximum exists
            if var.std_max is None:
                # If no standardized maximum, try regular maximum
                # If that is also None, use default value -10.0
                return var.max if var.max is not None else -10.0
            # Return standardized maximum if it exists
            return var.std_max

        # CASE 4: Invalid value type
        else:
            # Build error message
            _msg = f"Invalid value type: {val_type}. "
            _msg += "Must be one of: mean, min, max."
            raise ValueError(_msg)
    def analyze_symbolic(self,
                         val_type: str = "mean") -> Dict[str, Dict[str, float]]:
        """*analyze_symbolic()* Performs symbolic sensitivity analysis.

        Analyzes each coefficient using symbolic differentiation at specified values.

        Args:
            val_type (str, optional): Type of value to use (mean, min, max). Defaults to "mean".

        Returns:
            Dict[str, Dict[str, float]]: Sensitivity results by coefficient and variable.
        """
        # Create analyses if not already done
        if not self._analyses:
            self._create_analyses()

        # Clear previous results
        self._results.clear()

        # Process each analysis
        for analysis in self._analyses.values():
            # Get variable values
            values = {}
            for var_sym in analysis._latex_to_py.keys():
                # Ensure symbol is a string
                values[var_sym] = self._get_variable_value(var_sym, val_type)
            # Perform analysis
            result = analysis.analyze_symbolically(values)

            # Store results
            self._results[analysis.sym] = result

        return self._results
    def analyze_numeric(self,
                        n_samples: int = 1000) -> Dict[str, Dict[str, Any]]:
        """*analyze_numeric()* Performs numerical sensitivity analysis.

        Analyzes each coefficient using the Fourier Amplitude Sensitivity Test (FAST).

        Args:
            n_samples (int, optional): Number of samples to use. Defaults to 1000.

        Returns:
            Dict[str, Dict[str, Any]]: Sensitivity results by coefficient.
        """
        # Create analyses if not already done
        if not self._analyses:
            self._create_analyses()

        # Clear previous results
        self._results.clear()

        # Process each analysis
        for analysis in self._analyses.values():
            # Get variable bounds
            vals = []
            bounds = []
            for var_sym in analysis._latex_to_py.keys():
                var = self._variables[var_sym]
                min_val = var.std_min if var.std_min is not None else (var.min if var.min is not None else -0.1)
                max_val = var.std_max if var.std_max is not None else (var.max if var.max is not None else -10.0)
                bounds.append([min_val, max_val])
                vals.append(var.sym)

            # Perform analysis
            result = analysis.analyze_numerically(vals, bounds, n_samples)

            # Store results
            self._results[analysis.sym] = result

        return self._results
    # Property getters and setters

    @property
    def cat(self) -> str:
        """*cat* Get the analysis category.

        Returns:
            str: Category (SYM, NUM, HYB).
        """
        return self._cat

    @cat.setter
    @validate_choices(PYDASA_CFG.analitic_modes, case_sensitive=False)
    def cat(self, val: str) -> None:
        """*cat* Set the analysis category.

        Args:
            val (str): Category value.

        Raises:
            ValueError: If category is invalid.
        """
        self._cat = val.upper()

    @property
    def variables(self) -> Dict[str, Variable]:
        """*variables* Get the dictionary of variables.

        Returns:
            Dict[str, Variable]: Dictionary of variables.
        """
        return self._variables.copy()

    @variables.setter
    @validate_type(dict, allow_none=False)
    @validate_emptiness()
    def variables(self, val: Dict[str, Variable]) -> None:
        """*variables* Set the dictionary of variables.

        Args:
            val (Dict[str, Variable]): Dictionary of variables.

        Raises:
            ValueError: If dictionary is invalid.
        """
        # Validate dictionary values are Variable instances
        if not all(isinstance(v, Variable) for v in val.values()):
            _msg = "All dictionary values must be Variable instances"
            raise ValueError(_msg)

        self._variables = val
        # Clear existing analyses
        self._analyses.clear()

    @property
    def coefficients(self) -> Dict[str, Coefficient]:
        """*coefficients* Get the dictionary of coefficients.

        Returns:
            Dict[str, Coefficient]: Dictionary of coefficients.
        """
        return self._coefficients.copy()

    @coefficients.setter
    @validate_type(dict, allow_none=False)
    @validate_emptiness()
    def coefficients(self, val: Dict[str, Coefficient]) -> None:
        """*coefficients* Set the dictionary of coefficients.

        Args:
            val (Dict[str, Coefficient]): Dictionary of coefficients.

        Raises:
            ValueError: If dictionary is invalid.
        """
        # Validate dictionary values are Coefficient instances
        if not all(isinstance(v, Coefficient) for v in val.values()):
            _msg = "All dictionary values must be Coefficient instances"
            raise ValueError(_msg)

        self._coefficients = val
        # Clear existing analyses
        self._analyses.clear()

    @property
    def analyses(self) -> Dict[str, Sensitivity]:
        """*analyses* Get the dictionary of sensitivity analyses.

        Returns:
            Dict[str, Sensitivity]: Dictionary of sensitivity analyses.
        """
        return self._analyses.copy()

    @property
    def results(self) -> Dict[str, Dict[str, Any]]:
        """*results* Get the analysis results.

        Returns:
            Dict[str, Dict[str, Any]]: Analysis results.
        """
        return self._results.copy()

    def clear(self) -> None:
        """*clear()* Reset all attributes to default values.

        Resets all handler properties to their initial state.
        """
        # Reset base class attributes
        self._idx = -1
        self._sym = "SENS_Pi_{-1}"
        self._fwk = Frameworks.PHYSICAL.value
        self.name = ""
        self.description = ""

        # Reset handler-specific attributes
        self._cat = AnaliticMode.SYM.value
        self._variables = {}
        self._coefficients = {}
        self._analyses = {}
        self._results = {}
    def to_dict(self) -> Dict[str, Any]:
        """*to_dict()* Convert sensitivity handler to dictionary representation.

        Returns:
            Dict[str, Any]: Dictionary representation of sensitivity handler.
        """
        return {
            "name": self._name,
            "description": self.description,
            "idx": self._idx,
            "sym": self._sym,
            "fwk": self._fwk,
            "cat": self._cat,
            "variables": [
                var.to_dict() for var in self._variables.values()
            ],
            "coefficients": [
                coef.to_dict() for coef in self._coefficients.values()
            ],
            "results": self._results
        }
    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> SensitivityAnalysis:
        """*from_dict()* Create sensitivity handler from dictionary representation.

        Args:
            data (Dict[str, Any]): Dictionary representation of sensitivity handler.

        Returns:
            SensitivityAnalysis: New sensitivity handler instance.
        """
        # Create variables and coefficients from dicts
        variables = {}
        if "variables" in data:
            variables = {
                var.name: var
                for var in (Variable.from_dict(d) for d in data["variables"])
            }

        coefficients = {}
        if "coefficients" in data:
            coefficients = {
                coef.name: coef
                for coef in (Coefficient.from_dict(d) for d in data["coefficients"])
            }

        # Remove list items from data
        handler_data = data.copy()
        for key in ["variables", "coefficients", "results"]:
            if key in handler_data:
                del handler_data[key]

        # Create handler
        handler = cls(
            **handler_data,
            _variables=variables,
            _coefficients=coefficients
        )

        # Set results if available
        if "results" in data:
            handler._results = data["results"]

        return handler