steer-core 0.1.21__py3-none-any.whl → 0.1.23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,7 @@ slider configurations based on parameter ranges.
  """

  import math
- from typing import List, Union, Dict
+ from typing import List


  def calculate_slider_steps(
@@ -34,6 +34,7 @@ Y_TO_S = 3600 * 24 * 365

  # Energy units
  W_TO_KW = 1e-3
+ J_TO_WH = 1 / 3600

  # Angle units
  DEG_TO_RAD = 0.017453292519943295
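
As a quick orientation for the new constant, here is a minimal sketch (not part of the diff) that assumes the constants import from steer_core.Constants.Units, the module path listed in RECORD:

import math
from steer_core.Constants.Units import J_TO_WH  # module path assumed from the RECORD entry

# 1 kWh is 3.6 MJ, so converting 3.6e6 J should give roughly 1000 Wh
energy_wh = 3.6e6 * J_TO_WH
print(round(energy_wh, 6))                       # 1000.0
print(math.isclose(energy_wh * 1e-3, 1.0))       # True: 1 kWh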
Binary file
@@ -2,6 +2,8 @@ import numpy as np
  import pandas as pd
  from typing import Tuple

+ from shapely import Polygon
+

  class CoordinateMixin:
      """
@@ -9,6 +11,94 @@ class CoordinateMixin:
      Provides methods for rotation, area calculation, and coordinate ordering.
      """

+     @staticmethod
+     def _calculate_segment_center_line(x_coords: np.ndarray, z_coords: np.ndarray) -> np.ndarray:
+         """
+         Calculate center line for a single segment of coordinates.
+
+         Parameters
+         ----------
+         x_coords : np.ndarray
+             X coordinates for the segment
+         z_coords : np.ndarray
+             Z coordinates for the segment
+
+         Returns
+         -------
+         np.ndarray
+             Array containing start and end points of the center line [[min_x, mean_z], [max_x, mean_z]]
+         """
+         min_x = np.nanmin(x_coords)
+         max_x = np.nanmax(x_coords)
+         min_z = np.nanmin(z_coords)
+         max_z = np.nanmax(z_coords)
+         mean_z = max_z - (max_z - min_z) / 2
+
+         return np.array([[min_x, mean_z], [max_x, mean_z]])
+
+     @staticmethod
+     def get_xz_center_line(coordinates: np.ndarray) -> np.ndarray:
+         """
+         Generate center line(s) for coordinate data, handling both single and multi-segment polygons.
+
+         Parameters
+         ----------
+         coordinates : np.ndarray
+             Array of 3D coordinates with shape (N, 3) where columns are [x, y, z].
+             NaN values in x or z coordinates indicate breaks between polygon segments.
+
+         Returns
+         -------
+         np.ndarray
+             For single polygon: Array with shape (2, 2) containing start and end points.
+             For multiple segments: Array with center lines for each segment separated by [NaN, NaN].
+         """
+         x_coords = coordinates[:, 0]
+         z_coords = coordinates[:, 2]
+
+         x_is_nan = np.isnan(x_coords)
+
+         if np.any(x_is_nan):
+             # Handle multiple segments separated by NaN values
+             result_points = []
+
+             # Find NaN indices to split the segments
+             nan_indices = np.where(x_is_nan)[0]
+             start_idx = 0
+
+             # Process each segment
+             for nan_idx in nan_indices:
+                 if nan_idx > start_idx:
+                     segment_x = x_coords[start_idx:nan_idx]
+                     segment_z = z_coords[start_idx:nan_idx]
+
+                     # Calculate center line for this segment if it has valid points
+                     if len(segment_x) > 0 and not np.all(np.isnan(segment_x)):
+                         segment_line = CoordinateMixin._calculate_segment_center_line(segment_x, segment_z)
+                         result_points.extend(segment_line.tolist())
+                         result_points.append([np.nan, np.nan])  # Add separator
+
+                 start_idx = nan_idx + 1
+
+             # Handle the last segment if it exists
+             if start_idx < len(x_coords):
+                 segment_x = x_coords[start_idx:]
+                 segment_z = z_coords[start_idx:]
+
+                 if len(segment_x) > 0 and not np.all(np.isnan(segment_x)):
+                     segment_line = CoordinateMixin._calculate_segment_center_line(segment_x, segment_z)
+                     result_points.extend(segment_line.tolist())
+
+             # Remove trailing NaN separator if it exists
+             if result_points and np.isnan(result_points[-1][0]):
+                 result_points.pop()
+
+             return np.array(result_points) if result_points else np.array([]).reshape(0, 2)
+
+         else:
+             # Single polygon - use helper function
+             return CoordinateMixin._calculate_segment_center_line(x_coords, z_coords)
+
      @staticmethod
      def rotate_coordinates(
          coords: np.ndarray, axis: str, angle: float, center: tuple = None
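
To illustrate the new center-line helper, a minimal usage sketch (not part of the diff; the import path is assumed from the RECORD entry for steer_core/Mixins/Coordinates.py):

import numpy as np
from steer_core.Mixins.Coordinates import CoordinateMixin  # import path assumed from RECORD

# Single rectangular cross-section in the x-z plane (the y column is ignored)
coords = np.array([
    [0.0, 0.0, 0.0],
    [4.0, 0.0, 0.0],
    [4.0, 0.0, 2.0],
    [0.0, 0.0, 2.0],
])
line = CoordinateMixin.get_xz_center_line(coords)
print(line)  # [[0. 1.] [4. 1.]]: a horizontal line at the segment's mid-height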
@@ -241,7 +331,134 @@ class CoordinateMixin:

      @staticmethod
      def extrude_footprint(
-         x: np.ndarray, y: np.ndarray, datum: np.ndarray, thickness: float
+         x: np.ndarray,
+         y: np.ndarray,
+         datum: np.ndarray,
+         thickness: float
+     ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+         """
+         Extrude a 2D footprint to 3D, handling both single and multi-segment polygons.
+
+         Parameters
+         ----------
+         x : np.ndarray
+             Array of x coordinates. NaN values indicate segment separators.
+         y : np.ndarray
+             Array of y coordinates. NaN values indicate segment separators.
+         datum : np.ndarray
+             Datum point for extrusion (shape (3,))
+         thickness : float
+             Thickness of the extrusion
+
+         Returns
+         -------
+         Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
+             Arrays of x, y, z coordinates and side labels, with NaN separators between segments
+         """
+         if not np.isnan(x).any() and not np.isnan(y).any():
+             return CoordinateMixin._extrude_single_footprint(x, y, datum, thickness)
+
+         # Process segmented coordinates
+         segments = CoordinateMixin._extract_coordinate_segments(x, y)
+         extruded_sections = []
+
+         for segment_x, segment_y in segments:
+             if len(segment_x) > 0 and not np.all(np.isnan(segment_x)):
+                 result = CoordinateMixin._extrude_single_footprint(
+                     segment_x, segment_y, datum, thickness
+                 )
+                 extruded_sections.append(result)
+
+         return CoordinateMixin._concatenate_with_separators(extruded_sections)
+
+     @staticmethod
+     def _extract_coordinate_segments(x: np.ndarray, y: np.ndarray, unify_xy: bool = False) -> list:
+         """
+         Extract coordinate segments separated by NaN values.
+
+         Parameters
+         ----------
+         x : np.ndarray
+             X coordinates with NaN separators
+         y : np.ndarray
+             Y coordinates with NaN separators
+
+         Returns
+         -------
+         list
+             List of (segment_x, segment_y) tuples
+         """
+         segments = []
+         x_is_nan = np.isnan(x)
+         nan_indices = np.where(x_is_nan)[0]
+         start_idx = 0
+
+         # Process each segment between NaN values
+         for nan_idx in nan_indices:
+             if nan_idx > start_idx:
+                 segments.append((x[start_idx:nan_idx], y[start_idx:nan_idx]))
+             start_idx = nan_idx + 1
+
+         # Handle the last segment if it exists
+         if start_idx < len(x):
+             segments.append((x[start_idx:], y[start_idx:]))
+
+         if unify_xy:
+             unified_segments = []
+             for i in range(len(segments)):
+                 segment_x, segment_y = segments[i]
+                 xy_array = np.column_stack((segment_x, segment_y))
+                 unified_segments.append(xy_array)
+             return np.array(unified_segments)
+         else:
+             return segments
+
+     @staticmethod
+     def _concatenate_with_separators(sections: list) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
+         """
+         Concatenate extruded sections with NaN separators.
+
+         Parameters
+         ----------
+         sections : list
+             List of (x_ext, y_ext, z_ext, side_ext) tuples
+
+         Returns
+         -------
+         Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
+             Concatenated arrays with NaN separators
+         """
+         if not sections:
+             return np.array([]), np.array([]), np.array([]), np.array([])
+
+         x_parts, y_parts, z_parts, side_parts = [], [], [], []
+
+         for i, (x_ext, y_ext, z_ext, side_ext) in enumerate(sections):
+             x_parts.append(x_ext)
+             y_parts.append(y_ext)
+             z_parts.append(z_ext)
+             side_parts.append(side_ext)
+
+             # Add NaN separators between segments (except for the last one)
+             if i < len(sections) - 1:
+                 x_parts.append(np.array([np.nan]))
+                 y_parts.append(np.array([np.nan]))
+                 z_parts.append(np.array([np.nan]))
+                 side_parts.append(np.array([None], dtype=object))
+
+         return (
+             np.concatenate(x_parts),
+             np.concatenate(y_parts),
+             np.concatenate(z_parts),
+             np.concatenate(side_parts)
+         )
+
+     @staticmethod
+     def _extrude_single_footprint(
+         x: np.ndarray,
+         y: np.ndarray,
+         datum: np.ndarray,
+         thickness: float
      ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
          """
          Extrude the 2D footprint to 3D and label each point with its side ('a' or 'b'), with 'a' being the top side and 'b' the bottom side.
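
The refactored extrude_footprint splits NaN-separated inputs into loops, extrudes each one, and rejoins the results with NaN separators. A small sketch of the segment-splitting step (not part of the diff; the import path is assumed from RECORD, and the private helper is called directly only for illustration):

import numpy as np
from steer_core.Mixins.Coordinates import CoordinateMixin  # import path assumed from RECORD

# Two footprint loops separated by a NaN row, as the refactored extrude_footprint expects
x = np.array([0.0, 1.0, 1.0, np.nan, 3.0, 4.0, 4.0])
y = np.array([0.0, 0.0, 1.0, np.nan, 0.0, 0.0, 1.0])

segments = CoordinateMixin._extract_coordinate_segments(x, y)
print(len(segments))   # 2
print(segments[0][0])  # [0. 1. 1.]
print(segments[1][1])  # [0. 0. 1.]
# extrude_footprint then routes each segment through _extrude_single_footprint and
# re-joins the outputs via _concatenate_with_separators.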
@@ -273,6 +490,102 @@ class CoordinateMixin:

          return x_full, y_full, z_full, side_full

+     @staticmethod
+     def get_coordinate_intersection(
+         coords1: np.ndarray,
+         coords2: np.ndarray
+     ) -> float:
+         """Calculate the intersection area between two sets of coordinates"""
+         polygon1 = Polygon(coords1)
+         polygon2 = Polygon(coords2)
+         intersection = polygon1.intersection(polygon2)
+         return intersection.area
+
+     @staticmethod
+     def insert_gaps_with_nans(
+         data: np.ndarray,
+         column_index: int,
+         tolerance_multiplier: float = 2.0
+     ) -> np.ndarray:
+         """
+         Insert rows of NaNs when gaps in a specified column exceed a tolerance threshold.
+
+         Parameters
+         ----------
+         data : np.ndarray
+             Input array with shape (N, M) where N is number of rows and M is number of columns
+         column_index : int
+             Index of the column to analyze for gaps (0-based indexing)
+         tolerance_multiplier : float, optional
+             Multiplier for average gap to determine tolerance threshold, by default 2.0
+
+         Returns
+         -------
+         np.ndarray
+             Array with NaN rows inserted where gaps exceed the tolerance
+
+         Raises
+         ------
+         ValueError
+             If column_index is out of bounds for the array
+         IndexError
+             If data array is empty or has insufficient dimensions
+
+         Examples
+         --------
+         >>> data = np.array([[1, 10], [2, 20], [5, 50], [6, 60]])
+         >>> result = CoordinateMixin.insert_gaps_with_nans(data, column_index=0)
+         >>> # Will insert NaN row between [2, 20] and [5, 50] if gap of 3 exceeds tolerance
+         """
+         if data.size == 0:
+             return data.copy()
+
+         if len(data.shape) != 2:
+             raise ValueError("Input array must be 2-dimensional")
+
+         if column_index < 0 or column_index >= data.shape[1]:
+             raise ValueError(f"column_index {column_index} is out of bounds for array with {data.shape[1]} columns")
+
+         if data.shape[0] < 2:
+             return data.copy()
+
+         # Extract the column values
+         column_values = data[:, column_index]
+
+         # Remove NaN values for gap calculation
+         valid_values = column_values[~np.isnan(column_values)]
+
+         if len(valid_values) < 2:
+             return data.copy()
+
+         # Calculate gaps between consecutive values
+         gaps = np.diff(valid_values)
+
+         # Calculate average gap and tolerance
+         average_gap = np.mean(np.abs(gaps))
+         tolerance = average_gap * tolerance_multiplier
+
+         # Find positions where gaps exceed tolerance in original array
+         result_rows = []
+
+         for i in range(len(data)):
+             result_rows.append(data[i])
+
+             # Check if we should insert a gap after this row
+             if i < len(data) - 1:
+                 current_val = column_values[i]
+                 next_val = column_values[i + 1]
+
+                 # Only check gap if both values are not NaN
+                 if not (np.isnan(current_val) or np.isnan(next_val)):
+                     gap = abs(next_val - current_val)
+                     if gap > tolerance:
+                         # Insert a row of NaNs
+                         nan_row = np.full(data.shape[1], np.nan)
+                         result_rows.append(nan_row)
+
+         return np.array(result_rows)
+
      @staticmethod
      def remove_skip_coat_area(
          x_coords: np.ndarray,
@@ -359,3 +672,5 @@ class CoordinateMixin:
              y_result.append(np.nan)

          return np.array(x_result, dtype=float), np.array(y_result, dtype=float)
+
+
steer_core/Mixins/Data.py CHANGED
@@ -1,3 +1,4 @@
+ from copy import deepcopy
  import numpy as np
  from scipy.interpolate import PchipInterpolator

@@ -38,3 +39,75 @@ class DataMixin:
          new_array = np.minimum.accumulate(new_array)

          return new_array
+
+     @staticmethod
+     def sum_breakdowns(components, breakdown_type: str):
+         """
+         Aggregate breakdown dictionaries across multiple components.
+         If a component doesn't have the specified breakdown, use its fallback attribute instead.
+
+         Parameters
+         ----------
+         components : list
+             List of component objects
+         breakdown_type : str, optional
+             Type of breakdown to aggregate ('mass', 'cost', etc.), by default 'mass'
+
+         Returns
+         -------
+         dict or float
+             Aggregated breakdown dictionary with summed values maintaining structure,
+             or simple float sum if no components have the specified breakdown
+         """
+         def add_dicts(dict1, dict2):
+             """Recursively add two dictionaries with matching structure."""
+             result = dict1.copy()
+
+             for key, value in dict2.items():
+                 if key in result:
+                     if isinstance(result[key], dict) and isinstance(value, dict):
+                         result[key] = add_dicts(result[key], value)
+                     elif isinstance(result[key], (int, float)) and isinstance(value, (int, float)):
+                         result[key] += value
+                 else:
+                     result[key] = value
+
+             return result
+
+         breakdown_attr = f'_{breakdown_type}_breakdown'
+         fallback_attr = f'_{breakdown_type}'
+
+         aggregated_breakdown = {}
+         simple_sum = 0
+         has_breakdown_components = False
+
+         for component in components:
+             if hasattr(component, breakdown_attr):
+                 breakdown_value = getattr(component, breakdown_attr)
+                 if breakdown_value is not None:
+                     has_breakdown_components = True
+                     if not aggregated_breakdown:
+                         # Initialize with first component's breakdown
+                         aggregated_breakdown = deepcopy(breakdown_value)
+                     else:
+                         # Add subsequent breakdowns
+                         aggregated_breakdown = add_dicts(aggregated_breakdown, breakdown_value)
+             elif hasattr(component, fallback_attr):
+                 # Component only has fallback attribute
+                 fallback_value = getattr(component, fallback_attr)
+                 if fallback_value is not None:
+                     simple_sum += fallback_value
+
+         # If we have breakdown components, add the simple sum to the breakdown
+         if has_breakdown_components:
+             if simple_sum > 0:
+                 total_key = f'total_{breakdown_type}'
+                 if total_key in aggregated_breakdown:
+                     aggregated_breakdown[total_key] += simple_sum
+                 else:
+                     aggregated_breakdown[total_key] = simple_sum
+             return aggregated_breakdown
+         else:
+             # No breakdown components, return simple sum
+             return simple_sum
+
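
A minimal sketch of how sum_breakdowns aggregates nested breakdowns plus flat fallbacks (not part of the diff; the component objects here are hypothetical stand-ins, and the import path is assumed from RECORD):

from types import SimpleNamespace
from steer_core.Mixins.Data import DataMixin  # import path assumed from RECORD

# Hypothetical components: two carry a nested _mass_breakdown, one only a flat _mass
anode = SimpleNamespace(_mass_breakdown={'active': 10.0, 'binder': {'pvdf': 1.0}})
cathode = SimpleNamespace(_mass_breakdown={'active': 12.0, 'binder': {'pvdf': 1.5}})
tab = SimpleNamespace(_mass=0.3)

total = DataMixin.sum_breakdowns([anode, cathode, tab], breakdown_type='mass')
print(total)
# {'active': 22.0, 'binder': {'pvdf': 2.5}, 'total_mass': 0.3}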
@@ -4,6 +4,48 @@ from typing import Dict, Any, Tuple, List, Union

  class PlotterMixin:

+     SCATTER_X_AXIS = dict(
+         showgrid=True,
+         gridcolor="rgba(128, 128, 128, 0.2)",
+         gridwidth=1,
+         zeroline=True,
+         zerolinecolor="rgba(0, 0, 0, 0.5)",
+         zerolinewidth=1,
+     )
+
+     SCATTER_Y_AXIS = dict(
+         showgrid=True,
+         gridcolor="rgba(128, 128, 128, 0.2)",
+         gridwidth=1,
+         zeroline=True,
+         zerolinecolor="rgba(0, 0, 0, 0.5)",
+         zerolinewidth=1,
+     )
+
+     SCHEMATIC_X_AXIS = dict(
+         zeroline=False,
+         scaleanchor="y",
+         title="X (mm)"
+     )
+
+     SCHEMATIC_Y_AXIS = dict(
+         zeroline=False,
+         title="Y (mm)"
+     )
+
+     SCHEMATIC_Z_AXIS = dict(
+         zeroline=False,
+         title="Z (mm)"
+     )
+
+     BOTTOM_LEGEND = dict(
+         orientation="h",
+         yanchor="top",
+         y=-0.3,
+         xanchor="center",
+         x=0.5,
+     )
+
      @staticmethod
      def plot_breakdown_sunburst(
          breakdown_dict: Dict[str, Any],
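
These class-level dicts are plain Plotly layout fragments. A hedged sketch of how they might be applied to a figure (not part of the diff; the import path is assumed from RECORD):

import plotly.graph_objects as go
from steer_core.Mixins.Plotter import PlotterMixin  # import path assumed from RECORD

fig = go.Figure(go.Scatter(x=[0, 1, 2], y=[0, 1, 4], name="capacity"))

# Reuse the shared axis and legend styling instead of re-declaring it per figure
fig.update_layout(
    xaxis=PlotterMixin.SCATTER_X_AXIS,
    yaxis=PlotterMixin.SCATTER_Y_AXIS,
    legend=PlotterMixin.BOTTOM_LEGEND,
)
fig.show()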
@@ -147,3 +189,6 @@ class PlotterMixin:
          )

          return fig
+
+
+
@@ -41,3 +41,5 @@ class SerializerMixin:
          decoded = base64.b64decode(String.encode("utf-8"))
          obj = deepcopy(loads(decoded))
          return obj
+
+
@@ -1,4 +1,4 @@
- from typing import Type
+ from typing import Type, Iterable
  import pandas as pd
  import numpy as np
  import plotly.graph_objects as go
@@ -33,26 +33,42 @@ class ValidationMixin:
          )

      @staticmethod
-     def validate_type(value: Type, expected_type: Type, name: str) -> None:
-         """
-         Validate that a value is of the expected type.
+     def validate_type(value, expected_type, name: str) -> None:
+         """Validate that a value is of the expected type or one of multiple allowed types.

          Parameters
          ----------
-         value : Type
+         value : Any
              The value to validate.
-         expected_type : Type
-             The expected type of the value.
+         expected_type : Type | Iterable[Type]
+             A single expected type or an iterable (list/tuple/set) of acceptable types.
          name : str
              The name of the parameter for error messages.

          Raises
          ------
          TypeError
-             If the value is not of the expected type.
+             If the value is not an instance of any of the expected types.
+
+         Examples
+         --------
+         >>> ValidationMixin.validate_type(5, int, 'count') # OK
+         >>> ValidationMixin.validate_type(5, (int, float), 'count') # OK
+         >>> ValidationMixin.validate_type('x', (int, float), 'count') # TypeError
+         >>> ValidationMixin.validate_type([1,2,3], list, 'items') # OK
+         >>> ValidationMixin.validate_type([1,2,3], [list, tuple], 'items') # OK
          """
-         if not isinstance(value, expected_type):
-             raise TypeError(f"{name} must be of type {expected_type.__name__}. Provided: {type(value).__name__}.")
+         # Normalize expected types to a tuple for isinstance
+         if isinstance(expected_type, (list, set)):
+             expected_types = tuple(expected_type)
+         elif isinstance(expected_type, tuple):
+             expected_types = expected_type
+         else:
+             expected_types = (expected_type,)
+
+         if not isinstance(value, expected_types):
+             type_names = ', '.join(t.__name__ for t in expected_types)
+             raise TypeError(f"{name} must be of type {type_names}. Provided: {type(value).__name__}.")

      @staticmethod
      def validate_percentage(value: float, name: str) -> None:
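
A short sketch of the broadened validate_type behaviour (not part of the diff; the import path is assumed from RECORD):

from steer_core.Mixins.TypeChecker import ValidationMixin  # import path assumed from RECORD

ValidationMixin.validate_type(3.2, (int, float), 'voltage')      # passes silently
ValidationMixin.validate_type([1, 2], [list, tuple], 'profile')  # a list/set of types is normalized to a tuple

try:
    ValidationMixin.validate_type("3.2", (int, float), 'voltage')
except TypeError as err:
    print(err)  # voltage must be of type int, float. Provided: str.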
@@ -195,6 +211,39 @@ class ValidationMixin:
          if not isinstance(value, (int, float)):
              raise ValueError(f"{name} must be a positive float. Provided: {value}.")

+     @staticmethod
+     def validate_positive_int(value: int, name: str) -> None:
+         """Validate that a value is a strictly positive integer.
+
+         Parameters
+         ----------
+         value : int
+             The value to validate.
+         name : str
+             The name of the parameter for error messages.
+
+         Raises
+         ------
+         TypeError
+             If the value is not an integer (bool is rejected even though it is a subclass of int).
+         ValueError
+             If the integer is not strictly positive (> 0).
+
+         Examples
+         --------
+         >>> ValidationMixin.validate_positive_int(5, 'count') # OK
+         >>> ValidationMixin.validate_positive_int(0, 'count') # ValueError
+         >>> ValidationMixin.validate_positive_int(-3, 'count') # ValueError
+         >>> ValidationMixin.validate_positive_int(True, 'flag') # TypeError (bool rejected)
+         >>> ValidationMixin.validate_positive_int(12_000, 'cycles') # OK
+         """
+         # Reject bool explicitly (bool is subclass of int)
+         if isinstance(value, bool) or not isinstance(value, int):
+             raise TypeError(f"{name} must be a positive integer. Provided: {value} (type: {type(value).__name__}).")
+
+         if value <= 0:
+             raise ValueError(f"{name} must be a positive integer (> 0). Provided: {value}.")
+
      @staticmethod
      def validate_string(value: str, name: str) -> None:
          """
steer_core/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "0.1.21"
+ __version__ = "0.1.23"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: steer-core
- Version: 0.1.21
+ Version: 0.1.23
  Summary: Modelling energy storage from cell to site - STEER OpenCell Design
  Home-page: https://github.com/nicholas9182/steer-core/
  Author: Nicholas Siemons
@@ -14,6 +14,7 @@ Requires-Dist: numpy==1.26.4
  Requires-Dist: datetime==5.5
  Requires-Dist: scipy==1.15.3
  Requires-Dist: plotly==6.2.0
+ Requires-Dist: dash==2.18.1
  Dynamic: author
  Dynamic: author-email
  Dynamic: classifier
@@ -1,5 +1,5 @@
  steer_core/DataManager.py,sha256=06TrnBa4SLGvLeH2DacCxwGZ4zjLZslwNcwIlmfhxtA,10943
- steer_core/__init__.py,sha256=qEmNtjnOwhDYQ0cHPPtUkUaghzD2xl0thJEznl4giYw,23
+ steer_core/__init__.py,sha256=0byemO6n6WCv41u9vBG2AIsOkVbxLvok7puvwy8EhfU,23
  steer_core/Apps/ContextManagers.py,sha256=-ImT0O8BdmPKOd_e7wS6AWJMMQme7nGDEUdxTi_wv8s,1870
  steer_core/Apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  steer_core/Apps/Components/MaterialSelectors.py,sha256=LBf2PvHkyAXUoZgEhoJCfo5shEfaloa6l3PeOJAakFk,35707
@@ -8,28 +8,28 @@ steer_core/Apps/Components/SliderComponents.py,sha256=ynkVYokTXv8DjElOJ_J35H9F3f
  steer_core/Apps/Components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  steer_core/Apps/Performance/CallbackTimer.py,sha256=Dh1dWvgX_LownO_UBmlBtUwl5XRb4sQ5kpzSqLXK2G8,387
  steer_core/Apps/Performance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- steer_core/Apps/Utils/SliderControls.py,sha256=jSjYZBWE9A9uzFDJjHBjP0RND9DeKUKm0V3ovDoeezo,27008
+ steer_core/Apps/Utils/SliderControls.py,sha256=dHk43Ll6O9kKkMAjAdz1GDBDscMAhO9_Y1xXoFTZzVE,26995
  steer_core/Apps/Utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- steer_core/Constants/Units.py,sha256=_5Ii1inowJlcvk_jGPGhnPjGPJ3S5ZLg-5OubpYqc0A,695
+ steer_core/Constants/Units.py,sha256=QIV_lDX7rANH-MKP90jOyiXbGueL15LILKMNr5dSWoI,714
  steer_core/Constants/Universal.py,sha256=5FWdrex5NiI2DResDmwO7GIvGN2B0DNtdlG1l-ysDh8,41
  steer_core/Constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  steer_core/ContextManagers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  steer_core/Data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- steer_core/Data/database.db,sha256=y8cJv-W2T1EXoULbd8cT6AlN_yUIaTAkObX0pMDB4Oc,13615104
+ steer_core/Data/database.db,sha256=0DXpYySOxTWqxZiqovYjlKnHlpPPblkPqqEAUxOiZ1o,115044352
  steer_core/Decorators/Coordinates.py,sha256=MxUWXQNrR9Q0_p4gGAywS4qnPAztajJzSay1Cu6lCRQ,1441
  steer_core/Decorators/Electrochemical.py,sha256=nrNnTG4weyQOq1VLybjWWcbgGoth8ndvy3muN51xpwU,986
  steer_core/Decorators/General.py,sha256=lc7YdvxU-JDo8b4kunVzSjxcB3_8C185458HrXQq-lk,970
  steer_core/Decorators/Objects.py,sha256=aYaRQBFgdSE0IB4QgBVfb6GhEPagoU6TRNrW_pOaqQI,506
  steer_core/Decorators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  steer_core/Mixins/Colors.py,sha256=vbo44Fr0oeziwHJ-tu7ojG-GzqFc2LBcT_hH4szvPFc,6796
- steer_core/Mixins/Coordinates.py,sha256=irdrrXIm7lmjMqgXyRXmu-x5swNQHhR7S0EZTBhvV8U,12824
- steer_core/Mixins/Data.py,sha256=2SXRIExCmd98N5JtNEFCQ9poi94fRF_GV5TNYjEGy6o,1363
+ steer_core/Mixins/Coordinates.py,sha256=CQJWrxzIq368kezkGClWAlRN2e-_hY0V8EJnHA3kHmQ,24387
+ steer_core/Mixins/Data.py,sha256=c313F85muxlBHQ6yl6AKrifNyV2toHvVwEy35fNNUNE,4434
  steer_core/Mixins/Dunder.py,sha256=591oDGiRPdLH1bDIc1FUw33eeRtSc4pC7UbKEIGPm1I,7035
- steer_core/Mixins/Plotter.py,sha256=zYj-P9ryhSUe5mHoIyjasZSOnY5sDCFKx6u73E8rFZc,5424
- steer_core/Mixins/TypeChecker.py,sha256=CIv-Tt-NECR55hbscBvqQnzzABflj4bHG2zYHV4kLwk,8199
+ steer_core/Mixins/Plotter.py,sha256=IZ3u1vAG1I1brSER2YiWyXCNQiHuafSpPv361nK5DIo,6275
+ steer_core/Mixins/Serializer.py,sha256=oPtyfqbamqtLq3G3x9SDpAG-aVlK7AOS1y-TOo3-0y8,1018
+ steer_core/Mixins/TypeChecker.py,sha256=dzTu6q6xfz0Op3Yhu7vA-3-8D02iHZPBeAdX9ng9-WM,10454
  steer_core/Mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- steer_core/Mixins/serializer.py,sha256=VC3sqfPMR8BxBC7WaDO6cCDpVAgLV8MKAVZtiX953gQ,1016
- steer_core-0.1.21.dist-info/METADATA,sha256=c4Eyd22-uhttRYx9Tr8Pss7P2JoowxNj56z6jrqjXNk,704
- steer_core-0.1.21.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- steer_core-0.1.21.dist-info/top_level.txt,sha256=6LFpGCSDE_SqRoT7raeM3Ax7KTBKQnyXLXxM9kXtw5M,11
- steer_core-0.1.21.dist-info/RECORD,,
+ steer_core-0.1.23.dist-info/METADATA,sha256=aP-nsJib7B2whpHguf5y4U79QEXD3SSNvgt4-hKsT9g,732
+ steer_core-0.1.23.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ steer_core-0.1.23.dist-info/top_level.txt,sha256=6LFpGCSDE_SqRoT7raeM3Ax7KTBKQnyXLXxM9kXtw5M,11
+ steer_core-0.1.23.dist-info/RECORD,,