tsam 3.0.0__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
tsam/api.py CHANGED
@@ -536,6 +536,7 @@ def _build_old_params(
         params["addPeakMin"] = extremes.min_value
         params["addMeanMax"] = extremes.max_period
         params["addMeanMin"] = extremes.min_period
+        params["extremePreserveNumClusters"] = extremes._effective_preserve_n_clusters
     else:
         params["extremePeriodMethod"] = "None"
 
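Reviewer note (not part of the diff): the bridge forwards the effective value of the new flag, so the old-API parameter dict always receives a plain bool even though the new dataclass field is optional. A minimal sketch of how that resolution behaves, assuming ExtremeConfig is importable from tsam.config (the module changed further down); the column name is hypothetical.

    from tsam.config import ExtremeConfig

    # preserve_n_clusters left unset: emits the new FutureWarning and resolves to False,
    # so params["extremePreserveNumClusters"] receives False (the 3.0.0 behaviour).
    extremes = ExtremeConfig(max_value=["electricity_load"])
    assert extremes._effective_preserve_n_clusters is False

    # Explicitly set: no warning, and the explicit value is forwarded unchanged.
    extremes = ExtremeConfig(max_value=["electricity_load"], preserve_n_clusters=True)
    assert extremes._effective_preserve_n_clusters is True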
tsam/config.py CHANGED
@@ -606,9 +606,10 @@ class ClusteringResult:
         ):
             warnings.warn(
                 "The 'replace' extreme method creates a hybrid cluster representation "
-                "(some columns from the medoid, some from the extreme period) that cannot "
-                "be perfectly reproduced during transfer. The transferred result will use "
-                "the medoid representation for all columns instead of the hybrid values. "
+                "(some columns from the cluster representative, some from the extreme period) "
+                "that cannot be perfectly reproduced during transfer. The transferred result "
+                "will use the stored cluster center periods directly, without the extreme "
+                "value injection that was applied during the original aggregation. "
                 "For exact transfer, use 'append' or 'new_cluster' extreme methods.",
                 UserWarning,
                 stacklevel=2,
@@ -785,6 +786,18 @@ class ExtremeConfig:
     min_period : list[str], optional
         Column names where the period with minimum total should be preserved.
        Example: ["wind_generation"] to preserve lowest wind day.
+
+    preserve_n_clusters : bool, optional
+        Whether extreme periods count toward n_clusters.
+        - True: Extremes are included in n_clusters
+          (e.g., n_clusters=10 with 2 extremes = 8 from clustering + 2 extremes)
+        - False: Extremes are added on top of n_clusters (old api behaviour)
+          (e.g., n_clusters=10 + 2 extremes = 12 final clusters)
+        Only affects "append" or "new_cluster" methods ("replace" never changes n_clusters).
+
+    .. deprecated::
+        The default will change from False to True in a future release.
+        Set explicitly to silence the FutureWarning.
     """
 
     method: ExtremeMethod = "append"
@@ -792,6 +805,18 @@ class ExtremeConfig:
     min_value: list[str] = field(default_factory=list)
     max_period: list[str] = field(default_factory=list)
     min_period: list[str] = field(default_factory=list)
+    preserve_n_clusters: bool | None = None
+
+    def __post_init__(self) -> None:
+        """Emit FutureWarning if preserve_n_clusters is not explicitly set."""
+        if self.preserve_n_clusters is None and self.has_extremes():
+            warnings.warn(
+                "preserve_n_clusters currently defaults to False to match behaviour of the old api, "
+                "but will default to True in a future release. Set preserve_n_clusters explicitly "
+                "to silence this warning.",
+                FutureWarning,
+                stacklevel=3,
+            )
 
     def has_extremes(self) -> bool:
         """Check if any extreme periods are configured."""
@@ -799,6 +824,17 @@ class ExtremeConfig:
             self.max_value or self.min_value or self.max_period or self.min_period
         )
 
+    @property
+    def _effective_preserve_n_clusters(self) -> bool:
+        """Get the effective value for preserve_n_clusters.
+
+        Returns False if not explicitly set (current default behavior).
+        In a future release, the default will change to True.
+        """
+        if self.preserve_n_clusters is None:
+            return False  # Current default, will change to True in future
+        return self.preserve_n_clusters
+
     def to_dict(self) -> dict[str, Any]:
         """Convert to dictionary for JSON serialization."""
         result: dict[str, Any] = {}
@@ -812,6 +848,8 @@ class ExtremeConfig:
             result["max_period"] = self.max_period
         if self.min_period:
             result["min_period"] = self.min_period
+        if self.preserve_n_clusters is not None:
+            result["preserve_n_clusters"] = self.preserve_n_clusters
         return result
 
     @classmethod
@@ -823,6 +861,7 @@ class ExtremeConfig:
             min_value=data.get("min_value", []),
             max_period=data.get("max_period", []),
             min_period=data.get("min_period", []),
+            preserve_n_clusters=data.get("preserve_n_clusters"),
         )
 
 
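Reviewer note (not part of the diff): a minimal usage sketch of the new field, assuming ExtremeConfig is importable from tsam.config as in this file; the column names are hypothetical.

    import warnings

    from tsam.config import ExtremeConfig

    # Leaving preserve_n_clusters unset keeps the 3.0.0 behaviour (extremes added on top
    # of n_clusters) but now emits the FutureWarning from __post_init__.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        legacy = ExtremeConfig(method="append", max_value=["electricity_load"])
    assert any(issubclass(w.category, FutureWarning) for w in caught)

    # Setting the flag explicitly silences the warning; True means the extreme periods
    # are counted inside n_clusters instead of being added on top of it.
    explicit = ExtremeConfig(
        method="append",
        max_value=["electricity_load"],
        min_period=["wind_generation"],
        preserve_n_clusters=True,
    )
    assert explicit._effective_preserve_n_clusters is True

    # The flag is serialized only when explicitly set, and survives a round trip.
    assert "preserve_n_clusters" not in legacy.to_dict()
    restored = ExtremeConfig.from_dict(explicit.to_dict())
    assert restored.preserve_n_clusters is True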
tsam/timeseriesaggregation.py CHANGED
@@ -132,6 +132,7 @@ class TimeSeriesAggregation:
         weightDict=None,
         segmentation=False,
         extremePeriodMethod="None",
+        extremePreserveNumClusters=False,
         representationMethod=None,
         representationDict=None,
         distributionPeriodWise=True,
@@ -318,6 +319,8 @@ class TimeSeriesAggregation:
 
         self.extremePeriodMethod = extremePeriodMethod
 
+        self.extremePreserveNumClusters = extremePreserveNumClusters
+
         self.evalSumPeriods = evalSumPeriods
 
         self.sortValues = sortValues
@@ -683,6 +686,46 @@ class TimeSeriesAggregation:
 
         return unnormalizedTimeSeries
 
+    def _countExtremePeriods(self, groupedSeries):
+        """
+        Count unique extreme periods without modifying any state.
+
+        Used by extremePreserveNumClusters to determine how many clusters
+        to reserve for extreme periods before clustering.
+
+        Note: The extreme-finding logic (idxmax/idxmin on peak/mean) must
+        stay in sync with _addExtremePeriods. This is intentionally separate
+        because _addExtremePeriods also filters out periods that are already
+        cluster centers (not known at count time).
+        """
+        extremePeriodIndices = set()
+
+        # Only iterate over columns that are actually in extreme lists
+        extreme_columns = (
+            set(self.addPeakMax)
+            | set(self.addPeakMin)
+            | set(self.addMeanMax)
+            | set(self.addMeanMin)
+        )
+
+        for column in extreme_columns:
+            col_data = groupedSeries[column]
+
+            if column in self.addPeakMax:
+                extremePeriodIndices.add(col_data.max(axis=1).idxmax())
+            if column in self.addPeakMin:
+                extremePeriodIndices.add(col_data.min(axis=1).idxmin())
+
+            # Compute mean only once if needed for either addMeanMax or addMeanMin
+            if column in self.addMeanMax or column in self.addMeanMin:
+                mean_series = col_data.mean(axis=1)
+                if column in self.addMeanMax:
+                    extremePeriodIndices.add(mean_series.idxmax())
+                if column in self.addMeanMin:
+                    extremePeriodIndices.add(mean_series.idxmin())
+
+        return len(extremePeriodIndices)
+
     def _addExtremePeriods(
         self,
         groupedSeries,
@@ -983,7 +1026,7 @@ class TimeSeriesAggregation:
         # Reshape back to 2D: (n_clusters, n_cols * n_timesteps)
         return arr.reshape(n_clusters, -1)
 
-    def _clusterSortedPeriods(self, candidates, n_init=20):
+    def _clusterSortedPeriods(self, candidates, n_init=20, n_clusters=None):
         """
         Runs the clustering algorithms for the sorted profiles within the period
         instead of the original profiles. (Duration curve clustering)
@@ -1001,13 +1044,16 @@ class TimeSeriesAggregation:
             n_periods, -1
         )
 
+        if n_clusters is None:
+            n_clusters = self.noTypicalPeriods
+
         (
             _altClusterCenters,
             self.clusterCenterIndices,
             clusterOrders_C,
         ) = aggregatePeriods(
             sortedClusterValues,
-            n_clusters=self.noTypicalPeriods,
+            n_clusters=n_clusters,
             n_iter=30,
             solver=self.solver,
             clusterMethod=self.clusterMethod,
@@ -1052,6 +1098,41 @@ class TimeSeriesAggregation:
         """
         self._preProcessTimeSeries()
 
+        # Warn if extremePreserveNumClusters is ignored due to predefined cluster order
+        if (
+            self.predefClusterOrder is not None
+            and self.extremePreserveNumClusters
+            and self.extremePeriodMethod not in ("None", "replace_cluster_center")
+        ):
+            warnings.warn(
+                "extremePreserveNumClusters=True is ignored when predefClusterOrder "
+                "is set. Extreme periods will be appended via _addExtremePeriods "
+                "without reserving clusters upfront. To avoid this warning, set "
+                "extremePreserveNumClusters=False or remove predefClusterOrder.",
+                UserWarning,
+                stacklevel=2,
+            )
+
+        # Count extreme periods upfront if include_in_count is True
+        # Note: replace_cluster_center doesn't add new clusters, so skip
+        n_extremes = 0
+        if (
+            self.extremePreserveNumClusters
+            and self.extremePeriodMethod not in ("None", "replace_cluster_center")
+            and self.predefClusterOrder is None  # Don't count for predefined
+        ):
+            n_extremes = self._countExtremePeriods(self.normalizedPeriodlyProfiles)
+
+        if self.noTypicalPeriods <= n_extremes:
+            raise ValueError(
+                f"n_clusters ({self.noTypicalPeriods}) must be greater than "
+                f"the number of extreme periods ({n_extremes}) when "
+                "preserve_n_clusters=True"
+            )
+
+        # Compute effective number of clusters for the clustering algorithm
+        effective_n_clusters = self.noTypicalPeriods - n_extremes
+
         # check for additional cluster parameters
         if self.evalSumPeriods:
             evaluationValues = (
@@ -1096,7 +1177,7 @@ class TimeSeriesAggregation:
                 self._clusterOrder,
             ) = aggregatePeriods(
                 candidates,
-                n_clusters=self.noTypicalPeriods,
+                n_clusters=effective_n_clusters,
                 n_iter=100,
                 solver=self.solver,
                 clusterMethod=self.clusterMethod,
@@ -1107,7 +1188,7 @@ class TimeSeriesAggregation:
             )
         else:
             self.clusterCenters, self._clusterOrder = self._clusterSortedPeriods(
-                candidates
+                candidates, n_clusters=effective_n_clusters
             )
         self.clusteringDuration = time.time() - cluster_duration
 
@@ -1117,7 +1198,6 @@ class TimeSeriesAggregation:
             self.clusterPeriods.append(cluster_center[:delClusterParams])
 
         if not self.extremePeriodMethod == "None":
-            # overwrite clusterPeriods and clusterOrder
             (
                 self.clusterPeriods,
                 self._clusterOrder,
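Reviewer note (not part of the diff): a minimal sketch of how the new old-API flag is intended to be used end to end. The input DataFrame and the "load" column are hypothetical; the keyword names come from the hunks above, and the createTypicalPeriods() call matches the README shown in the METADATA diff below.

    import numpy as np
    import pandas as pd

    from tsam.timeseriesaggregation import TimeSeriesAggregation

    # Hypothetical hourly load profile for one year.
    index = pd.date_range("2030-01-01", periods=8760, freq="h")
    raw = pd.DataFrame({"load": np.random.rand(8760)}, index=index)

    aggregation = TimeSeriesAggregation(
        raw,
        noTypicalPeriods=10,
        hoursPerPeriod=24,
        clusterMethod="hierarchical",
        extremePeriodMethod="new_cluster_center",
        addPeakMax=["load"],
        # New in 3.1.0: reserve one of the 10 requested periods for the peak-load day,
        # so clustering runs with effective_n_clusters = 10 - 1 = 9.
        extremePreserveNumClusters=True,
    )
    typical_periods = aggregation.createTypicalPeriods()

    # With extremePreserveNumClusters=False (the default, matching 3.0.0), the same call
    # would cluster into 10 periods and add the peak-load day on top, giving 11 in total.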
tsam-3.0.0.dist-info/METADATA → tsam-3.1.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: tsam
-Version: 3.0.0
+Version: 3.1.0
 Summary: Time series aggregation module (tsam) to create typical periods
 Author-email: Leander Kotzur <leander.kotzur@googlemail.com>, Maximilian Hoffmann <maximilian.hoffmann@julumni.fz-juelich.de>
 Maintainer-email: Julian Belina <j.belina@fz-juelich.de>
@@ -49,14 +49,8 @@ Requires-Dist: pandas<=3.0.0,>=2.2.0
 Requires-Dist: numpy<=2.4.1,>=1.22.4
 Requires-Dist: pyomo<=6.95,>=6.4.8
 Requires-Dist: networkx<=3.6.1,>=2.5
-Requires-Dist: tqdm<=4.67.1,>=4.21.0
+Requires-Dist: tqdm<=4.67.2,>=4.21.0
 Requires-Dist: highspy<=1.12.0,>=1.7.2
-Provides-Extra: plot
-Requires-Dist: plotly>=5.0.0; extra == "plot"
-Provides-Extra: notebooks
-Requires-Dist: notebook>=7.5.0; extra == "notebooks"
-Requires-Dist: plotly>=5.0.0; extra == "notebooks"
-Requires-Dist: matplotlib; extra == "notebooks"
 Provides-Extra: develop
 Requires-Dist: pytest; extra == "develop"
 Requires-Dist: pytest-cov; extra == "develop"
@@ -65,6 +59,7 @@ Requires-Dist: codecov; extra == "develop"
 Requires-Dist: sphinx; extra == "develop"
 Requires-Dist: sphinx-autobuild; extra == "develop"
 Requires-Dist: sphinx_book_theme; extra == "develop"
+Requires-Dist: nbsphinx; extra == "develop"
 Requires-Dist: twine; extra == "develop"
 Requires-Dist: nbval; extra == "develop"
 Requires-Dist: ruff; extra == "develop"
@@ -73,7 +68,11 @@ Requires-Dist: pandas-stubs; extra == "develop"
 Requires-Dist: pre-commit; extra == "develop"
 Requires-Dist: plotly>=5.0.0; extra == "develop"
 Requires-Dist: notebook>=7.5.0; extra == "develop"
-Requires-Dist: matplotlib; extra == "develop"
+Provides-Extra: plot
+Requires-Dist: plotly>=5.0.0; extra == "plot"
+Provides-Extra: notebooks
+Requires-Dist: notebook>=7.5.0; extra == "notebooks"
+Requires-Dist: plotly>=5.0.0; extra == "notebooks"
 Dynamic: license-file
 
 [![Version](https://img.shields.io/pypi/v/tsam.svg)](https://pypi.python.org/pypi/tsam) [![Conda Version](https://img.shields.io/conda/vn/conda-forge/tsam.svg)](https://anaconda.org/conda-forge/tsam) [![Documentation Status](https://readthedocs.org/projects/tsam/badge/?version=latest)](https://tsam.readthedocs.io/en/latest/) [![PyPI - License](https://img.shields.io/pypi/l/tsam)]((https://github.com/FZJ-IEK3-VSA/tsam/blob/master/LICENSE.txt)) [![codecov](https://codecov.io/gh/FZJ-IEK3-VSA/tsam/branch/master/graph/badge.svg)](https://codecov.io/gh/FZJ-IEK3-VSA/tsam)
@@ -217,9 +216,9 @@ cluster_representatives = aggregation.createTypicalPeriods()
 ### Detailed examples
 Detailed examples can be found at:/docs/source/examples_notebooks/
 
-A [**first example**](/docs/source/examples_notebooks/aggregation_example.ipynb) shows the capabilites of tsam as jupyter notebook.
+A [**quickstart example**](/docs/source/examples_notebooks/quickstart.ipynb) shows the capabilities of tsam as a Jupyter notebook.
 
-A [**second example**](/docs/source/examples_notebooks/aggregation_optiinput.ipynb) shows in more detail how to access the relevant aggregation results required for paramtrizing e.g. an optimization.
+A [**second example**](/docs/source/examples_notebooks/optimization_input.ipynb) shows in more detail how to access the relevant aggregation results required for parameterizing e.g. an optimization.
 
 The example time series are based on a department [publication](https://www.mdpi.com/1996-1073/10/3/361) and the [test reference years of the DWD](https://www.dwd.de/DE/leistungen/testreferenzjahre/testreferenzjahre.html).
 
tsam-3.0.0.dist-info/RECORD → tsam-3.1.0.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 tsam/__init__.py,sha256=l5PQB_p-OCSQK2daCTc2k4VP6ZtFwOXE_QSVUTCsbV0,2236
-tsam/api.py,sha256=FvXzpFScwzypgHOAfCaZmaetWv_P6pqpTTcpRLxDU2E,22575
-tsam/config.py,sha256=2povXN55P2VoVHK13MiN6M3bJZVgcY3jn-maCBOhBWY,33523
+tsam/api.py,sha256=YisjYhfsZRRngBzHyw-u_JHeRMpYmuYCXk83oPO15PA,22662
+tsam/config.py,sha256=4gbwfh3aeCW7y5NIKgtNjtRq7afhMP9fdWWvZ8vO3PM,35438
 tsam/exceptions.py,sha256=arCs6OQ3r5MIcwO7aHRjB8Joy2rFHWN80vvEr_hKnqY,509
 tsam/hyperparametertuning.py,sha256=S4tCjf4wgKXrX7MCtJGUJXnm26sHHB4qWA1UEmNlTM4,11225
 tsam/periodAggregation.py,sha256=ppEEWpxBh0x5nQGJiywkPPHOvl0uAphdoroqxYfIJmQ,5306
@@ -8,7 +8,7 @@ tsam/plot.py,sha256=gBnMkiCp7EfVhBn9b4ywM_qOP2CFzTG9T-aQTKUa2qU,15701
 tsam/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tsam/representations.py,sha256=MKJJtClMkacOkItbRwreFo1d0Vr6gEAItq4-OwqTfAE,7053
 tsam/result.py,sha256=0eckXhXZl3dqe24pNlhIDrG9c6CEnIoxExPSFCSH3tM,13707
-tsam/timeseriesaggregation.py,sha256=8Xy65KmYxCV96N1t-zz8GMc4S4r8w-NZqUQQP0sP0b8,59351
+tsam/timeseriesaggregation.py,sha256=UxdNYqKVr4Ydom4wIvu49080Nr4AeArl8Pm1Enukl7I,62807
 tsam/tuning.py,sha256=7yhh6BgxYCdFOJELZU8artR-TuG1oZDRoPfIF9Q9p4Q,35822
 tsam/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 tsam/utils/durationRepresentation.py,sha256=ESPXhSuHplRig9uOEJGgEOdbzaa0zo-1tPW7o_N0_yk,9791
@@ -16,8 +16,8 @@ tsam/utils/k_maxoids.py,sha256=hUnqkOgH-nTjzH8B6Ho7oF9iWfXaKiJeWa0aDOlshvI,4241
 tsam/utils/k_medoids_contiguity.py,sha256=ZK09BkwWaub-JwtXbOuLx2OhE8mYkkNbCmmYO6ylb4A,5920
 tsam/utils/k_medoids_exact.py,sha256=p3nd2madMrOGau6sWE7i3jADXVQkUGF66CPTV2QyYr0,6999
 tsam/utils/segmentation.py,sha256=qY8jEVB8Rj6tZNwF-mz-mldmsSkaaPOrWoDM_hgEe8M,11094
-tsam-3.0.0.dist-info/licenses/LICENSE.txt,sha256=YO7oiTI8iS0QbTaumTaIr8QkVgZPrgqiBKO-s4eiwik,1210
-tsam-3.0.0.dist-info/METADATA,sha256=BLpBzrhvz3oJ5eModR0EMK9smnQrBzGAfvfe9Fzor0M,16097
-tsam-3.0.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
-tsam-3.0.0.dist-info/top_level.txt,sha256=MFI15PnPuMv8F1hTAOXbjGu41z-l6dJbnK69WlIQNcM,5
-tsam-3.0.0.dist-info/RECORD,,
+tsam-3.1.0.dist-info/licenses/LICENSE.txt,sha256=YO7oiTI8iS0QbTaumTaIr8QkVgZPrgqiBKO-s4eiwik,1210
+tsam-3.1.0.dist-info/METADATA,sha256=hVNAm38MZgPw8zw4_uKyWT-ioWW7WVOluPoJ6JZKoY4,16044
+tsam-3.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+tsam-3.1.0.dist-info/top_level.txt,sha256=MFI15PnPuMv8F1hTAOXbjGu41z-l6dJbnK69WlIQNcM,5
+tsam-3.1.0.dist-info/RECORD,,