fram-core 0.0.0__py3-none-any.whl → 0.1.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103) hide show
  1. fram_core-0.1.0a1.dist-info/METADATA +41 -0
  2. fram_core-0.1.0a1.dist-info/RECORD +100 -0
  3. {fram_core-0.0.0.dist-info → fram_core-0.1.0a1.dist-info}/WHEEL +1 -2
  4. fram_core-0.1.0a1.dist-info/licenses/LICENSE.md +8 -0
  5. framcore/Base.py +142 -0
  6. framcore/Model.py +73 -0
  7. framcore/__init__.py +9 -0
  8. framcore/aggregators/Aggregator.py +153 -0
  9. framcore/aggregators/HydroAggregator.py +837 -0
  10. framcore/aggregators/NodeAggregator.py +495 -0
  11. framcore/aggregators/WindSolarAggregator.py +323 -0
  12. framcore/aggregators/__init__.py +13 -0
  13. framcore/aggregators/_utils.py +184 -0
  14. framcore/attributes/Arrow.py +305 -0
  15. framcore/attributes/ElasticDemand.py +90 -0
  16. framcore/attributes/ReservoirCurve.py +37 -0
  17. framcore/attributes/SoftBound.py +19 -0
  18. framcore/attributes/StartUpCost.py +54 -0
  19. framcore/attributes/Storage.py +146 -0
  20. framcore/attributes/TargetBound.py +18 -0
  21. framcore/attributes/__init__.py +65 -0
  22. framcore/attributes/hydro/HydroBypass.py +42 -0
  23. framcore/attributes/hydro/HydroGenerator.py +83 -0
  24. framcore/attributes/hydro/HydroPump.py +156 -0
  25. framcore/attributes/hydro/HydroReservoir.py +27 -0
  26. framcore/attributes/hydro/__init__.py +13 -0
  27. framcore/attributes/level_profile_attributes.py +714 -0
  28. framcore/components/Component.py +112 -0
  29. framcore/components/Demand.py +130 -0
  30. framcore/components/Flow.py +167 -0
  31. framcore/components/HydroModule.py +330 -0
  32. framcore/components/Node.py +76 -0
  33. framcore/components/Thermal.py +204 -0
  34. framcore/components/Transmission.py +183 -0
  35. framcore/components/_PowerPlant.py +81 -0
  36. framcore/components/__init__.py +22 -0
  37. framcore/components/wind_solar.py +67 -0
  38. framcore/curves/Curve.py +44 -0
  39. framcore/curves/LoadedCurve.py +155 -0
  40. framcore/curves/__init__.py +9 -0
  41. framcore/events/__init__.py +21 -0
  42. framcore/events/events.py +51 -0
  43. framcore/expressions/Expr.py +490 -0
  44. framcore/expressions/__init__.py +28 -0
  45. framcore/expressions/_get_constant_from_expr.py +483 -0
  46. framcore/expressions/_time_vector_operations.py +615 -0
  47. framcore/expressions/_utils.py +73 -0
  48. framcore/expressions/queries.py +423 -0
  49. framcore/expressions/units.py +207 -0
  50. framcore/fingerprints/__init__.py +11 -0
  51. framcore/fingerprints/fingerprint.py +293 -0
  52. framcore/juliamodels/JuliaModel.py +161 -0
  53. framcore/juliamodels/__init__.py +7 -0
  54. framcore/loaders/__init__.py +10 -0
  55. framcore/loaders/loaders.py +407 -0
  56. framcore/metadata/Div.py +73 -0
  57. framcore/metadata/ExprMeta.py +50 -0
  58. framcore/metadata/LevelExprMeta.py +17 -0
  59. framcore/metadata/Member.py +55 -0
  60. framcore/metadata/Meta.py +44 -0
  61. framcore/metadata/__init__.py +15 -0
  62. framcore/populators/Populator.py +108 -0
  63. framcore/populators/__init__.py +7 -0
  64. framcore/querydbs/CacheDB.py +50 -0
  65. framcore/querydbs/ModelDB.py +34 -0
  66. framcore/querydbs/QueryDB.py +45 -0
  67. framcore/querydbs/__init__.py +11 -0
  68. framcore/solvers/Solver.py +48 -0
  69. framcore/solvers/SolverConfig.py +272 -0
  70. framcore/solvers/__init__.py +9 -0
  71. framcore/timeindexes/AverageYearRange.py +20 -0
  72. framcore/timeindexes/ConstantTimeIndex.py +17 -0
  73. framcore/timeindexes/DailyIndex.py +21 -0
  74. framcore/timeindexes/FixedFrequencyTimeIndex.py +762 -0
  75. framcore/timeindexes/HourlyIndex.py +21 -0
  76. framcore/timeindexes/IsoCalendarDay.py +31 -0
  77. framcore/timeindexes/ListTimeIndex.py +197 -0
  78. framcore/timeindexes/ModelYear.py +17 -0
  79. framcore/timeindexes/ModelYears.py +18 -0
  80. framcore/timeindexes/OneYearProfileTimeIndex.py +21 -0
  81. framcore/timeindexes/ProfileTimeIndex.py +32 -0
  82. framcore/timeindexes/SinglePeriodTimeIndex.py +37 -0
  83. framcore/timeindexes/TimeIndex.py +90 -0
  84. framcore/timeindexes/WeeklyIndex.py +21 -0
  85. framcore/timeindexes/__init__.py +36 -0
  86. framcore/timevectors/ConstantTimeVector.py +135 -0
  87. framcore/timevectors/LinearTransformTimeVector.py +114 -0
  88. framcore/timevectors/ListTimeVector.py +123 -0
  89. framcore/timevectors/LoadedTimeVector.py +104 -0
  90. framcore/timevectors/ReferencePeriod.py +41 -0
  91. framcore/timevectors/TimeVector.py +94 -0
  92. framcore/timevectors/__init__.py +17 -0
  93. framcore/utils/__init__.py +36 -0
  94. framcore/utils/get_regional_volumes.py +369 -0
  95. framcore/utils/get_supported_components.py +60 -0
  96. framcore/utils/global_energy_equivalent.py +46 -0
  97. framcore/utils/isolate_subnodes.py +163 -0
  98. framcore/utils/loaders.py +97 -0
  99. framcore/utils/node_flow_utils.py +236 -0
  100. framcore/utils/storage_subsystems.py +107 -0
  101. fram_core-0.0.0.dist-info/METADATA +0 -5
  102. fram_core-0.0.0.dist-info/RECORD +0 -4
  103. fram_core-0.0.0.dist-info/top_level.txt +0 -1
@@ -0,0 +1,615 @@
1
+ # ruff: noqa: PLR2004
2
+ import math
3
+ from datetime import date, datetime, timedelta
4
+
5
+ import numpy as np
6
+ from numpy.typing import NDArray
7
+
8
+ HOURS_PER_WEEK = 168
9
+ MINUTES_PER_WEEK = HOURS_PER_WEEK * 60
10
+ SECONDS_PER_WEEK = MINUTES_PER_WEEK * 60
11
+ MODEL_WEEKS_PER_YEAR = 52
12
+
13
+
14
def aggregate(input_vector: NDArray, output_vector: NDArray, is_aggfunc_sum: bool) -> None:
    """
    Aggregate input vector to output vector, in place.

    Each output element covers `input.size // output.size` consecutive input
    elements. The per-group mean is written into `output_vector`; when
    `is_aggfunc_sum` is True the mean is scaled back up to a per-group sum.
    """
    assert input_vector.ndim == 1
    assert output_vector.ndim == 1
    assert input_vector.size > output_vector.size
    assert input_vector.size % output_vector.size == 0
    assert input_vector.dtype == output_vector.dtype

    group_size = input_vector.size // output_vector.size
    groups = input_vector.reshape((output_vector.size, group_size))
    groups.mean(axis=1, out=output_vector)

    if is_aggfunc_sum:
        # Mean * group size == sum of the group.
        np.multiply(output_vector, group_size, out=output_vector)
28
+
29
+
30
def disaggregate(input_vector: NDArray, output_vector: NDArray, is_disaggfunc_repeat: bool) -> None:
    """
    Disaggregate input vector to output vector, in place.

    Every input element is repeated `output.size // input.size` times. With
    `is_disaggfunc_repeat` False, each repeated value is divided by the
    multiplier so that group sums are preserved instead of levels.
    """
    assert input_vector.ndim == 1
    assert output_vector.ndim == 1
    assert input_vector.size < output_vector.size
    assert output_vector.size % input_vector.size == 0
    assert input_vector.dtype == output_vector.dtype

    group_size = output_vector.size // input_vector.size
    output_vector[:] = np.repeat(input_vector, group_size)

    if not is_disaggfunc_repeat:
        np.multiply(output_vector, 1 / group_size, out=output_vector)
43
+
44
+
45
def convert_to_modeltime(input_vector: NDArray, startdate: datetime, period_duration: timedelta) -> tuple[datetime, NDArray]:
    """
    Convert isotime input vector to model time (52-weeks) of various data resolutions by removing week 53 data if present.

    The method supports input vector period durations starting at 1 second up to multiple weeks.

    If the input vector period duration is not compatible with the target period after removing week 53 data, the method will raise a ValueError.

    If start_date of input vector is in week 53, the start_date will be moved to the first week of the next year.

    Args:
        input_vector (NDArray): The input time series vector in isotime format.
        startdate (datetime): The start date of the input vector.
        period_duration (timedelta): The duration of each period in the input vector.

    Returns:
        tuple[datetime, NDArray]: A tuple with two elements, where the first element is a (possibly adjusted) start date and the second element is the converted
            model time vector.

    Raises:
        ValueError: If removing week 53 would leave a duration that is not a whole multiple of period_duration.

    """
    assert isinstance(input_vector, np.ndarray)
    assert input_vector.ndim == 1
    assert isinstance(startdate, datetime)
    assert isinstance(period_duration, timedelta)
    assert period_duration.total_seconds() > 0, "Period duration must be greater than zero."

    end_date = startdate + period_duration * input_vector.size

    # Fast path: no week 53 falls inside the covered range, so nothing needs removing.
    if not _period_contains_week_53(startdate, end_date):
        return startdate, input_vector.copy()

    whole_duration = end_date - startdate
    week_53_periods = _find_all_week_53_periods(startdate, end_date)
    remaining_period = whole_duration - _total_duration(week_53_periods)

    # check if the remaining period is compatible with the target period duration
    if remaining_period % period_duration != timedelta(0):
        suggested_period_duration = _common_compatible_period_duration(whole_duration, remaining_period)
        err_message = f"Incompatible period duration detected! The resulting vector would be incompatible with period duration of {period_duration} after week 53 data is removed. Solution: use period duration that is compatible with both input and resulting vectors. Suggested period duration: {suggested_period_duration}."
        raise ValueError(err_message)

    sub_periods = _find_all_sub_periods(startdate, end_date, week_53_periods)

    # If every sub-period (gaps and week-53 spans) is a whole multiple of the
    # period duration, week 53 can be cut out directly at this resolution.
    if _period_duration_compatible_with_all_sub_periods(period_duration, sub_periods):
        return _to_modeltime(input_vector, startdate, period_duration)

    # Otherwise refine to the gcd resolution of the period duration and all
    # sub-period lengths, remove week 53 at that finer resolution, then
    # aggregate back (mean, not sum) to the original resolution.
    new_period_duration = _common_compatible_period_duration(period_duration, *[sub_period[1] - sub_period[0] for sub_period in sub_periods])
    scaling_factor = period_duration // new_period_duration

    tmp_vector = np.zeros(input_vector.size * scaling_factor, dtype=input_vector.dtype)

    disaggregate(
        input_vector=input_vector,
        output_vector=tmp_vector,
        is_disaggfunc_repeat=True,
    )

    output_date, tmp_vector = _to_modeltime(
        input_vector=tmp_vector,
        startdate=startdate,
        period_duration=new_period_duration,
    )

    assert tmp_vector.size % scaling_factor == 0, "This should never happen: expected tmp_vector.size to be multiple of scaling_factor before aggregation."

    out_vector = np.zeros(tmp_vector.size // scaling_factor, dtype=input_vector.dtype)

    aggregate(
        input_vector=tmp_vector,
        output_vector=out_vector,
        is_aggfunc_sum=False,
    )

    return output_date, out_vector
119
+
120
+ def _total_duration(periods: list[tuple[datetime, datetime]]) -> timedelta:
121
+ return sum((end - start for start, end in periods), timedelta(0))
122
+
123
+ def _find_all_sub_periods(startdate: datetime, enddate: datetime, week_53_periods: list[tuple[datetime, datetime]]) -> list[tuple[datetime, datetime]]:
124
+ if week_53_periods is None or len(week_53_periods) == 0:
125
+ return [(startdate, enddate)]
126
+
127
+ assert week_53_periods[0][0] >= startdate, "First week 53 period must be after or equal to startdate."
128
+ assert week_53_periods[-1][0] < enddate, "Last week 53 period start must be before the enddate."
129
+ assert week_53_periods[-1][1] <= enddate, "Last week 53 period end must be before or equal to the enddate."
130
+
131
+ sub_periods = []
132
+
133
+ for i, (week_53_start, week_53_end) in enumerate(week_53_periods):
134
+ if i == 0 and week_53_start != startdate:
135
+ sub_periods.append((startdate, week_53_start))
136
+ else:
137
+ prev_week_53_end = week_53_periods[i - 1][1]
138
+ sub_periods.append((prev_week_53_end, week_53_start))
139
+ sub_periods.append((week_53_start, week_53_end))
140
+ if week_53_periods[-1][1] != enddate:
141
+ sub_periods.append((week_53_periods[-1][1], enddate))
142
+ return sub_periods
143
+
144
+ def _period_duration_compatible_with_all_sub_periods(period_duration: timedelta, periods: list[tuple[datetime, datetime]]) -> bool:
145
+ return not any((period[1] - period[0]) % period_duration != timedelta(0) for period in periods)
146
+
147
+ def _common_compatible_period_duration(*period_durations: timedelta) -> timedelta:
148
+ return timedelta(seconds=math.gcd(*[int(period_duration.total_seconds()) for period_duration in period_durations]))
149
+
150
def _to_modeltime(input_vector: NDArray, startdate: datetime, period_duration: timedelta) -> tuple[datetime, NDArray]:
    """Strip week-53 data and, when the start lies in week 53, shift the start to the next year."""
    output_vector = _remove_week_53_data(input_vector, startdate, period_duration)
    output_date = _get_start_of_next_year(startdate) if _is_within_week_53(startdate) else startdate
    return output_date, output_vector
159
+
160
+
161
def convert_to_isotime(
    input_vector: NDArray,
    startdate: datetime,
    period_duration: timedelta,
) -> NDArray:
    """
    Convert model time input vector to isotime, handling week 53 if present.

    Args:
        input_vector (NDArray): The input vector in model time. Input can be in weekly, daily, hourly or minute format.
            For example year, week and hour format: (2025, 3, 1), (2025, 3, 2), ..., (2025, 52, 168), (2026, 1, 1). Time
            index can start at any date, not necessarily the first day of the year.
        startdate (datetime): The start date of the input vector.
        period_duration (timedelta): The duration of each period in the input vector.

    Returns:
        NDArray: The converted isotime vector.

    Raises:
        ValueError: If adding week 53 data would produce a duration that is not a whole multiple of period_duration.

    """
    assert isinstance(input_vector, np.ndarray)
    assert input_vector.ndim == 1

    total_duration = period_duration * input_vector.size

    # "Whole years" means the vector starts on ISO week 1 day 1 and covers an
    # exact multiple of 52 model weeks.
    is_whole_years = startdate.isocalendar().week == 1 and startdate.isocalendar().weekday == 1 and (total_duration % timedelta(weeks=52) == timedelta(0))

    if is_whole_years:
        # Re-derive the end from the ISO calendar so that 53-week years are
        # accounted for when locating week-53 spans below.
        total_years = total_duration // timedelta(weeks=52)
        end_date = datetime.fromisocalendar(startdate.isocalendar().year + total_years, 1, 1)
    else:
        end_date = startdate + total_duration

    # Fast path: no week 53 to insert anywhere in the covered range.
    if not (is_whole_years and _has_week_53(startdate.isocalendar().year)) and not _period_contains_week_53(startdate, end_date):
        return input_vector.copy()

    week_53_periods = _find_all_week_53_periods(startdate, end_date)
    extended_total_duration = total_duration + timedelta(weeks=len(week_53_periods))

    # check if the extended period is compatible with the target period duration
    if extended_total_duration % period_duration != timedelta(0):
        suggested_period_duration = _common_compatible_period_duration(total_duration, extended_total_duration)
        err_message = f"Incompatible period duration detected when converting to ISO-time! The resulting vector would be incompatible with period duration of {period_duration} after week 53 data is added. Solution: use period duration that is compatible with both input and resulting vectors. Suggested period duration: {suggested_period_duration}."
        raise ValueError(err_message)

    sub_periods = _find_all_sub_periods(startdate, end_date, week_53_periods)

    # If all sub-periods align with the period duration, insert week 53 directly.
    if _period_duration_compatible_with_all_sub_periods(period_duration, sub_periods):
        return _to_isotime(input_vector, period_duration, sub_periods)

    # Otherwise refine to the gcd resolution, insert week 53 at that finer
    # resolution, then aggregate back (mean) to the original resolution.
    new_period_duration = _common_compatible_period_duration(period_duration, *[sub_period[1] - sub_period[0] for sub_period in sub_periods])

    scaling_factor = period_duration // new_period_duration
    tmp_vector = np.zeros(input_vector.size * scaling_factor, dtype=input_vector.dtype)

    disaggregate(
        input_vector=input_vector,
        output_vector=tmp_vector,
        is_disaggfunc_repeat=True,
    )

    assert tmp_vector.size % scaling_factor == 0, "This should never happen: expected tmp_vector.size to be multiple of scaling_factor before aggregation."

    adjusted_vector = _to_isotime(tmp_vector, new_period_duration, sub_periods)

    out_vector = np.zeros(adjusted_vector.size // scaling_factor, dtype=input_vector.dtype)

    aggregate(
        input_vector=adjusted_vector,
        output_vector=out_vector,
        is_aggfunc_sum=False,
    )

    return out_vector
235
+
236
def _to_isotime(input_vector: NDArray, period_duration: timedelta, sub_periods: list[tuple[datetime, datetime]]) -> NDArray:
    """
    Insert week-53 data into a model-time vector.

    For every sub-period that starts in ISO week 53, a full week of values is
    inserted, duplicated from the immediately preceding week of data (i.e.
    week 53 repeats week 52). Assumes each week-53 sub-period spans a whole
    week and that period_duration divides a week evenly — callers guarantee
    this via _period_duration_compatible_with_all_sub_periods.
    """
    periods_per_week = timedelta(weeks=1) // period_duration

    idxs, values = [], []

    for sub_period in sub_periods:
        if sub_period[0].isocalendar().week == 53:
            delta = sub_period[0] - sub_periods[0][0]
            # Offset into the ORIGINAL vector: subtract len(idxs) because the
            # elapsed time includes earlier week-53 weeks that are not present
            # in input_vector yet (np.insert applies all insertions at once).
            offset = delta // period_duration - len(idxs)
            for i in range(periods_per_week):
                # All inserted values go at the same original index; np.insert
                # preserves their order, reproducing the previous week's data.
                idxs.append(offset)
                values.append(input_vector[offset - periods_per_week + i])

    return np.insert(input_vector, idxs, values)
250
+
251
+
252
+ MINUTES_PER_DAY = 24 * 60
253
+
254
+
255
def periodize_isotime(
    input_vector: NDArray,
    input_start_year: int,
    input_num_years: int,
    output_start_year: int,
    output_num_years: int,
) -> NDArray:
    """
    Extract data for a given number of years from an input time series vector.

    This function supports input vectors representing yearly, monthly, or higher-resolution data.
    It calculates the appropriate indices to slice the input vector based on the input and output
    time periods and returns the corresponding subset of the data.

    Args:
        input_vector (NDArray): A 1D NumPy array representing the input time series data.
        input_start_year (int): The starting year of the input time series.
        input_num_years (int): The number of years covered by the input time series.
        output_start_year (int): The starting year of the desired output time series.
        output_num_years (int): The number of years to include in the output time series.

    Returns:
        NDArray: A 1D NumPy array containing the subset of the input vector corresponding to the
            specified output time period.

    Raises:
        AssertionError: If any of the following conditions are not met:
            - `input_vector` is a 1D NumPy array.
            - `input_start_year` is less than or equal to `output_start_year`.
            - `input_num_years` is less than or equal to the size of `input_vector`.
            - `output_num_years` is less than or equal to `input_num_years`.
            - For higher-resolution data, the total number of minutes in the input period
              is a multiple of the input vector size.

    Notes:
        - If the input vector size equals `input_num_years`, it is treated as yearly data.
        - If the input vector size equals `input_num_years * 12`, it is treated as monthly data.
        - For higher-resolution data (e.g., minute-level), indices are derived from minute
          offsets between the ISO-calendar starts of the input and output periods.

    """
    assert isinstance(input_vector, np.ndarray), "Input vector must be a 1D NumPy array."
    assert input_vector.ndim == 1, "Input vector must be a 1D NumPy array."
    # Message fixed: the code requires input_start_year <= output_start_year,
    # but the old message stated the opposite relationship.
    assert input_start_year <= output_start_year, "Output start year must be greater than or equal to input start year."
    assert input_num_years <= input_vector.size, "Input number of years must be less than or equal to input vector size."
    assert output_num_years <= input_num_years, "Output number of years must be less than or equal to input_num_years."

    if input_vector.size == input_num_years:
        # Yearly data: one value per year.
        start_idx = output_start_year - input_start_year
        end_idx = start_idx + output_num_years
    elif input_vector.size == input_num_years * 12:
        # Monthly data: twelve values per year.
        start_idx = (output_start_year - input_start_year) * 12
        end_idx = start_idx + output_num_years * 12
    else:
        # Higher resolution: ISO years run week 1 day 1 to week 1 day 1.
        input_start_date = date.fromisocalendar(input_start_year, 1, 1)
        input_end_date = date.fromisocalendar(input_start_year + input_num_years, 1, 1)
        output_start_date = date.fromisocalendar(output_start_year, 1, 1)
        output_end_date = date.fromisocalendar(output_start_year + output_num_years, 1, 1)

        data_size_minutes = (input_end_date - input_start_date).days * MINUTES_PER_DAY
        assert data_size_minutes % input_vector.size == 0, "Input vector size must be a multiple of the number of minutes in the input period."

        period_size_minutes = data_size_minutes // input_vector.size
        offset_minutes = (output_start_date - input_start_date).days * MINUTES_PER_DAY
        output_size_minutes = (output_end_date - output_start_date).days * MINUTES_PER_DAY

        start_idx = offset_minutes // period_size_minutes
        end_idx = start_idx + output_size_minutes // period_size_minutes

    return input_vector[start_idx:end_idx]
329
+
330
+
331
def periodize_modeltime(
    input_vector: NDArray,
    input_start_year: int,
    input_num_years: int,
    output_start_year: int,
    output_num_years: int,
) -> NDArray:
    """
    Extract a portion of a time-series input vector corresponding to a specified range of years.

    This function assumes that the input vector represents a time series divided into equal periods
    per year. It extracts a subset of the input vector corresponding to the specified output years.

    Args:
        input_vector (NDArray): A 1-dimensional NumPy array representing the input time series.
        input_start_year (int): The starting year of the input vector.
        input_num_years (int): The total number of years represented in the input vector.
        output_start_year (int): The starting year for the output vector.
        output_num_years (int): The number of years to include in the output vector.

    Returns:
        NDArray: A 1-dimensional NumPy array containing the portion of the input vector
            corresponding to the specified output years.

    Raises:
        AssertionError: If any of the following conditions are not met:
            - `input_vector` is a 1-dimensional NumPy array.
            - `output_start_year` is greater than or equal to `input_start_year`.
            - `input_num_years` is less than or equal to the size of `input_vector`.
            - The size of `input_vector` is a multiple of `input_num_years`.
            - `output_num_years` is less than or equal to `input_num_years`.
            - The requested output vector does not exceed the size of `input_vector`.

    """
    assert isinstance(input_vector, np.ndarray)
    assert input_vector.ndim == 1
    assert output_start_year >= input_start_year, "Output start year must be greater than or equal to input start year."
    assert input_num_years <= input_vector.size, "Input number of years must be less than or equal to input vector size."
    assert input_vector.size % input_num_years == 0, "Input vector size must be a multiple of input number of years."
    # Message fixed: typo "equalt"/"input_number_years" corrected.
    assert output_num_years <= input_num_years, "Output number of years must be less than or equal to input_num_years."

    periods_per_year = input_vector.size // input_num_years
    start_idx = (output_start_year - input_start_year) * periods_per_year
    end_idx = start_idx + periods_per_year * output_num_years

    # Equivalent to the old `end_idx < size + 1`, stated directly.
    assert end_idx <= input_vector.size, "Requested output vector exceeds input vector size."

    return input_vector[start_idx:end_idx]
379
+
380
+
381
def repeat_oneyear_modeltime(
    input_vector: NDArray,
    input_start_date: datetime,
    period_duration: timedelta,
    output_start_date: datetime,
    output_end_date: datetime,
) -> NDArray:
    """
    Repeat a one-year input vector to cover the specified output date range.

    Args:
        input_vector (NDArray): A 1D NumPy array representing the input time series for one year.
        input_start_date (datetime): The start date of the input vector.
        period_duration (timedelta): The duration of each period in the input vector.
        output_start_date (datetime): The start date of the output period.
        output_end_date (datetime): The end date of the output period.

    Returns:
        NDArray: A 1D NumPy array containing the repeated time series data for the specified output period.

    """
    assert isinstance(input_vector, np.ndarray), "input_vector must be a 1D numpy array."
    assert input_vector.ndim == 1, "input_vector must be a 1D numpy array."
    assert isinstance(input_start_date, datetime), "input_start_date must be a datetime object."
    assert isinstance(period_duration, timedelta), "period_duration must be a timedelta object."
    # Fixed: was `>= 0`, which permitted a zero duration despite the message.
    assert period_duration.total_seconds() > 0, "period_duration must be at least one second."
    assert period_duration.total_seconds() % 60 == 0, "period_duration must be at least one minute resolution."
    assert isinstance(output_start_date, datetime), "output_start_date must be a datetime object."
    assert isinstance(output_end_date, datetime), "output_end_date must be a datetime object."
    assert output_start_date < output_end_date, "output_end_date must be after output_start_date."

    output_total_duration = output_end_date - output_start_date
    assert output_total_duration >= period_duration, "Output period must be at least one period duration long."
    assert output_total_duration % period_duration == timedelta(0), "Output period must be a multiple of input period duration."

    output_periods_count = int((output_end_date - output_start_date) / period_duration)

    # Align by position within the ISO year (week, weekday), ignoring the year itself.
    _, input_start_week, input_start_weekday = input_start_date.isocalendar()
    _, output_start_week, output_start_weekday = output_start_date.isocalendar()

    # NOTE(review): assumes the output start is not earlier in the year than the
    # input start (offset >= 0) — a negative offset would index from the end.
    start_offset_days = (output_start_week - input_start_week) * 7 + (output_start_weekday - input_start_weekday)
    start_offset_periods = int(timedelta(days=start_offset_days) / period_duration)

    # Bug fix: the old code computed a FLOAT repeat count and truncated it with
    # int(); when offset + count was smaller than len(input_vector) this gave
    # int(fraction) == 0 tiles and an empty result. Use ceiling division so at
    # least enough copies are laid down to cover the requested slice.
    needed_periods = start_offset_periods + output_periods_count
    repeat_count = -(-needed_periods // input_vector.size)  # integer ceil

    repeated_vector = np.tile(input_vector, repeat_count)

    # Slice the repeated vector to match the exact output period
    return repeated_vector[start_offset_periods : start_offset_periods + output_periods_count]
434
+
435
+
436
def repeat_oneyear_isotime(
    input_vector: NDArray,
    input_start_date: datetime,
    period_duration: timedelta,
    output_start_date: datetime,
    output_end_date: datetime,
) -> NDArray:
    """
    Repeat a one-year input vector to cover the specified output date range in isotime format.

    Args:
        input_vector (NDArray): A 1D NumPy array representing the input time series for one year.
        input_start_date (date): The start date of the input vector.
        period_duration (timedelta): The duration of each period in the input vector.
        output_start_date (datetime): The start date of the output period.
        output_end_date (datetime): The end date of the output period.

    Returns:
        NDArray: A 1D NumPy array containing the repeated time series data for the specified output period.

    Raises:
        ValueError: If period_duration is longer than a week but not exactly 52 or 53 weeks.

    """
    assert isinstance(input_vector, np.ndarray), "input_vector must be a 1D numpy array."
    assert input_vector.ndim == 1, "input_vector must be a 1D numpy array."
    assert isinstance(input_start_date, date), "input_start_date must be a date object."
    assert isinstance(period_duration, timedelta), "period_duration must be a timedelta object."
    # Fixed: was `>= 0`, which permitted a zero duration despite the message.
    assert period_duration.total_seconds() > 0, "period_duration must be at least one second."
    assert period_duration.total_seconds() % 1 == 0, "period_duration must be at least one second resolution."
    assert isinstance(output_start_date, datetime), "output_start_date must be a datetime object."
    assert isinstance(output_end_date, datetime), "output_end_date must be a datetime object."
    assert output_start_date < output_end_date, "output_end_date must be after output_start_date."

    output_total_duration = output_end_date - output_start_date
    assert output_total_duration >= period_duration, "Output period must be at least one period duration long."

    # NOTE(review): assumes the output range covers whole ISO years.
    total_years = output_end_date.isocalendar().year - output_start_date.isocalendar().year

    if period_duration > timedelta(weeks=1):
        if period_duration == timedelta(weeks=52) or period_duration == timedelta(weeks=53):
            _, output_start_week, output_start_weekday = output_start_date.isocalendar()
            _, output_end_week, output_end_weekday = output_end_date.isocalendar()

            assert (  # noqa: PT018
                output_start_week == 1 and output_start_weekday == 1 and output_end_week == 1 and output_end_weekday == 1
            ), "Output period must be whole years."
            return np.repeat(input_vector, total_years)
        # Bug fix: was `return ValueError(...)`, which handed the exception
        # instance back to the caller instead of raising it.
        raise ValueError("Provided period duration is not supported for isotime conversion.")

    assert output_total_duration % period_duration == timedelta(0), "Output period must be a multiple of input period duration."

    periods_per_week = SECONDS_PER_WEEK / period_duration.total_seconds()
    assert periods_per_week.is_integer(), "Week must be a multiple of input period duration."
    periods_per_week = int(periods_per_week)

    # Initialize 2D array with 53 weeks per year
    output_vector = np.zeros((total_years, 53 * periods_per_week), dtype=np.float32)

    # Repeat input vector across all years
    output_vector[:, : input_vector.size] = np.tile(input_vector, (total_years, 1))

    # Fill week 53 with the data from week 52 for each year
    if len(input_vector) == 52 * periods_per_week:
        output_vector[:, 52 * periods_per_week :] = output_vector[:, 51 * periods_per_week : 52 * periods_per_week]

    # Flatten the output vector to 1D
    output_vector = np.reshape(output_vector, -1)

    # Array of all years in the output period
    years = np.arange(output_start_date.isocalendar().year, output_end_date.isocalendar().year)

    # Find all indices of years with only 52 weeks
    years_with_52_weeks = np.argwhere(~np.vectorize(_has_week_53)(years)).flatten()

    if years_with_52_weeks.size > 0:
        indices_to_delete = np.reshape(
            [
                np.arange(
                    idx * 53 * periods_per_week + 52 * periods_per_week,
                    idx * 53 * periods_per_week + 52 * periods_per_week + periods_per_week,
                )
                for idx in years_with_52_weeks
            ],
            -1,
        )

        # Remove week 53 for years with only 52 weeks
        output_vector = np.delete(output_vector, indices_to_delete)

    return output_vector
524
+
525
+
526
+ def _is_within_week_53(starttime: datetime) -> bool:
527
+ """Check if the start date is in week 53 of the year."""
528
+ return starttime.isocalendar().week == 53
529
+
530
+
531
+ def _get_start_of_next_year(starttime: datetime) -> datetime:
532
+ """Move the start date to the first week of the next year if it starts in week 53."""
533
+ if starttime.isocalendar().week != 53:
534
+ raise ValueError("Start date is not in week 53.")
535
+
536
+ return datetime.fromisocalendar(starttime.isocalendar().year + 1, 1, 1)
537
+
538
+
539
+ def _is_week_53(starttime: datetime) -> bool:
540
+ """Check if the given date is in week 53 of the year."""
541
+ return starttime.isocalendar().week == 53
542
+
543
+
544
def _remove_week_53_data(input_vector: NDArray, starttime: datetime, period_duration: timedelta) -> NDArray:
    """
    Remove data corresponding to week 53 from the input vector.

    Walks forward from the start of the vector's week, jumping to each ISO
    week 53 boundary, and collects the indexes that fall inside a real week 53
    (years without one land on week 1 of the next year and are skipped).
    Assumes period_duration divides a week evenly — callers verify this via
    the sub-period compatibility check before calling.
    """
    period_duration_seconds = int(period_duration.total_seconds())

    tracking_index = 0
    tracking_date = starttime

    # Adjust start date to the beginning of the week if it doesn't start on a Monday
    if starttime.isocalendar().weekday != 1:
        # weekday > 1 makes seconds_to_adjust negative: the virtual origin moves
        # back to Monday and tracking_index goes negative; max(tracking_index, 0)
        # below clamps so only indexes inside the real vector are removed.
        # NOTE(review): floor division of a negative, non-multiple adjustment
        # would round down an extra period — presumably the caller guarantees
        # whole-period alignment; confirm.
        seconds_to_adjust = (1 - starttime.isocalendar().weekday) * 24 * 60 * 60
        tracking_index += seconds_to_adjust // period_duration_seconds
        tracking_date += timedelta(seconds=seconds_to_adjust)

    indexes_to_remove = []

    while tracking_index < input_vector.size:
        # Calculate the start of week 53 (or week 1 of the next year when the
        # current year has no week 53).
        weeks_to_start_of_week_53 = 53 - tracking_date.isocalendar().week
        seconds_to_start_of_week_53 = weeks_to_start_of_week_53 * SECONDS_PER_WEEK
        tracking_date += timedelta(seconds=seconds_to_start_of_week_53)
        tracking_index += seconds_to_start_of_week_53 // period_duration_seconds

        # Check if week 53 exists and mark its indexes for removal
        if _is_week_53(tracking_date):
            periods_per_week = SECONDS_PER_WEEK // period_duration_seconds
            # Clamp to the vector bounds: week 53 may start before index 0 or
            # run past the end of the data.
            indexes_to_remove.extend(range(max(tracking_index, 0), min(tracking_index + periods_per_week, input_vector.size)))
            tracking_date += timedelta(seconds=SECONDS_PER_WEEK)
            tracking_index += periods_per_week
    return np.delete(input_vector, indexes_to_remove)
573
+
574
+
575
+ def _has_week_53(year_: int) -> bool:
576
+ """Check if the year of the given date has week 53."""
577
+ return date(year_, 12, 31).isocalendar().week == 53
578
+
579
def _period_contains_week_53(startdate: datetime, enddate: datetime) -> bool:
    """Return True when any ISO week 53 overlaps the half-open range [startdate, enddate)."""
    first_year = startdate.isocalendar().year
    last_year = enddate.isocalendar().year

    for year in range(first_year, last_year + 1):
        if not _has_week_53(year):
            continue
        week_53_start = datetime.fromisocalendar(year, 53, 1)
        week_53_end = week_53_start + timedelta(weeks=1)
        # Standard interval-overlap test.
        if startdate < week_53_end and week_53_start < enddate:
            return True
    return False
591
+
592
def _find_all_week_53_periods(startdate: datetime, enddate: datetime) -> list[tuple[datetime, datetime]]:
    """
    Find all week 53 periods between startdate and enddate.

    Returns:
        list of tuples: Each tuple is (start, end), where 'start' is inclusive and 'end' is exclusive.
        Both 'start' and 'end' are datetime objects, representing the start and end of week 53 periods
        within the given range, clipped to the range boundaries.

    """
    periods: list[tuple[datetime, datetime]] = []
    first_year = startdate.isocalendar().year
    last_year = enddate.isocalendar().year

    for year in range(first_year, last_year + 1):
        if not _has_week_53(year):
            continue
        week_53_start = datetime.fromisocalendar(year, 53, 1)
        week_53_end = week_53_start + timedelta(weeks=1)
        # Clip the week to the requested range; keep it only if non-empty.
        clipped_start = max(startdate, week_53_start)
        clipped_end = min(enddate, week_53_end)
        if clipped_start < clipped_end:
            periods.append((clipped_start, clipped_end))
    return periods
@@ -0,0 +1,73 @@
1
+ from __future__ import annotations
2
+
3
+ import copy
4
+ from typing import TYPE_CHECKING
5
+
6
+ from framcore.curves import Curve
7
+ from framcore.expressions import Expr
8
+ from framcore.querydbs import QueryDB
9
+ from framcore.timevectors import ConstantTimeVector, TimeVector
10
+
11
+ if TYPE_CHECKING:
12
+ from framcore import Model
13
+
14
+
15
def _load_model_and_create_model_db(db: QueryDB | Model) -> QueryDB:
    """Return db as a QueryDB, wrapping a Model in a ModelDB when needed.

    Raises:
        ValueError: if db is neither a Model nor a QueryDB.

    """
    # Local imports avoid a circular dependency with the framcore package.
    from framcore import Model

    if isinstance(db, Model):
        from framcore.querydbs import ModelDB

        db = ModelDB(db)

    if isinstance(db, QueryDB):
        return db

    message = f"Expected db to be Model or QueryDB, got {db} of type {type(db).__name__}"
    raise ValueError(message)
27
+
28
+
29
def _lookup_expr_from_constants_with_units(
    constants_with_units: dict[str, tuple],
    expr: Expr,
) -> tuple[str, float, str | None]:
    """Look up the (symbol, value, unit) registered for expr's source.

    Raises KeyError when the source is not present in constants_with_units.
    """
    key = expr.get_src()
    if isinstance(key, ConstantTimeVector):
        # Constant vectors are keyed by their expression string, not the object.
        key = key.get_expr_str()
    # Unpack explicitly so a malformed entry fails loudly here.
    sym, value, unit = constants_with_units[key]
    return sym, value, unit
38
+
39
+
40
def _is_real_expr(expr: Expr, db: QueryDB) -> bool:
    """Return True when every leaf of expr resolves to concrete data.

    A leaf is "real" when its source is a TimeVector/Curve, or when the db
    lookup of its source yields something other than another Expr.
    """
    if not expr.is_leaf():
        __, children = expr.get_operations(expect_ops=True, copy_list=False)
        return all(_is_real_expr(child, db) for child in children)
    src = expr.get_src()
    if isinstance(src, TimeVector | Curve):
        return True
    return not isinstance(db.get(src), Expr)
49
+
50
+
51
def _ensure_real_expr(expr: Expr, db: QueryDB) -> Expr:
    """Return an Expr whose leaves all resolve to concrete data.

    Returns expr itself when it is already real; otherwise works on a deep
    copy so the caller's expression tree is never mutated.
    """
    if _is_real_expr(expr, db):
        return expr
    resolved = copy.deepcopy(expr)
    _extend_expr(resolved, db)
    return resolved
57
+
58
+
59
def _extend_expr(expr: Expr, db: QueryDB) -> None:
    """Mutate expr in place so every leaf points at concrete data.

    A leaf whose db lookup yields another Expr adopts that Expr's state
    (by copying its __dict__) and is then resolved again recursively.

    Raises:
        AssertionError: if a leaf's source resolves to something other than
            an Expr, TimeVector or Curve.

    """
    if expr.is_leaf():
        src = expr.get_src()
        if isinstance(src, TimeVector | Curve):
            return
        obj = db.get(src)
        if isinstance(obj, Expr):
            # Overwrite this node's state with the referenced Expr's state,
            # then keep resolving the (possibly new) leaf or operation node.
            for name, value in obj.__dict__.items():
                setattr(expr, name, value)
            _extend_expr(expr, db)
            # Must return here: obj is an Expr on this path, so falling
            # through to the assert below would always fail.
            return
        assert isinstance(obj, TimeVector | Curve), f"Got {obj}"
        return
    __, args = expr.get_operations(expect_ops=True, copy_list=False)
    for ex in args:
        _extend_expr(ex, db)