openseries 1.9.5__py3-none-any.whl → 1.9.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
@@ -16,22 +16,25 @@ from math import ceil
16
16
  from pathlib import Path
17
17
  from secrets import choice
18
18
  from string import ascii_letters
19
- from typing import TYPE_CHECKING, Any, Literal, SupportsFloat, cast
19
+ from typing import TYPE_CHECKING, Any, Generic, Literal, cast
20
20
 
21
- from numpy import float64, inf, isnan, log, maximum, sqrt
21
+ from numpy import asarray, float64, inf, isnan, log, maximum, sqrt
22
22
 
23
23
  from .owntypes import (
24
+ CaptorLogoType,
24
25
  DateAlignmentError,
25
26
  InitialValueZeroError,
26
27
  NumberOfItemsAndLabelsNotSameError,
28
+ PlotlyConfigType,
27
29
  ResampleDataLossError,
28
30
  Self,
31
+ SeriesOrFloat_co,
29
32
  ValueType,
30
33
  )
31
34
 
32
35
  if TYPE_CHECKING: # pragma: no cover
33
- from numpy.typing import NDArray
34
36
  from openpyxl.worksheet.worksheet import Worksheet
37
+ from pandas import Timestamp
35
38
 
36
39
  from .owntypes import (
37
40
  CountriesType,
@@ -84,8 +87,127 @@ from .datefixer import (
84
87
  from .load_plotly import load_plotly_dict
85
88
 
86
89
 
87
- # noinspection PyTypeChecker
88
- class _CommonModel(BaseModel): # type: ignore[misc]
90
+ def _get_date_range_and_factor(
91
+ self: _CommonModel[SeriesOrFloat_co],
92
+ months_from_last: int | None = None,
93
+ from_date: dt.date | None = None,
94
+ to_date: dt.date | None = None,
95
+ periods_in_a_year_fixed: DaysInYearType | None = None,
96
+ ) -> tuple[dt.date, dt.date, float, DataFrame]:
97
+ """Common logic for date range and time factor calculation.
98
+
99
+ Parameters
100
+ ----------
101
+ months_from_last : int, optional
102
+ Number of months offset as positive integer. Overrides use of from_date
103
+ and to_date
104
+ from_date : datetime.date, optional
105
+ Specific from date
106
+ to_date : datetime.date, optional
107
+ Specific to date
108
+ periods_in_a_year_fixed : DaysInYearType, optional
109
+ Allows locking the periods-in-a-year to simplify test cases and
110
+ comparisons
111
+
112
+ Returns:
113
+ -------
114
+ tuple[dt.date, dt.date, float, DataFrame]
115
+ earlier, later, time_factor, data
116
+ """
117
+ earlier, later = self.calc_range(
118
+ months_offset=months_from_last,
119
+ from_dt=from_date,
120
+ to_dt=to_date,
121
+ )
122
+
123
+ if periods_in_a_year_fixed:
124
+ time_factor = float(periods_in_a_year_fixed)
125
+ else:
126
+ how_many = (
127
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
128
+ .count()
129
+ .iloc[0]
130
+ )
131
+ fraction = (later - earlier).days / 365.25
132
+ time_factor = how_many / fraction
133
+
134
+ data = self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
135
+ return earlier, later, time_factor, data
136
+
137
+
138
+ def _get_base_column_data(
139
+ self: _CommonModel[SeriesOrFloat_co],
140
+ base_column: tuple[str, ValueType] | int,
141
+ earlier: dt.date,
142
+ later: dt.date,
143
+ ) -> tuple[Series[float], tuple[str, ValueType], str]:
144
+ """Common logic for base column data extraction.
145
+
146
+ Parameters
147
+ ----------
148
+ base_column : tuple[str, ValueType] | int
149
+ Column reference
150
+ earlier : dt.date
151
+ Start date
152
+ later : dt.date
153
+ End date
154
+
155
+ Returns:
156
+ -------
157
+ tuple[Series[float], tuple[str, ValueType], str]
158
+ data, item, label
159
+ """
160
+ if isinstance(base_column, tuple):
161
+ data = self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)][
162
+ base_column
163
+ ]
164
+ item = base_column
165
+ label = cast("tuple[str, str]", self.tsdf[base_column].name)[0]
166
+ elif isinstance(base_column, int):
167
+ data = self.tsdf.loc[
168
+ cast("Timestamp", earlier) : cast("Timestamp", later)
169
+ ].iloc[:, base_column]
170
+ item = cast("tuple[str, ValueType]", self.tsdf.iloc[:, base_column].name)
171
+ label = cast("tuple[str, str]", self.tsdf.iloc[:, base_column].name)[0]
172
+ else:
173
+ msg = "base_column should be a tuple[str, ValueType] or an integer."
174
+ raise TypeError(msg)
175
+
176
+ return data, item, label
177
+
178
+
179
+ def _calculate_time_factor(
180
+ data: Series[float],
181
+ earlier: dt.date,
182
+ later: dt.date,
183
+ periods_in_a_year_fixed: DaysInYearType | None = None,
184
+ ) -> float:
185
+ """Calculate time factor for annualization.
186
+
187
+ Parameters
188
+ ----------
189
+ data : Series[float]
190
+ Data series for counting observations
191
+ earlier : dt.date
192
+ Start date
193
+ later : dt.date
194
+ End date
195
+ periods_in_a_year_fixed : DaysInYearType, optional
196
+ Fixed periods in year
197
+
198
+ Returns:
199
+ -------
200
+ float
201
+ Time factor
202
+ """
203
+ if periods_in_a_year_fixed:
204
+ return float(periods_in_a_year_fixed)
205
+
206
+ fraction = (later - earlier).days / 365.25
207
+ return data.count() / fraction
208
+
209
+
210
+ class _CommonModel(BaseModel, Generic[SeriesOrFloat_co]):
89
211
  """Declare _CommonModel."""
90
212
 
91
213
  tsdf: DataFrame = DataFrame(dtype="float64")
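
A minimal sketch of the annualization arithmetic used by the new _get_date_range_and_factor and _calculate_time_factor helpers above, with illustrative dates and an assumed observation count:

    import datetime as dt

    earlier = dt.date(2023, 1, 2)
    later = dt.date(2023, 12, 29)
    observations = 251                          # e.g. business days with prices

    fraction = (later - earlier).days / 365.25  # holding period in calendar years
    time_factor = observations / fraction       # implied periods per year

    print(round(time_factor, 1))                # 254.0
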
@@ -96,6 +218,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
96
218
  revalidate_instances="always",
97
219
  )
98
220
 
221
+ def _coerce_result(
222
+ self: Self, result: Series[float], name: str
223
+ ) -> SeriesOrFloat_co:
224
+ if self.tsdf.shape[1] == 1:
225
+ arr = float(asarray(a=result, dtype=float64).squeeze())
226
+ return cast("SeriesOrFloat_co", arr) # type: ignore[redundant-cast]
227
+ return cast(
228
+ "SeriesOrFloat_co",
229
+ Series(
230
+ data=result,
231
+ index=self.tsdf.columns,
232
+ name=name,
233
+ dtype="float64",
234
+ ),
235
+ )
236
+
99
237
  @property
100
238
  def length(self: Self) -> int:
101
239
  """Number of observations.
@@ -170,17 +308,18 @@ class _CommonModel(BaseModel): # type: ignore[misc]
170
308
  return self.length / self.yearfrac
171
309
 
172
310
  @property
173
- def max_drawdown_cal_year(self: Self) -> float | Series[float]:
311
+ def max_drawdown_cal_year(self: Self) -> SeriesOrFloat_co:
174
312
  """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.
175
313
 
176
314
  Returns:
177
315
  -------
178
- float | Pandas.Series[float]
316
+ SeriesOrFloat_co
179
317
  Maximum drawdown in a single calendar year.
318
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
180
319
 
181
320
  """
182
321
  years = Index(d.year for d in self.tsdf.index)
183
- mddc = (
322
+ result = (
184
323
  self.tsdf.groupby(years)
185
324
  .apply(
186
325
  lambda prices: (prices / prices.expanding(min_periods=1).max()).min()
@@ -188,53 +327,52 @@ class _CommonModel(BaseModel): # type: ignore[misc]
188
327
  )
189
328
  .min()
190
329
  )
191
- if self.tsdf.shape[1] == 1:
192
- return float(mddc.iloc[0])
193
- return Series(
194
- data=mddc,
195
- index=self.tsdf.columns,
196
- name="Max drawdown in cal yr",
197
- dtype="float64",
198
- )
330
+ return self._coerce_result(result=result, name="Max drawdown in cal yr")
199
331
 
200
332
  @property
201
- def geo_ret(self: Self) -> float | Series[float]:
333
+ def geo_ret(self: Self) -> SeriesOrFloat_co:
202
334
  """https://www.investopedia.com/terms/c/cagr.asp.
203
335
 
204
336
  Returns:
205
337
  -------
206
- float | Pandas.Series[float]
207
- Compounded Annual Growth Rate (CAGR)
338
+ SeriesOrFloat_co
339
+ Compounded Annual Growth Rate (CAGR).
340
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
341
+
208
342
 
209
343
  """
210
344
  return self.geo_ret_func()
211
345
 
212
346
  @property
213
- def arithmetic_ret(self: Self) -> float | Series[float]:
347
+ def arithmetic_ret(self: Self) -> SeriesOrFloat_co:
214
348
  """https://www.investopedia.com/terms/a/arithmeticmean.asp.
215
349
 
216
350
  Returns:
217
351
  -------
218
- float | Pandas.Series[float]
219
- Annualized arithmetic mean of returns
352
+ SeriesOrFloat_co
353
+ Annualized arithmetic mean of returns.
354
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
355
+
220
356
 
221
357
  """
222
358
  return self.arithmetic_ret_func()
223
359
 
224
360
  @property
225
- def value_ret(self: Self) -> float | Series[float]:
361
+ def value_ret(self: Self) -> SeriesOrFloat_co:
226
362
  """Simple return.
227
363
 
228
364
  Returns:
229
365
  -------
230
- float | Pandas.Series[float]
231
- Simple return
366
+ SeriesOrFloat_co
367
+ Simple return.
368
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
369
+
232
370
 
233
371
  """
234
372
  return self.value_ret_func()
235
373
 
236
374
  @property
237
- def vol(self: Self) -> float | Series[float]:
375
+ def vol(self: Self) -> SeriesOrFloat_co:
238
376
  """Annualized volatility.
239
377
 
240
378
  Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -242,14 +380,16 @@ class _CommonModel(BaseModel): # type: ignore[misc]
242
380
 
243
381
  Returns:
244
382
  -------
245
- float | Pandas.Series[float]
246
- Annualized volatility
383
+ SeriesOrFloat_co
384
+ Annualized volatility.
385
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
386
+
247
387
 
248
388
  """
249
389
  return self.vol_func()
250
390
 
251
391
  @property
252
- def downside_deviation(self: Self) -> float | Series[float]:
392
+ def downside_deviation(self: Self) -> SeriesOrFloat_co:
253
393
  """Downside Deviation.
254
394
 
255
395
  Standard deviation of returns that are below a Minimum Accepted Return
@@ -258,40 +398,47 @@ class _CommonModel(BaseModel): # type: ignore[misc]
258
398
 
259
399
  Returns:
260
400
  -------
261
- float | Pandas.Series[float]
262
- Downside deviation
401
+ SeriesOrFloat_co
402
+ Downside deviation.
403
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
404
+
263
405
 
264
406
  """
265
407
  min_accepted_return: float = 0.0
266
408
  order: Literal[2, 3] = 2
267
409
  return self.lower_partial_moment_func(
268
- min_accepted_return=min_accepted_return, order=order
410
+ min_accepted_return=min_accepted_return,
411
+ order=order,
269
412
  )
270
413
 
271
414
  @property
272
- def ret_vol_ratio(self: Self) -> float | Series[float]:
415
+ def ret_vol_ratio(self: Self) -> SeriesOrFloat_co:
273
416
  """Ratio of annualized arithmetic mean of returns and annualized volatility.
274
417
 
275
418
  Returns:
276
419
  -------
277
- float | Pandas.Series[float]
278
- Ratio of the annualized arithmetic mean of returns and annualized
420
+ SeriesOrFloat_co
421
+ Ratio of the annualized arithmetic mean of returns and annualized.
422
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
279
423
  volatility.
424
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
280
425
 
281
426
  """
282
427
  riskfree_rate: float = 0.0
283
428
  return self.ret_vol_ratio_func(riskfree_rate=riskfree_rate)
284
429
 
285
430
  @property
286
- def sortino_ratio(self: Self) -> float | Series[float]:
431
+ def sortino_ratio(self: Self) -> SeriesOrFloat_co:
287
432
  """https://www.investopedia.com/terms/s/sortinoratio.asp.
288
433
 
289
434
  Returns:
290
435
  -------
291
- float | Pandas.Series[float]
292
- Sortino ratio calculated as the annualized arithmetic mean of returns
436
+ SeriesOrFloat_co
437
+ Sortino ratio calculated as the annualized arithmetic mean of returns.
438
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame
293
439
  / downside deviation. The ratio implies that the riskfree asset has zero
294
440
  volatility, and a minimum acceptable return of zero.
441
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
295
442
 
296
443
  """
297
444
  riskfree_rate: float = 0.0
@@ -302,7 +449,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
302
449
  )
303
450
 
304
451
  @property
305
- def kappa3_ratio(self: Self) -> float | Series[float]:
452
+ def kappa3_ratio(self: Self) -> SeriesOrFloat_co:
306
453
  """Kappa-3 ratio.
307
454
 
308
455
  The Kappa-3 ratio is a generalized downside-risk ratio defined as
@@ -313,9 +460,11 @@ class _CommonModel(BaseModel): # type: ignore[misc]
313
460
 
314
461
  Returns:
315
462
  -------
316
- float | Pandas.Series[float]
317
- Kappa-3 ratio calculation with the riskfree rate and
463
+ SeriesOrFloat_co
464
+ Kappa-3 ratio calculation with the riskfree rate and.
465
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame
318
466
  Minimum Acceptable Return (MAR) both set to zero.
467
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
319
468
 
320
469
  """
321
470
  riskfree_rate: float = 0.0
@@ -328,38 +477,44 @@ class _CommonModel(BaseModel): # type: ignore[misc]
328
477
  )
329
478
 
330
479
  @property
331
- def omega_ratio(self: Self) -> float | Series[float]:
480
+ def omega_ratio(self: Self) -> SeriesOrFloat_co:
332
481
  """https://en.wikipedia.org/wiki/Omega_ratio.
333
482
 
334
483
  Returns:
335
484
  -------
336
- float | Pandas.Series[float]
337
- Omega ratio calculation
485
+ SeriesOrFloat_co
486
+ Omega ratio calculation.
487
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
488
+
338
489
 
339
490
  """
340
491
  minimum_accepted_return: float = 0.0
341
492
  return self.omega_ratio_func(min_accepted_return=minimum_accepted_return)
342
493
 
343
494
  @property
344
- def z_score(self: Self) -> float | Series[float]:
495
+ def z_score(self: Self) -> SeriesOrFloat_co:
345
496
  """https://www.investopedia.com/terms/z/zscore.asp.
346
497
 
347
498
  Returns:
348
499
  -------
349
- float | Pandas.Series[float]
500
+ SeriesOrFloat_co
350
501
  Z-score as (last return - mean return) / standard deviation of returns.
502
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
503
+
351
504
 
352
505
  """
353
506
  return self.z_score_func()
354
507
 
355
508
  @property
356
- def max_drawdown(self: Self) -> float | Series[float]:
509
+ def max_drawdown(self: Self) -> SeriesOrFloat_co:
357
510
  """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.
358
511
 
359
512
  Returns:
360
513
  -------
361
- float | Pandas.Series[float]
362
- Maximum drawdown without any limit on date range
514
+ SeriesOrFloat_co
515
+ Maximum drawdown without any limit on date range.
516
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
517
+
363
518
 
364
519
  """
365
520
  return self.max_drawdown_func()
@@ -390,26 +545,30 @@ class _CommonModel(BaseModel): # type: ignore[misc]
390
545
  ).dt.date
391
546
 
392
547
  @property
393
- def worst(self: Self) -> float | Series[float]:
548
+ def worst(self: Self) -> SeriesOrFloat_co:
394
549
  """Most negative percentage change.
395
550
 
396
551
  Returns:
397
552
  -------
398
- float | Pandas.Series[float]
399
- Most negative percentage change
553
+ SeriesOrFloat_co
554
+ Most negative percentage change.
555
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
556
+
400
557
 
401
558
  """
402
559
  observations: int = 1
403
560
  return self.worst_func(observations=observations)
404
561
 
405
562
  @property
406
- def worst_month(self: Self) -> float | Series[float]:
563
+ def worst_month(self: Self) -> SeriesOrFloat_co:
407
564
  """Most negative month.
408
565
 
409
566
  Returns:
410
567
  -------
411
- Pandas.Series[float]
412
- Most negative month
568
+ SeriesOrFloat_co
569
+ Most negative month.
570
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
571
+
413
572
 
414
573
  """
415
574
  method: LiteralPandasReindexMethod = "nearest"
@@ -418,8 +577,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
418
577
  countries = self.countries
419
578
  markets = self.markets
420
579
  except AttributeError:
421
- countries = self.constituents[0].countries
422
- markets = self.constituents[0].markets
580
+ countries = self.constituents[0].countries # type: ignore[attr-defined]
581
+ markets = self.constituents[0].markets # type: ignore[attr-defined]
423
582
 
424
583
  wmdf = self.tsdf.copy()
425
584
 
@@ -444,66 +603,67 @@ class _CommonModel(BaseModel): # type: ignore[misc]
444
603
 
445
604
  result = wmdf.ffill().pct_change().min()
446
605
 
447
- if self.tsdf.shape[1] == 1:
448
- return float(result.iloc[0])
449
- return Series(
450
- data=result,
451
- index=self.tsdf.columns,
452
- name="Worst month",
453
- dtype="float64",
454
- )
606
+ return self._coerce_result(result=result, name="Worst month")
455
607
 
456
608
  @property
457
- def positive_share(self: Self) -> float | Series[float]:
609
+ def positive_share(self: Self) -> SeriesOrFloat_co:
458
610
  """The share of percentage changes that are greater than zero.
459
611
 
460
612
  Returns:
461
613
  -------
462
- float | Pandas.Series[float]
463
- The share of percentage changes that are greater than zero
614
+ SeriesOrFloat_co
615
+ The share of percentage changes that are greater than zero.
616
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
617
+
464
618
 
465
619
  """
466
620
  return self.positive_share_func()
467
621
 
468
622
  @property
469
- def skew(self: Self) -> float | Series[float]:
623
+ def skew(self: Self) -> SeriesOrFloat_co:
470
624
  """https://www.investopedia.com/terms/s/skewness.asp.
471
625
 
472
626
  Returns:
473
627
  -------
474
- float | Pandas.Series[float]
475
- Skew of the return distribution
628
+ SeriesOrFloat_co
629
+ Skew of the return distribution.
630
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
631
+
476
632
 
477
633
  """
478
634
  return self.skew_func()
479
635
 
480
636
  @property
481
- def kurtosis(self: Self) -> float | Series[float]:
637
+ def kurtosis(self: Self) -> SeriesOrFloat_co:
482
638
  """https://www.investopedia.com/terms/k/kurtosis.asp.
483
639
 
484
640
  Returns:
485
641
  -------
486
- float | Pandas.Series[float]
487
- Kurtosis of the return distribution
642
+ SeriesOrFloat_co
643
+ Kurtosis of the return distribution.
644
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
645
+
488
646
 
489
647
  """
490
648
  return self.kurtosis_func()
491
649
 
492
650
  @property
493
- def cvar_down(self: Self) -> float | Series[float]:
651
+ def cvar_down(self: Self) -> SeriesOrFloat_co:
494
652
  """https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.
495
653
 
496
654
  Returns:
497
655
  -------
498
- float | Pandas.Series[float]
499
- Downside 95% Conditional Value At Risk "CVaR"
656
+ SeriesOrFloat_co
657
+ Downside 95% Conditional Value At Risk "CVaR".
658
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
659
+
500
660
 
501
661
  """
502
662
  level: float = 0.95
503
663
  return self.cvar_down_func(level=level)
504
664
 
505
665
  @property
506
- def var_down(self: Self) -> float | Series[float]:
666
+ def var_down(self: Self) -> SeriesOrFloat_co:
507
667
  """Downside 95% Value At Risk (VaR).
508
668
 
509
669
  The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -511,8 +671,10 @@ class _CommonModel(BaseModel): # type: ignore[misc]
511
671
 
512
672
  Returns:
513
673
  -------
514
- float | Pandas.Series[float]
515
- Downside 95% Value At Risk (VaR)
674
+ SeriesOrFloat_co
675
+ Downside 95% Value At Risk (VaR).
676
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
677
+
516
678
 
517
679
  """
518
680
  level: float = 0.95
@@ -520,16 +682,18 @@ class _CommonModel(BaseModel): # type: ignore[misc]
520
682
  return self.var_down_func(level=level, interpolation=interpolation)
521
683
 
522
684
  @property
523
- def vol_from_var(self: Self) -> float | Series[float]:
685
+ def vol_from_var(self: Self) -> SeriesOrFloat_co:
524
686
  """Implied annualized volatility from Downside 95% Value at Risk.
525
687
 
526
688
  Assumes that returns are normally distributed.
527
689
 
528
690
  Returns:
529
691
  -------
530
- float | Pandas.Series[float]
531
- Implied annualized volatility from the Downside 95% VaR using the
692
+ SeriesOrFloat_co
693
+ Implied annualized volatility from the Downside 95% VaR using the.
694
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
532
695
  assumption that returns are normally distributed.
696
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
533
697
 
534
698
  """
535
699
  level: float = 0.95
@@ -625,30 +789,30 @@ class _CommonModel(BaseModel): # type: ignore[misc]
625
789
  try:
626
790
  self.countries = countries
627
791
  except ValidationError:
628
- for serie in self.constituents:
792
+ for serie in self.constituents: # type: ignore[attr-defined]
629
793
  serie.countries = countries
630
794
  else:
631
795
  try:
632
796
  countries = self.countries
633
797
  except AttributeError:
634
- countries = self.constituents[0].countries
798
+ countries = self.constituents[0].countries # type: ignore[attr-defined]
635
799
 
636
800
  if markets:
637
801
  try:
638
802
  self.markets = markets
639
803
  except ValidationError:
640
- for serie in self.constituents:
804
+ for serie in self.constituents: # type: ignore[attr-defined]
641
805
  serie.markets = markets
642
806
  else:
643
807
  try:
644
808
  markets = self.markets
645
809
  except AttributeError:
646
- markets = self.constituents[0].markets
810
+ markets = self.constituents[0].markets # type: ignore[attr-defined]
647
811
 
648
812
  calendar = holiday_calendar(
649
813
  startyear=startyear,
650
814
  endyear=endyear,
651
- countries=countries,
815
+ countries=countries or "SE",
652
816
  markets=markets,
653
817
  custom_holidays=custom_holidays,
654
818
  )
@@ -658,7 +822,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
658
822
  for d in date_range(
659
823
  start=cast("dt.date", self.tsdf.first_valid_index()),
660
824
  end=cast("dt.date", self.tsdf.last_valid_index()),
661
- freq=CustomBusinessDay(calendar=calendar),
825
+ freq=CustomBusinessDay(calendar=calendar)
826
+ if any([countries, markets, custom_holidays])
827
+ else None,
662
828
  )
663
829
  ]
664
830
  self.tsdf = self.tsdf.reindex(labels=d_range, method=method, copy=False)
@@ -859,9 +1025,103 @@ class _CommonModel(BaseModel): # type: ignore[misc]
859
1025
 
860
1026
  return str(sheetfile)
861
1027
 
1028
+ @staticmethod
1029
+ def _ensure_labels(
1030
+ ncols: int,
1031
+ labels: list[str] | None,
1032
+ default_labels: list[str],
1033
+ ) -> list[str]:
1034
+ """Validate or infer labels for plotting."""
1035
+ if labels:
1036
+ if len(labels) != ncols:
1037
+ msg = "Must provide same number of labels as items in frame."
1038
+ raise NumberOfItemsAndLabelsNotSameError(msg)
1039
+ return labels
1040
+ return default_labels
1041
+
1042
+ @staticmethod
1043
+ def _resolve_dir(directory: DirectoryPath | None) -> Path:
1044
+ """Resolve output directory for plot files."""
1045
+ if directory:
1046
+ return Path(directory).resolve()
1047
+ if (Path.home() / "Documents").exists():
1048
+ return Path.home() / "Documents"
1049
+ return Path(stack()[2].filename).parent
1050
+
1051
+ @staticmethod
1052
+ def _hover_xy(tick_fmt: str | None) -> str:
1053
+ """Create hovertemplate for y-value and date x-axis."""
1054
+ return (
1055
+ f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
1056
+ if tick_fmt
1057
+ else "%{y}<br>%{x|%Y-%m-%d}"
1058
+ )
1059
+
1060
+ @staticmethod
1061
+ def _hover_hist(x_fmt: str | None, y_fmt: str | None) -> str:
1062
+ """Create hovertemplate for histogram plots."""
1063
+ y = f"%{{y:{y_fmt}}}" if y_fmt else "%{y}"
1064
+ x = f"%{{x:{x_fmt}}}" if x_fmt else "%{x}"
1065
+ return f"Count: {y}<br>{x}"
1066
+
1067
+ @staticmethod
1068
+ def _apply_title_logo(
1069
+ figure: Figure,
1070
+ logo: CaptorLogoType,
1071
+ title: str | None,
1072
+ *,
1073
+ add_logo: bool,
1074
+ ) -> None:
1075
+ """Apply optional title and logo to a Plotly Figure."""
1076
+ if add_logo:
1077
+ figure.add_layout_image(logo)
1078
+ if title:
1079
+ figure.update_layout(
1080
+ {"title": {"text": f"<b>{title}</b><br>", "font": {"size": 36}}},
1081
+ )
1082
+
1083
+ @staticmethod
1084
+ def _emit_output(
1085
+ figure: Figure,
1086
+ fig_config: PlotlyConfigType,
1087
+ output_type: LiteralPlotlyOutput,
1088
+ plotfile: Path,
1089
+ filename: str,
1090
+ *,
1091
+ include_plotlyjs_bool: bool,
1092
+ auto_open: bool,
1093
+ ) -> str:
1094
+ """Write a file or return inline HTML string from a Plotly Figure."""
1095
+ if output_type == "file":
1096
+ plot(
1097
+ figure_or_data=figure,
1098
+ filename=str(plotfile),
1099
+ auto_open=auto_open,
1100
+ auto_play=False,
1101
+ link_text="",
1102
+ include_plotlyjs=include_plotlyjs_bool,
1103
+ config=fig_config,
1104
+ output_type=output_type,
1105
+ )
1106
+ return str(plotfile)
1107
+
1108
+ div_id = filename.rsplit(".", 1)[0]
1109
+ return cast(
1110
+ "str",
1111
+ to_html(
1112
+ fig=figure,
1113
+ config=fig_config,
1114
+ auto_play=False,
1115
+ include_plotlyjs=include_plotlyjs_bool,
1116
+ full_html=False,
1117
+ div_id=div_id,
1118
+ ),
1119
+ )
1120
+
862
1121
  def plot_bars(
863
1122
  self: Self,
864
1123
  mode: LiteralBarPlotMode = "group",
1124
+ title: str | None = None,
865
1125
  tick_fmt: str | None = None,
866
1126
  filename: str | None = None,
867
1127
  directory: DirectoryPath | None = None,
@@ -880,6 +1140,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
880
1140
  The timeseries self.tsdf
881
1141
  mode: LiteralBarPlotMode
882
1142
  The type of bar to use
1143
+ title: str, optional
1144
+ A title above the plot
883
1145
  tick_fmt: str, optional
884
1146
  None, '%', '.1%' depending on number of decimals to show
885
1147
  filename: str, optional
@@ -904,34 +1166,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
904
1166
  Plotly Figure and a div section or a html filename with location
905
1167
 
906
1168
  """
907
- if labels:
908
- if len(labels) != self.tsdf.shape[1]:
909
- msg = "Must provide same number of labels as items in frame."
910
- raise NumberOfItemsAndLabelsNotSameError(msg)
911
- else:
912
- labels = list(self.tsdf.columns.get_level_values(0))
913
-
914
- if directory:
915
- dirpath = Path(directory).resolve()
916
- elif Path.home().joinpath("Documents").exists():
917
- dirpath = Path.home().joinpath("Documents")
918
- else:
919
- dirpath = Path(stack()[1].filename).parent
1169
+ labels = self._ensure_labels(
1170
+ ncols=self.tsdf.shape[1],
1171
+ labels=labels,
1172
+ default_labels=list(self.tsdf.columns.get_level_values(0)),
1173
+ )
920
1174
 
1175
+ dirpath = self._resolve_dir(directory=directory)
921
1176
  if not filename:
922
- filename = "".join(choice(ascii_letters) for _ in range(6)) + ".html"
923
- plotfile = dirpath.joinpath(filename)
1177
+ filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
1178
+ plotfile = dirpath / filename
924
1179
 
925
1180
  fig, logo = load_plotly_dict()
926
1181
  figure = Figure(fig)
927
1182
 
928
1183
  opacity = 0.7 if mode == "overlay" else None
929
-
930
- hovertemplate = (
931
- f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
932
- if tick_fmt
933
- else "%{y}<br>%{x|%Y-%m-%d}"
934
- )
1184
+ hovertemplate = self._hover_xy(tick_fmt=tick_fmt)
935
1185
 
936
1186
  for item in range(self.tsdf.shape[1]):
937
1187
  figure.add_bar(
@@ -943,37 +1193,29 @@ class _CommonModel(BaseModel): # type: ignore[misc]
943
1193
  )
944
1194
  figure.update_layout(barmode=mode, yaxis={"tickformat": tick_fmt})
945
1195
 
946
- if add_logo:
947
- figure.add_layout_image(logo)
1196
+ self._apply_title_logo(
1197
+ figure=figure,
1198
+ title=title,
1199
+ add_logo=add_logo,
1200
+ logo=logo,
1201
+ )
948
1202
 
949
- if output_type == "file":
950
- plot(
951
- figure_or_data=figure,
952
- filename=str(plotfile),
953
- auto_open=auto_open,
954
- auto_play=False,
955
- link_text="",
956
- include_plotlyjs=cast("bool", include_plotlyjs),
957
- config=fig["config"],
958
- output_type=output_type,
959
- )
960
- string_output = str(plotfile)
961
- else:
962
- div_id = filename.split(sep=".")[0]
963
- string_output = to_html(
964
- fig=figure,
965
- config=fig["config"],
966
- auto_play=False,
967
- include_plotlyjs=cast("bool", include_plotlyjs),
968
- full_html=False,
969
- div_id=div_id,
970
- )
1203
+ string_output = self._emit_output(
1204
+ figure=figure,
1205
+ fig_config=fig["config"],
1206
+ include_plotlyjs_bool=cast("bool", include_plotlyjs),
1207
+ output_type=output_type,
1208
+ auto_open=auto_open,
1209
+ plotfile=plotfile,
1210
+ filename=filename,
1211
+ )
971
1212
 
972
1213
  return figure, string_output
973
1214
 
974
1215
  def plot_series(
975
1216
  self: Self,
976
1217
  mode: LiteralLinePlotMode = "lines",
1218
+ title: str | None = None,
977
1219
  tick_fmt: str | None = None,
978
1220
  filename: str | None = None,
979
1221
  directory: DirectoryPath | None = None,
@@ -993,6 +1235,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
993
1235
  The timeseries self.tsdf
994
1236
  mode: LiteralLinePlotMode, default: "lines"
995
1237
  The type of scatter to use
1238
+ title: str, optional
1239
+ A title above the plot
996
1240
  tick_fmt: str, optional
997
1241
  None, '%', '.1%' depending on number of decimals to show
998
1242
  filename: str, optional
@@ -1019,32 +1263,21 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1019
1263
  Plotly Figure and a div section or a html filename with location
1020
1264
 
1021
1265
  """
1022
- if labels:
1023
- if len(labels) != self.tsdf.shape[1]:
1024
- msg = "Must provide same number of labels as items in frame."
1025
- raise NumberOfItemsAndLabelsNotSameError(msg)
1026
- else:
1027
- labels = list(self.tsdf.columns.get_level_values(0))
1028
-
1029
- if directory:
1030
- dirpath = Path(directory).resolve()
1031
- elif Path.home().joinpath("Documents").exists():
1032
- dirpath = Path.home().joinpath("Documents")
1033
- else:
1034
- dirpath = Path(stack()[1].filename).parent
1266
+ labels = self._ensure_labels(
1267
+ ncols=self.tsdf.shape[1],
1268
+ labels=labels,
1269
+ default_labels=list(self.tsdf.columns.get_level_values(0)),
1270
+ )
1035
1271
 
1272
+ dirpath = self._resolve_dir(directory=directory)
1036
1273
  if not filename:
1037
- filename = "".join(choice(ascii_letters) for _ in range(6)) + ".html"
1038
- plotfile = dirpath.joinpath(filename)
1274
+ filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
1275
+ plotfile = dirpath / filename
1039
1276
 
1040
1277
  fig, logo = load_plotly_dict()
1041
1278
  figure = Figure(fig)
1042
1279
 
1043
- hovertemplate = (
1044
- f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
1045
- if tick_fmt
1046
- else "%{y}<br>%{x|%Y-%m-%d}"
1047
- )
1280
+ hovertemplate = self._hover_xy(tick_fmt=tick_fmt)
1048
1281
 
1049
1282
  for item in range(self.tsdf.shape[1]):
1050
1283
  figure.add_scatter(
@@ -1059,7 +1292,6 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1059
1292
 
1060
1293
  if show_last:
1061
1294
  txt = f"Last {{:{tick_fmt}}}" if tick_fmt else "Last {}"
1062
-
1063
1295
  for item in range(self.tsdf.shape[1]):
1064
1296
  figure.add_scatter(
1065
1297
  x=[Series(self.tsdf.iloc[:, item]).index[-1]],
@@ -1073,31 +1305,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1073
1305
  textposition="top center",
1074
1306
  )
1075
1307
 
1076
- if add_logo:
1077
- figure.add_layout_image(logo)
1308
+ self._apply_title_logo(
1309
+ figure=figure,
1310
+ title=title,
1311
+ add_logo=add_logo,
1312
+ logo=logo,
1313
+ )
1078
1314
 
1079
- if output_type == "file":
1080
- plot(
1081
- figure_or_data=figure,
1082
- filename=str(plotfile),
1083
- auto_open=auto_open,
1084
- auto_play=False,
1085
- link_text="",
1086
- include_plotlyjs=cast("bool", include_plotlyjs),
1087
- config=fig["config"],
1088
- output_type=output_type,
1089
- )
1090
- string_output = str(plotfile)
1091
- else:
1092
- div_id = filename.split(sep=".")[0]
1093
- string_output = to_html(
1094
- fig=figure,
1095
- config=fig["config"],
1096
- auto_play=False,
1097
- include_plotlyjs=cast("bool", include_plotlyjs),
1098
- full_html=False,
1099
- div_id=div_id,
1100
- )
1315
+ string_output = self._emit_output(
1316
+ figure=figure,
1317
+ fig_config=fig["config"],
1318
+ include_plotlyjs_bool=cast("bool", include_plotlyjs),
1319
+ output_type=output_type,
1320
+ auto_open=auto_open,
1321
+ plotfile=plotfile,
1322
+ filename=filename,
1323
+ )
1101
1324
 
1102
1325
  return figure, string_output
1103
1326
 
@@ -1111,6 +1334,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1111
1334
  bargap: float = 0.0,
1112
1335
  bargroupgap: float = 0.0,
1113
1336
  curve_type: LiteralPlotlyHistogramCurveType = "kde",
1337
+ title: str | None = None,
1114
1338
  x_fmt: str | None = None,
1115
1339
  y_fmt: str | None = None,
1116
1340
  filename: str | None = None,
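
plot_bars, plot_series and plot_histogram all gain an optional title keyword in 1.9.7, applied through _apply_title_logo. A hedged usage sketch; the wrapper function and its argument are assumptions, only the keyword arguments and the (Figure, str) return come from the signatures in the diff:

    from plotly.graph_objs import Figure


    def titled_div(frame) -> tuple[Figure, str]:
        """Return the figure and an HTML div for an embedded, titled plot."""
        return frame.plot_series(
            title="Cumulative value",   # new in 1.9.7
            tick_fmt=".1%",
            output_type="div",          # html string instead of a written file
            add_logo=False,
            auto_open=False,
        )
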
@@ -1144,6 +1368,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1144
1368
  Sets the gap between bar “groups” at the same location coordinate
1145
1369
  curve_type: LiteralPlotlyHistogramCurveType, default: kde
1146
1370
  Specifies the type of distribution curve to overlay on the histogram
1371
+ title: str, optional
1372
+ A title above the plot
1147
1373
  y_fmt: str, optional
1148
1374
  None, '%', '.1%' depending on number of decimals to show on the y-axis
1149
1375
  x_fmt: str, optional
@@ -1174,32 +1400,19 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1174
1400
  Plotly Figure and a div section or a html filename with location
1175
1401
 
1176
1402
  """
1177
- if labels:
1178
- if len(labels) != self.tsdf.shape[1]:
1179
- msg = "Must provide same number of labels as items in frame."
1180
- raise NumberOfItemsAndLabelsNotSameError(msg)
1181
- else:
1182
- labels = list(self.tsdf.columns.get_level_values(0))
1183
-
1184
- if directory:
1185
- dirpath = Path(directory).resolve()
1186
- elif Path.home().joinpath("Documents").exists():
1187
- dirpath = Path.home().joinpath("Documents")
1188
- else:
1189
- dirpath = Path(stack()[1].filename).parent
1403
+ labels = self._ensure_labels(
1404
+ ncols=self.tsdf.shape[1],
1405
+ labels=labels,
1406
+ default_labels=list(self.tsdf.columns.get_level_values(0)),
1407
+ )
1190
1408
 
1409
+ dirpath = self._resolve_dir(directory=directory)
1191
1410
  if not filename:
1192
- filename = "".join(choice(ascii_letters) for _ in range(6)) + ".html"
1193
- plotfile = dirpath.joinpath(filename)
1411
+ filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
1412
+ plotfile = dirpath / filename
1194
1413
 
1195
1414
  fig_dict, logo = load_plotly_dict()
1196
-
1197
- hovertemplate = f"Count: %{{y:{y_fmt}}}" if y_fmt else "Count: %{y}"
1198
-
1199
- if x_fmt:
1200
- hovertemplate += f"<br>%{{x:{x_fmt}}}"
1201
- else:
1202
- hovertemplate += "<br>%{x}"
1415
+ hovertemplate = self._hover_hist(x_fmt=x_fmt, y_fmt=y_fmt)
1203
1416
 
1204
1417
  msg = "plot_type must be 'bars' or 'lines'."
1205
1418
  if plot_type == "bars":
@@ -1221,10 +1434,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1221
1434
  bargroupgap=bargroupgap,
1222
1435
  )
1223
1436
  elif plot_type == "lines":
1224
- hist_data = [
1225
- cast("Series[float]", self.tsdf.loc[:, ds]).dropna().tolist()
1226
- for ds in self.tsdf
1227
- ]
1437
+ hist_data = [self.tsdf[col] for col in self.tsdf.columns]
1228
1438
  figure = create_distplot(
1229
1439
  hist_data=hist_data,
1230
1440
  curve_type=curve_type,
@@ -1238,35 +1448,25 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1238
1448
  raise TypeError(msg)
1239
1449
 
1240
1450
  figure.update_layout(xaxis={"tickformat": x_fmt}, yaxis={"tickformat": y_fmt})
1241
-
1242
1451
  figure.update_xaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")
1243
1452
  figure.update_yaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")
1244
1453
 
1245
- if add_logo:
1246
- figure.add_layout_image(logo)
1454
+ self._apply_title_logo(
1455
+ figure=figure,
1456
+ title=title,
1457
+ add_logo=add_logo,
1458
+ logo=logo,
1459
+ )
1247
1460
 
1248
- if output_type == "file":
1249
- plot(
1250
- figure_or_data=figure,
1251
- filename=str(plotfile),
1252
- auto_open=auto_open,
1253
- auto_play=False,
1254
- link_text="",
1255
- include_plotlyjs=cast("bool", include_plotlyjs),
1256
- config=fig_dict["config"],
1257
- output_type=output_type,
1258
- )
1259
- string_output = str(plotfile)
1260
- else:
1261
- div_id = filename.rsplit(".", 1)[0]
1262
- string_output = to_html(
1263
- fig=figure,
1264
- config=fig_dict["config"],
1265
- auto_play=False,
1266
- include_plotlyjs=cast("bool", include_plotlyjs),
1267
- full_html=False,
1268
- div_id=div_id,
1269
- )
1461
+ string_output = self._emit_output(
1462
+ figure=figure,
1463
+ fig_config=fig_dict["config"],
1464
+ include_plotlyjs_bool=cast("bool", include_plotlyjs),
1465
+ output_type=output_type,
1466
+ auto_open=auto_open,
1467
+ plotfile=plotfile,
1468
+ filename=filename,
1469
+ )
1270
1470
 
1271
1471
  return figure, string_output
1272
1472
 
@@ -1276,7 +1476,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1276
1476
  from_date: dt.date | None = None,
1277
1477
  to_date: dt.date | None = None,
1278
1478
  periods_in_a_year_fixed: DaysInYearType | None = None,
1279
- ) -> float | Series[float]:
1479
+ ) -> SeriesOrFloat_co:
1280
1480
  """https://www.investopedia.com/terms/a/arithmeticmean.asp.
1281
1481
 
1282
1482
  Parameters
@@ -1294,41 +1494,23 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1294
1494
 
1295
1495
  Returns:
1296
1496
  -------
1297
- float | Pandas.Series[float]
1298
- Annualized arithmetic mean of returns
1497
+ SeriesOrFloat_co
1498
+ Annualized arithmetic mean of returns.
1499
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1500
+
1299
1501
 
1300
1502
  """
1301
- earlier, later = self.calc_range(
1302
- months_offset=months_from_last,
1303
- from_dt=from_date,
1304
- to_dt=to_date,
1503
+ _earlier, _later, time_factor, data = _get_date_range_and_factor(
1504
+ self=self,
1505
+ months_from_last=months_from_last,
1506
+ from_date=from_date,
1507
+ to_date=to_date,
1508
+ periods_in_a_year_fixed=periods_in_a_year_fixed,
1305
1509
  )
1306
- if periods_in_a_year_fixed:
1307
- time_factor = float(periods_in_a_year_fixed)
1308
- else:
1309
- fraction = (later - earlier).days / 365.25
1310
- how_many = self.tsdf.loc[
1311
- cast("int", earlier) : cast("int", later),
1312
- self.tsdf.columns.to_numpy()[0],
1313
- ].count()
1314
- time_factor = cast("int", how_many) / fraction
1315
1510
 
1316
- result = (
1317
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1318
- .ffill()
1319
- .pct_change()
1320
- .mean()
1321
- * time_factor
1322
- )
1511
+ result = data.ffill().pct_change().mean() * time_factor
1323
1512
 
1324
- if self.tsdf.shape[1] == 1:
1325
- return float(result.iloc[0])
1326
- return Series(
1327
- data=result,
1328
- index=self.tsdf.columns,
1329
- name="Arithmetic return",
1330
- dtype="float64",
1331
- )
1513
+ return self._coerce_result(result=result, name="Arithmetic return")
1332
1514
 
1333
1515
  def vol_func(
1334
1516
  self: Self,
@@ -1336,7 +1518,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1336
1518
  from_date: dt.date | None = None,
1337
1519
  to_date: dt.date | None = None,
1338
1520
  periods_in_a_year_fixed: DaysInYearType | None = None,
1339
- ) -> float | Series[float]:
1521
+ ) -> SeriesOrFloat_co:
1340
1522
  """Annualized volatility.
1341
1523
 
1342
1524
  Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -1356,37 +1538,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1356
1538
 
1357
1539
  Returns:
1358
1540
  -------
1359
- float | Pandas.Series[float]
1360
- Annualized volatility
1541
+ SeriesOrFloat_co
1542
+ Annualized volatility.
1543
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1361
1544
 
1362
1545
  """
1363
- earlier, later = self.calc_range(
1364
- months_offset=months_from_last,
1365
- from_dt=from_date,
1366
- to_dt=to_date,
1546
+ _earlier, _later, time_factor, data = _get_date_range_and_factor(
1547
+ self=self,
1548
+ months_from_last=months_from_last,
1549
+ from_date=from_date,
1550
+ to_date=to_date,
1551
+ periods_in_a_year_fixed=periods_in_a_year_fixed,
1367
1552
  )
1368
- if periods_in_a_year_fixed:
1369
- time_factor = float(periods_in_a_year_fixed)
1370
- else:
1371
- fraction = (later - earlier).days / 365.25
1372
- how_many = (
1373
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1374
- .count()
1375
- .iloc[0]
1376
- )
1377
- time_factor = how_many / fraction
1378
1553
 
1379
- data = self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1380
1554
  result = data.ffill().pct_change().std().mul(sqrt(time_factor))
1381
1555
 
1382
- if self.tsdf.shape[1] == 1:
1383
- return float(cast("SupportsFloat", result.iloc[0]))
1384
- return Series(
1385
- data=result,
1386
- index=self.tsdf.columns,
1387
- name="Volatility",
1388
- dtype="float64",
1389
- )
1556
+ return self._coerce_result(result=result, name="Volatility")
1390
1557
 
1391
1558
  def vol_from_var_func(
1392
1559
  self: Self,
@@ -1398,7 +1565,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1398
1565
  periods_in_a_year_fixed: DaysInYearType | None = None,
1399
1566
  *,
1400
1567
  drift_adjust: bool = False,
1401
- ) -> float | Series[float]:
1568
+ ) -> SeriesOrFloat_co:
1402
1569
  """Implied annualized volatility.
1403
1570
 
1404
1571
  Implied annualized volatility from the Downside VaR using the assumption
@@ -1425,8 +1592,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1425
1592
 
1426
1593
  Returns:
1427
1594
  -------
1428
- float | Pandas.Series[float]
1429
- Implied annualized volatility from the Downside VaR using the
1595
+ SeriesOrFloat_co
1596
+ Implied annualized volatility from the Downside VaR using the.
1597
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1430
1598
  assumption that returns are normally distributed.
1431
1599
 
1432
1600
  """
@@ -1453,7 +1621,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1453
1621
  periods_in_a_year_fixed: DaysInYearType | None = None,
1454
1622
  *,
1455
1623
  drift_adjust: bool = False,
1456
- ) -> float | Series[float]:
1624
+ ) -> SeriesOrFloat_co:
1457
1625
  """Target weight from VaR.
1458
1626
 
1459
1627
  A position weight multiplier from the ratio between a VaR implied
@@ -1486,8 +1654,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1486
1654
 
1487
1655
  Returns:
1488
1656
  -------
1489
- float | Pandas.Series[float]
1490
- A position weight multiplier from the ratio between a VaR implied
1657
+ SeriesOrFloat_co
1658
+ A position weight multiplier from the ratio between a VaR implied.
1659
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1491
1660
  volatility and a given target volatility. Multiplier = 1.0 -> target met
1492
1661
 
1493
1662
  """
@@ -1517,7 +1686,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1517
1686
  periods_in_a_year_fixed: DaysInYearType | None = None,
1518
1687
  *,
1519
1688
  drift_adjust: bool = False,
1520
- ) -> float | Series[float]:
1689
+ ) -> SeriesOrFloat_co:
1521
1690
  """Volatility implied from VaR or Target Weight.
1522
1691
 
1523
1692
  The function returns a position weight multiplier from the ratio between
@@ -1552,8 +1721,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1552
1721
 
1553
1722
  Returns:
1554
1723
  -------
1555
- float | Pandas.Series[float]
1556
- Target volatility if target_vol is provided otherwise the VaR
1724
+ SeriesOrFloat_co
1725
+ Target volatility if target_vol is provided otherwise the VaR.
1726
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1557
1727
  implied volatility.
1558
1728
 
1559
1729
  """
@@ -1567,23 +1737,25 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1567
1737
  else:
1568
1738
  fraction = (later - earlier).days / 365.25
1569
1739
  how_many = (
1570
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1740
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1571
1741
  .count()
1572
1742
  .iloc[0]
1573
1743
  )
1574
1744
  time_factor = how_many / fraction
1575
1745
  if drift_adjust:
1576
1746
  imp_vol = (-sqrt(time_factor) / norm.ppf(level)) * (
1577
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1747
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1578
1748
  .ffill()
1579
1749
  .pct_change()
1580
1750
  .quantile(1 - level, interpolation=interpolation)
1581
- - self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1751
+ - self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1582
1752
  .ffill()
1583
1753
  .pct_change()
1584
1754
  .sum()
1585
1755
  / len(
1586
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1756
+ self.tsdf.loc[
1757
+ cast("Timestamp", earlier) : cast("Timestamp", later)
1758
+ ]
1587
1759
  .ffill()
1588
1760
  .pct_change(),
1589
1761
  )
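
As a sanity check of the implied-volatility identity used in the branches above: for normally distributed returns the 1 - level quantile is norm.ppf(1 - level) times the per-period volatility, so dividing it back out and annualising recovers sigma * sqrt(time_factor). Toy numbers, assumed for illustration:

    from numpy import sqrt
    from scipy.stats import norm

    level = 0.95
    time_factor = 252.0
    daily_sigma = 0.01

    daily_var = norm.ppf(1 - level) * daily_sigma           # roughly -0.0164
    implied_vol = -sqrt(time_factor) * daily_var / norm.ppf(level)

    print(round(float(implied_vol), 4))                     # 0.1587, i.e. 0.01 * sqrt(252)
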
@@ -1591,7 +1763,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1591
1763
  else:
1592
1764
  imp_vol = (
1593
1765
  -sqrt(time_factor)
1594
- * self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1766
+ * self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1595
1767
  .ffill()
1596
1768
  .pct_change()
1597
1769
  .quantile(1 - level, interpolation=interpolation)
@@ -1610,14 +1782,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1610
1782
  result = imp_vol
1611
1783
  label = f"Imp vol from VaR {level:.0%}"
1612
1784
 
1613
- if self.tsdf.shape[1] == 1:
1614
- return float(cast("SupportsFloat", result.iloc[0]))
1615
- return Series(
1616
- data=result,
1617
- index=self.tsdf.columns,
1618
- name=label,
1619
- dtype="float64",
1620
- )
1785
+ return self._coerce_result(result=result, name=label)
1621
1786
 
1622
1787
  def cvar_down_func(
1623
1788
  self: Self,
@@ -1625,7 +1790,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1625
1790
  months_from_last: int | None = None,
1626
1791
  from_date: dt.date | None = None,
1627
1792
  to_date: dt.date | None = None,
1628
- ) -> float | Series[float]:
1793
+ ) -> SeriesOrFloat_co:
1629
1794
  """Downside Conditional Value At Risk "CVaR".
1630
1795
 
1631
1796
  https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.
@@ -1644,8 +1809,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1644
1809
 
1645
1810
  Returns:
1646
1811
  -------
1647
- float | Pandas.Series[float]
1648
- Downside Conditional Value At Risk "CVaR"
1812
+ SeriesOrFloat_co
1813
+ Downside Conditional Value At Risk "CVaR".
1814
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1649
1815
 
1650
1816
  """
1651
1817
  earlier, later = self.calc_range(
@@ -1653,32 +1819,19 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1653
1819
  from_dt=from_date,
1654
1820
  to_dt=to_date,
1655
1821
  )
1656
- cvar_df = self.tsdf.loc[cast("int", earlier) : cast("int", later)].copy(
1657
- deep=True
1658
- )
1822
+ cvar_df = self.tsdf.loc[
1823
+ cast("Timestamp", earlier) : cast("Timestamp", later)
1824
+ ].copy(deep=True)
1659
1825
  result = [
1660
- cvar_df.loc[:, x] # type: ignore[call-overload]
1661
- .ffill()
1662
- .pct_change()
1663
- .sort_values()
1664
- .iloc[
1665
- : ceil(
1666
- cast(
1667
- "int",
1668
- (1 - level) * cvar_df.loc[:, x].ffill().pct_change().count(),
1669
- )
1670
- ),
1671
- ]
1672
- .mean()
1673
- for x in self.tsdf
1826
+ (r := cvar_df[col].ffill().pct_change().sort_values())[
1827
+ : ceil((1 - level) * r.count())
1828
+ ].mean()
1829
+ for col in cvar_df.columns
1674
1830
  ]
1675
- if self.tsdf.shape[1] == 1:
1676
- return float(result[0])
1677
- return Series(
1678
- data=result,
1679
- index=self.tsdf.columns,
1831
+
1832
+ return self._coerce_result(
1833
+ result=cast("Series[float]", result),
1680
1834
  name=f"CVaR {level:.1%}",
1681
- dtype="float64",
1682
1835
  )
1683
1836
 
1684
1837
  def lower_partial_moment_func(
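
The rewritten cvar_down_func takes the mean of the worst ceil((1 - level) * n) observed percentage changes. A toy check of that arithmetic on a made-up return series:

    from math import ceil

    from pandas import Series

    level = 0.95
    returns = Series(
        data=[0.01, -0.02, 0.005, -0.03, 0.015, -0.01, 0.02, -0.025, 0.0, 0.01],
    )

    worst = returns.sort_values()[: ceil((1 - level) * returns.count())]
    print(float(worst.mean()))   # ceil(0.5) = 1 observation kept -> -0.03
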
@@ -1689,7 +1842,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1689
1842
  from_date: dt.date | None = None,
1690
1843
  to_date: dt.date | None = None,
1691
1844
  periods_in_a_year_fixed: DaysInYearType | None = None,
1692
- ) -> float | Series[float]:
1845
+ ) -> SeriesOrFloat_co:
1693
1846
  """Downside Deviation if order set to 2.
1694
1847
 
1695
1848
  If order is set to 2 the function calculates the standard
@@ -1716,8 +1869,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1716
1869
 
1717
1870
  Returns:
1718
1871
  -------
1719
- float | Pandas.Series[float]
1720
- Downside deviation if order set to 2
1872
+ SeriesOrFloat_co
1873
+ Downside deviation if order set to 2.
1874
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1721
1875
 
1722
1876
  """
1723
1877
  msg = f"'order' must be 2 or 3, got {order!r}."
@@ -1732,7 +1886,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1732
1886
  )
1733
1887
 
1734
1888
  how_many = (
1735
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1889
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1736
1890
  .ffill()
1737
1891
  .pct_change()
1738
1892
  .count(numeric_only=True)
@@ -1749,7 +1903,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1749
1903
 
1750
1904
  per_period_mar = min_accepted_return / time_factor
1751
1905
  diff = (
1752
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1906
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1753
1907
  .ffill()
1754
1908
  .pct_change()
1755
1909
  .sub(per_period_mar)
@@ -1762,13 +1916,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1762
1916
 
1763
1917
  dd_order = 2
1764
1918
 
1765
- if self.tsdf.shape[1] == 1:
1766
- return float(result.iloc[0])
1767
- return Series(
1768
- data=result,
1769
- index=self.tsdf.columns,
1919
+ return self._coerce_result(
1920
+ result=result,
1770
1921
  name="Downside deviation" if order == dd_order else f"LPM{order}",
1771
- dtype="float64",
1772
1922
  )
1773
1923
 
1774
1924
  def geo_ret_func(
@@ -1776,7 +1926,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1776
1926
  months_from_last: int | None = None,
1777
1927
  from_date: dt.date | None = None,
1778
1928
  to_date: dt.date | None = None,
1779
- ) -> float | Series[float]:
1929
+ ) -> SeriesOrFloat_co:
1780
1930
  """Compounded Annual Growth Rate (CAGR).
1781
1931
 
1782
1932
  https://www.investopedia.com/terms/c/cagr.asp.
@@ -1793,8 +1943,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1793
1943
 
1794
1944
  Returns:
1795
1945
  -------
1796
- float | Pandas.Series[float]
1797
- Compounded Annual Growth Rate (CAGR)
1946
+ SeriesOrFloat_co
1947
+ Compounded Annual Growth Rate (CAGR).
1948
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1798
1949
 
1799
1950
  """
1800
1951
  zero = 0.0
@@ -1815,13 +1966,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1815
1966
 
1816
1967
  result = (self.tsdf.loc[later] / self.tsdf.loc[earlier]) ** (1 / fraction) - 1
1817
1968
 
1818
- if self.tsdf.shape[1] == 1:
1819
- return float(result.iloc[0])
1820
- return Series(
1821
- data=result.to_numpy(),
1822
- index=self.tsdf.columns,
1969
+ return self._coerce_result(
1970
+ result=cast("Series[float]", result),
1823
1971
  name="Geometric return",
1824
- dtype="float64",
1825
1972
  )
1826
1973
 
1827
1974
  def skew_func(
@@ -1829,7 +1976,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1829
1976
  months_from_last: int | None = None,
1830
1977
  from_date: dt.date | None = None,
1831
1978
  to_date: dt.date | None = None,
1832
- ) -> float | Series[float]:
1979
+ ) -> SeriesOrFloat_co:
1833
1980
  """Skew of the return distribution.
1834
1981
 
1835
1982
  https://www.investopedia.com/terms/s/skewness.asp.
@@ -1846,8 +1993,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1846
1993
 
1847
1994
  Returns:
1848
1995
  -------
1849
- float | Pandas.Series[float]
1850
- Skew of the return distribution
1996
+ SeriesOrFloat_co
1997
+ Skew of the return distribution.
1998
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1851
1999
 
1852
2000
  """
1853
2001
  earlier, later = self.calc_range(
@@ -1855,30 +2003,24 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1855
2003
  from_dt=from_date,
1856
2004
  to_dt=to_date,
1857
2005
  )
1858
- result: NDArray[float64] = skew(
1859
- a=self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1860
- .ffill()
1861
- .pct_change()
1862
- .to_numpy(),
2006
+ result = skew(
2007
+ a=(
2008
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
2009
+ .ffill()
2010
+ .pct_change()
2011
+ ),
1863
2012
  bias=True,
1864
2013
  nan_policy="omit",
1865
2014
  )
1866
2015
 
1867
- if self.tsdf.shape[1] == 1:
1868
- return float(result[0])
1869
- return Series(
1870
- data=result,
1871
- index=self.tsdf.columns,
1872
- name="Skew",
1873
- dtype="float64",
1874
- )
2016
+ return self._coerce_result(result=cast("Series[float]", result), name="Skew")
1875
2017
 
1876
2018
  def kurtosis_func(
1877
2019
  self: Self,
1878
2020
  months_from_last: int | None = None,
1879
2021
  from_date: dt.date | None = None,
1880
2022
  to_date: dt.date | None = None,
1881
- ) -> float | Series[float]:
2023
+ ) -> SeriesOrFloat_co:
1882
2024
  """Kurtosis of the return distribution.
1883
2025
 
1884
2026
  https://www.investopedia.com/terms/k/kurtosis.asp.
@@ -1895,8 +2037,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1895
2037
 
1896
2038
  Returns:
1897
2039
  -------
1898
- float | Pandas.Series[float]
1899
- Kurtosis of the return distribution
2040
+ SeriesOrFloat_co
2041
+ Kurtosis of the return distribution.
2042
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1900
2043
 
1901
2044
  """
1902
2045
  earlier, later = self.calc_range(
@@ -1904,9 +2047,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1904
2047
  from_dt=from_date,
1905
2048
  to_dt=to_date,
1906
2049
  )
1907
- result: NDArray[float64] = kurtosis(
2050
+ result = kurtosis(
1908
2051
  a=(
1909
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
2052
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1910
2053
  .ffill()
1911
2054
  .pct_change()
1912
2055
  ),
@@ -1915,13 +2058,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1915
2058
  nan_policy="omit",
1916
2059
  )
1917
2060
 
1918
- if self.tsdf.shape[1] == 1:
1919
- return float(result[0])
1920
- return Series(
1921
- data=result,
1922
- index=self.tsdf.columns,
2061
+ return self._coerce_result(
2062
+ result=cast("Series[float]", result),
1923
2063
  name="Kurtosis",
1924
- dtype="float64",
1925
2064
  )
1926
2065
 
1927
2066
  def max_drawdown_func(
@@ -1930,7 +2069,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1930
2069
  from_date: dt.date | None = None,
1931
2070
  to_date: dt.date | None = None,
1932
2071
  min_periods: int = 1,
1933
- ) -> float | Series[float]:
2072
+ ) -> SeriesOrFloat_co:
1934
2073
  """Maximum drawdown without any limit on date range.
1935
2074
 
1936
2075
  https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.
@@ -1949,8 +2088,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1949
2088
 
1950
2089
  Returns:
1951
2090
  -------
1952
- float | Pandas.Series[float]
1953
- Maximum drawdown without any limit on date range
2091
+ SeriesOrFloat_co
2092
+ Maximum drawdown without any limit on date range.
2093
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
1954
2094
 
1955
2095
  """
1956
2096
  earlier, later = self.calc_range(
@@ -1959,26 +2099,20 @@ class _CommonModel(BaseModel): # type: ignore[misc]
1959
2099
  to_dt=to_date,
1960
2100
  )
1961
2101
  result = (
1962
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
1963
- / self.tsdf.loc[cast("int", earlier) : cast("int", later)]
2102
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
2103
+ / self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
1964
2104
  .expanding(min_periods=min_periods)
1965
2105
  .max()
1966
2106
  ).min() - 1
1967
- if self.tsdf.shape[1] == 1:
1968
- return float(result.iloc[0])
1969
- return Series(
1970
- data=result,
1971
- index=self.tsdf.columns,
1972
- name="Max drawdown",
1973
- dtype="float64",
1974
- )
2107
+
2108
+ return self._coerce_result(result=result, name="Max drawdown")
1975
2109
 
1976
2110
  def positive_share_func(
1977
2111
  self: Self,
1978
2112
  months_from_last: int | None = None,
1979
2113
  from_date: dt.date | None = None,
1980
2114
  to_date: dt.date | None = None,
1981
- ) -> float | Series[float]:
2115
+ ) -> SeriesOrFloat_co:
1982
2116
  """Calculate share of percentage changes that are greater than zero.
1983
2117
 
1984
2118
  Parameters
@@ -1993,8 +2127,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Calculate share of percentage changes that are greater than zero
+ SeriesOrFloat_co
+ Calculate share of percentage changes that are greater than zero.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  zero: float = 0.0
@@ -2004,10 +2139,10 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  to_dt=to_date,
  )
  pos = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()[1:][
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()[1:]
  > zero
@@ -2015,20 +2150,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  .count()
  )
  tot = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()
  .count()
  )
- share = pos / tot
- if self.tsdf.shape[1] == 1:
- return float(share.iloc[0])
- return Series(
- data=share,
- index=self.tsdf.columns,
- name="Positive share",
- dtype="float64",
- )
+ result = pos / tot
+
+ return self._coerce_result(result=result, name="Positive share")

  def ret_vol_ratio_func(
  self: Self,
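The positive-share logic above counts returns greater than zero and divides by the total number of returns. A hedged standalone sketch of the same idea on made-up data:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 101.0, 100.5, 102.0, 101.0]})
rets = prices.ffill().pct_change().iloc[1:]          # drop the initial NaN row
positive_share = (rets > 0.0).sum() / rets.count()   # 2 of 4 changes are positive
print(positive_share)                                # fund_a    0.5
```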
@@ -2037,7 +2166,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
  periods_in_a_year_fixed: DaysInYearType | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Ratio between arithmetic mean of returns and annualized volatility.

  The ratio of annualized arithmetic mean of returns and annualized
@@ -2063,12 +2192,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Ratio of the annualized arithmetic mean of returns and annualized
+ SeriesOrFloat_co
+ Ratio of the annualized arithmetic mean of returns and annualized.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
  volatility or, if risk-free return provided, Sharpe ratio

  """
- ratio = Series(
+ result = Series(
  self.arithmetic_ret_func(
  months_from_last=months_from_last,
  from_date=from_date,
@@ -2083,14 +2213,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  periods_in_a_year_fixed=periods_in_a_year_fixed,
  )

- if self.tsdf.shape[1] == 1:
- return float(cast("float64", ratio.iloc[0]))
- return Series(
- data=ratio,
- index=self.tsdf.columns,
- name="Return vol ratio",
- dtype="float64",
- )
+ return self._coerce_result(result=result, name="Return vol ratio")

  def sortino_ratio_func(
  self: Self,
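ret_vol_ratio_func builds on arithmetic_ret_func and vol_func, which are outside this diff; only the result handling changes here. The sketch below illustrates the underlying ratio directly on raw data, with an assumed 252 observations per year (openseries derives the annualization factor from the actual date range):

```python
from numpy import sqrt
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 100.4, 99.9, 100.8, 101.2, 100.9]})
rets = prices.ffill().pct_change().iloc[1:]
periods_per_year = 252                           # assumption: daily data
ann_mean = rets.mean() * periods_per_year        # annualized arithmetic mean
ann_vol = rets.std() * sqrt(periods_per_year)    # annualized volatility
ret_vol_ratio = ann_mean / ann_vol               # Sharpe ratio if the mean is in excess of risk-free
print(ret_vol_ratio)
```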
@@ -2101,7 +2224,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
  periods_in_a_year_fixed: DaysInYearType | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Sortino Ratio or Kappa3 Ratio.

  The Sortino ratio calculated as ( return - risk free return )
@@ -2134,12 +2257,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Sortino ratio calculated as ( return - riskfree return ) /
+ SeriesOrFloat_co
+ Sortino ratio calculated as ( return - riskfree return ) /.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
  downside deviation (std dev of returns below MAR)

  """
- ratio = Series(
+ result = Series(
  self.arithmetic_ret_func(
  months_from_last=months_from_last,
  from_date=from_date,
@@ -2157,14 +2281,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  )

  sortino_order = 2
- if self.tsdf.shape[1] == 1:
- return float(cast("float64", ratio.iloc[0]))
- return Series(
- data=ratio,
- index=self.tsdf.columns,
- name="Sortino ratio" if order == sortino_order else "Kappa-3 ratio",
- dtype="float64",
- )
+ name = "Sortino ratio" if order == sortino_order else "Kappa-3 ratio"
+
+ return self._coerce_result(result=result, name=name)

  def omega_ratio_func(
  self: Self,
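The Sortino calculation divides an (excess) annualized return by downside deviation; this diff only touches the return type and result coercion. The sketch below is a generic illustration of the ratio with assumed daily data and a minimum accepted return of zero, not the exact openseries convention for the downside denominator or the Kappa-3 variant:

```python
from numpy import sqrt
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 100.5, 99.8, 100.9, 100.2, 101.1]})
rets = prices.ffill().pct_change().iloc[1:]
periods_per_year = 252                       # assumption: daily data
mar = 0.0                                    # minimum accepted return
downside = (rets - mar).clip(upper=0.0)      # keep only shortfalls below the MAR
downside_dev = sqrt((downside**2).mean()) * sqrt(periods_per_year)
sortino = (rets.mean() * periods_per_year - mar) / downside_dev
print(sortino)
```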
@@ -2172,7 +2291,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  months_from_last: int | None = None,
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Omega Ratio.

  The Omega Ratio compares returns above a certain target level
@@ -2194,8 +2313,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Omega ratio calculation
+ SeriesOrFloat_co
+ Omega ratio calculation.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  earlier, later = self.calc_range(
@@ -2204,29 +2324,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  to_dt=to_date,
  )
  retdf = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()
  )
  pos = retdf[retdf > min_accepted_return].sub(min_accepted_return).sum()
  neg = retdf[retdf < min_accepted_return].sub(min_accepted_return).sum()
- ratio = pos / -neg
+ result = pos / -neg

- if self.tsdf.shape[1] == 1:
- return float(cast("float64", ratio.iloc[0]))
- return Series(
- data=ratio,
- index=self.tsdf.columns,
- name="Omega ratio",
- dtype="float64",
- )
+ return self._coerce_result(result=result, name="Omega ratio")

  def value_ret_func(
  self: Self,
  months_from_last: int | None = None,
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Calculate simple return.

  Parameters
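The Omega ratio lines above sum excess returns above the target and divide by the absolute sum of shortfalls below it. A minimal standalone sketch mirroring those lines on made-up data:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 100.6, 99.9, 100.4, 101.0]})
rets = prices.ffill().pct_change()
mar = 0.0                                          # minimum accepted return
pos = rets[rets > mar].sub(mar).sum()              # gains above the target
neg = rets[rets < mar].sub(mar).sum()              # shortfalls below the target
omega = pos / -neg
print(omega)
```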
@@ -2241,8 +2354,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Calculate simple return
+ SeriesOrFloat_co
+ Calculate simple return.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  zero: float = 0.0
@@ -2258,22 +2372,18 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  )
  raise InitialValueZeroError(msg)

- result = self.tsdf.loc[later] / self.tsdf.loc[earlier] - 1
-
- if self.tsdf.shape[1] == 1:
- return float(result.iloc[0])
- return Series(
- data=result.to_numpy(),
- index=self.tsdf.columns,
- name="Simple return",
- dtype="float64",
+ result = cast(
+ "Series[float]",
+ self.tsdf.loc[later] / self.tsdf.loc[earlier] - 1,
  )

+ return self._coerce_result(result=result, name="Simple return")
+
  def value_ret_calendar_period(
  self: Self,
  year: int,
  month: int | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Calculate simple return for a specific calendar period.

  Parameters
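value_ret_func reduces to last price over first price minus one; only the coercion of the result changes in the hunk above. As a plain-pandas illustration with made-up values:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 101.5, 103.0]})
simple_return = prices.iloc[-1] / prices.iloc[0] - 1   # Series: 103 / 100 - 1 = 0.03
print(simple_return)
```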
@@ -2285,8 +2395,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Calculate simple return for a specific calendar period
+ SeriesOrFloat_co
+ Calculate simple return for a specific calendar period.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  if month is None:
@@ -2296,16 +2407,10 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  vrdf = self.tsdf.copy()
  vrdf.index = DatetimeIndex(vrdf.index)
  resultdf = DataFrame(vrdf.ffill().pct_change())
- result = resultdf.loc[period] + 1
- cal_period = result.cumprod(axis="index").iloc[-1] - 1
- if self.tsdf.shape[1] == 1:
- return float(cal_period.iloc[0])
- return Series(
- data=cal_period,
- index=self.tsdf.columns,
- name=period,
- dtype="float64",
- )
+ plus_one = resultdf.loc[period] + 1
+ result = plus_one.cumprod(axis="index").iloc[-1] - 1
+
+ return self._coerce_result(result=result, name=period)

  def var_down_func(
  self: Self,
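value_ret_calendar_period compounds the periodic returns that fall inside a year or year-month label. A hedged sketch of the same idea using pandas partial-string indexing (dates and values below are made up):

```python
from pandas import DataFrame, date_range

idx = date_range("2023-12-29", periods=25, freq="B")        # spills into early 2024
prices = DataFrame({"fund_a": [100.0 + i * 0.2 for i in range(25)]}, index=idx)
rets = prices.ffill().pct_change()
period = "2024-01"                                          # "YYYY" or "YYYY-MM", as in the method
cal_return = (rets.loc[period] + 1).cumprod(axis="index").iloc[-1] - 1
print(cal_return)                                           # compounded January return per column
```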
@@ -2314,7 +2419,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
  interpolation: LiteralQuantileInterp = "lower",
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Downside Value At Risk, "VaR".

  The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -2336,8 +2441,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Downside Value At Risk
+ SeriesOrFloat_co
+ Downside Value At Risk.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  earlier, later = self.calc_range(
@@ -2346,20 +2452,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  to_dt=to_date,
  )
  result = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()
  .quantile(1 - level, interpolation=interpolation)
  )

- if self.tsdf.shape[1] == 1:
- return float(result.iloc[0])
- return Series(
- data=result,
- index=self.tsdf.columns,
- name=f"VaR {level:.1%}",
- dtype="float64",
- )
+ return self._coerce_result(result=result, name=f"VaR {level:.1%}")

  def worst_func(
  self: Self,
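The downside VaR in the hunk above is simply the (1 - level) quantile of periodic returns with "lower" interpolation. A minimal sketch with made-up prices:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 99.0, 100.5, 98.7, 99.9, 100.8, 99.5]})
rets = prices.ffill().pct_change()
level = 0.95
var_95 = rets.quantile(1 - level, interpolation="lower")  # per-column 95% VaR (a negative return)
print(var_95)
```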
@@ -2367,7 +2466,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  months_from_last: int | None = None,
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Most negative percentage change over a rolling number of observations.

  Parameters
@@ -2384,8 +2483,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Most negative percentage change over a rolling number of observations
+ SeriesOrFloat_co
+ Most negative percentage change over a rolling number of observations.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.
  within a chosen date range

  """
@@ -2395,7 +2495,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  to_dt=to_date,
  )
  result = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()
  .rolling(observations, min_periods=observations)
@@ -2403,21 +2503,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  .min()
  )

- if self.tsdf.shape[1] == 1:
- return float(result.iloc[0])
- return Series(
- data=result,
- index=self.tsdf.columns,
- name="Worst",
- dtype="float64",
- )
+ return self._coerce_result(result=result, name="Worst")

  def z_score_func(
  self: Self,
  months_from_last: int | None = None,
  from_date: dt.date | None = None,
  to_date: dt.date | None = None,
- ) -> float | Series[float]:
+ ) -> SeriesOrFloat_co:
  """Z-score as (last return - mean return) / standard deviation of returns.

  https://www.investopedia.com/terms/z/zscore.asp.
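worst_func looks for the most negative return aggregated over a rolling window of observations. The unchanged aggregation step between the two hunks above is not shown in this diff, so the .sum() in the sketch below is an assumption about how the window is combined:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 99.5, 100.2, 98.0, 97.5, 99.0]})
observations = 2                                   # rolling window length
worst = (
    prices.ffill()
    .pct_change()
    .rolling(observations, min_periods=observations)
    .sum()                                         # assumed window aggregation
    .min()
)
print(worst)
```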
@@ -2434,8 +2527,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  Returns:
  -------
- float | Pandas.Series[float]
- Z-score as (last return - mean return) / standard deviation of returns
+ SeriesOrFloat_co
+ Z-score as (last return - mean return) / standard deviation of returns.
+ Returns float for OpenTimeSeries, Series[float] for OpenFrame.

  """
  earlier, later = self.calc_range(
@@ -2444,20 +2538,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]
  to_dt=to_date,
  )
  zscframe = (
- self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+ self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
  .ffill()
  .pct_change()
  )
  result = (zscframe.iloc[-1] - zscframe.mean()) / zscframe.std()

- if self.tsdf.shape[1] == 1:
- return float(result.iloc[0])
- return Series(
- data=result,
- index=self.tsdf.columns,
- name="Z-score",
- dtype="float64",
- )
+ return self._coerce_result(result=result, name="Z-score")

  def rolling_cvar_down(
  self: Self,
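The z-score lines above standardize the latest return against the mean and standard deviation of the return series. Mirrored on a plain DataFrame with made-up prices:

```python
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 100.3, 99.8, 100.6, 100.1, 100.9]})
rets = prices.ffill().pct_change()
z_score = (rets.iloc[-1] - rets.mean()) / rets.std()  # per-column z-score of the last return
print(z_score)
```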
@@ -2601,7 +2688,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

  s = log(self.tsdf.iloc[:, column]).diff()
  volseries = s.rolling(window=observations, min_periods=observations).std(
- ddof=dlta_degr_freedms
+ ddof=dlta_degr_freedms,
  ) * sqrt(time_factor)

  voldf = volseries.dropna().to_frame()
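The final hunk only adds a trailing comma, but for context the rolling volatility it touches is a rolling standard deviation of log returns scaled by the square root of an annualization factor. A standalone sketch, assuming 252 observations per year and an illustrative column name:

```python
from numpy import log, sqrt
from pandas import DataFrame

prices = DataFrame({"fund_a": [100.0, 100.4, 99.9, 100.7, 101.1, 100.8, 101.5]})
observations = 3
time_factor = 252                                  # assumption: daily observations
logret = log(prices["fund_a"]).diff()              # periodic log returns
rolling_vol = (
    logret.rolling(window=observations, min_periods=observations).std(ddof=1)
    * sqrt(time_factor)
).dropna()
print(rolling_vol)
```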