openseries 1.7.8-py3-none-any.whl → 1.8.1-py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
openseries/__init__.py CHANGED
@@ -10,6 +10,7 @@ from .datefixer import (
 )
 from .frame import OpenFrame
 from .load_plotly import load_plotly_dict
+from .owntypes import Self, ValueType
 from .portfoliotools import (
     constrain_optimized_portfolios,
     efficient_frontier,
@@ -19,12 +20,12 @@ from .portfoliotools import (
 )
 from .series import OpenTimeSeries, timeseries_chain
 from .simulation import ReturnSimulation
-from .types import ValueType

 __all__ = [
     "OpenFrame",
     "OpenTimeSeries",
     "ReturnSimulation",
+    "Self",
     "ValueType",
     "constrain_optimized_portfolios",
     "date_fix",
openseries/_common_model.py CHANGED
@@ -1,5 +1,6 @@
 """Defining the _CommonModel class."""

+# mypy: disable-error-code="no-any-return"
 from __future__ import annotations

 import datetime as dt
@@ -13,11 +14,32 @@ from typing import TYPE_CHECKING, Any, SupportsFloat, cast

 from numpy import float64, inf, isnan, log, maximum, sqrt

-if TYPE_CHECKING:
-    from numpy.typing import NDArray  # pragma: no cover
+from .owntypes import (
+    DateAlignmentError,
+    InitialValueZeroError,
+    NumberOfItemsAndLabelsNotSameError,
+    Self,
+)
+
+if TYPE_CHECKING:  # pragma: no cover
+    from numpy.typing import NDArray
+    from openpyxl.worksheet.worksheet import Worksheet
+
+    from .owntypes import (
+        CountriesType,
+        DaysInYearType,
+        LiteralBarPlotMode,
+        LiteralJsonOutput,
+        LiteralLinePlotMode,
+        LiteralNanMethod,
+        LiteralPandasReindexMethod,
+        LiteralPlotlyJSlib,
+        LiteralPlotlyOutput,
+        LiteralQuantileInterp,
+        ValueType,
+    )
 from openpyxl.utils.dataframe import dataframe_to_rows
 from openpyxl.workbook.workbook import Workbook
-from openpyxl.worksheet.worksheet import Worksheet
 from pandas import (
     DataFrame,
     DatetimeIndex,
@@ -37,7 +59,6 @@ from scipy.stats import (  # type: ignore[import-untyped,unused-ignore]
     norm,
     skew,
 )
-from typing_extensions import Self

 from ._risk import (
     _cvar_down_calc,
@@ -49,19 +70,6 @@ from .datefixer import (
     holiday_calendar,
 )
 from .load_plotly import load_plotly_dict
-from .types import (
-    CountriesType,
-    DaysInYearType,
-    LiteralBarPlotMode,
-    LiteralJsonOutput,
-    LiteralLinePlotMode,
-    LiteralNanMethod,
-    LiteralPandasReindexMethod,
-    LiteralPlotlyJSlib,
-    LiteralPlotlyOutput,
-    LiteralQuantileInterp,
-    ValueType,
-)


 # noinspection PyTypeChecker
@@ -98,7 +106,7 @@ class _CommonModel(BaseModel):
         The first date in the timeseries

         """
-        return cast(dt.date, self.tsdf.index[0])
+        return cast("dt.date", self.tsdf.index[0])

     @property
     def last_idx(self: Self) -> dt.date:
@@ -110,7 +118,7 @@ class _CommonModel(BaseModel):
         The last date in the timeseries

         """
-        return cast(dt.date, self.tsdf.index[-1])
+        return cast("dt.date", self.tsdf.index[-1])

     @property
     def span_of_days(self: Self) -> int:
@@ -504,18 +512,18 @@ class _CommonModel(BaseModel):
                         "Argument months_offset implies start"
                         "date before first date in series."
                     )
-                    raise ValueError(msg)
+                    raise DateAlignmentError(msg)
                 later = self.last_idx
             else:
                 if from_dt is not None:
                     if from_dt < self.first_idx:
                         msg = "Given from_dt date < series start"
-                        raise ValueError(msg)
+                        raise DateAlignmentError(msg)
                     earlier = from_dt
                 if to_dt is not None:
                     if to_dt > self.last_idx:
                         msg = "Given to_dt date > series end"
-                        raise ValueError(msg)
+                        raise DateAlignmentError(msg)
                     later = to_dt
         while earlier not in self.tsdf.index:
             earlier -= dt.timedelta(days=1)
@@ -552,8 +560,8 @@ class _CommonModel(BaseModel):
         d_range = [
             d.date()
             for d in date_range(
-                start=cast(dt.date, self.tsdf.first_valid_index()),
-                end=cast(dt.date, self.tsdf.last_valid_index()),
+                start=cast("dt.date", self.tsdf.first_valid_index()),
+                end=cast("dt.date", self.tsdf.last_valid_index()),
                 freq=CustomBusinessDay(calendar=calendar),
             )
         ]
@@ -672,15 +680,15 @@ class _CommonModel(BaseModel):
             if what_output == "tsdf":
                 values = self.tsdf.iloc[:, 0].tolist()
             else:
-                values = list(cast(list[float], data.get("values")))
+                values = list(cast("list[float]", data.get("values")))
             for item in cleaner_list:
                 data.pop(item)
-            valuetype = cast(ValueType, data.get("valuetype")).value
+            valuetype = cast("ValueType", data.get("valuetype")).value
             data.update({"valuetype": valuetype})
             data.update({"values": values})
             output.append(dict(data))
         else:
-            for serie in cast(list[Any], data.get("constituents")):
+            for serie in cast("list[Any]", data.get("constituents")):
                 if what_output == "tsdf":
                     values = serie.tsdf.iloc[:, 0].tolist()
                 else:
@@ -688,7 +696,7 @@ class _CommonModel(BaseModel):
                 itemdata = dict(serie.__dict__)
                 for item in cleaner_list:
                     itemdata.pop(item)
-                valuetype = cast(ValueType, itemdata["valuetype"]).value
+                valuetype = cast("ValueType", itemdata["valuetype"]).value
                 itemdata.update({"valuetype": valuetype})
                 itemdata.update({"values": values})
                 output.append(dict(itemdata))
@@ -742,10 +750,10 @@ class _CommonModel(BaseModel):
         wrksheet = wrkbook.active

         if sheet_title:
-            cast(Worksheet, wrksheet).title = sheet_title
+            cast("Worksheet", wrksheet).title = sheet_title

         for row in dataframe_to_rows(df=self.tsdf, index=True, header=True):
-            cast(Worksheet, wrksheet).append(row)
+            cast("Worksheet", wrksheet).append(row)

         if not overwrite and Path(sheetfile).exists():
             msg = f"{sheetfile!s} already exists."
@@ -803,7 +811,7 @@ class _CommonModel(BaseModel):
         if labels:
             if len(labels) != self.tsdf.shape[1]:
                 msg = "Must provide same number of labels as items in frame."
-                raise ValueError(msg)
+                raise NumberOfItemsAndLabelsNotSameError(msg)
         else:
             labels = list(self.tsdf.columns.get_level_values(0))

@@ -849,7 +857,7 @@ class _CommonModel(BaseModel):
                 auto_open=auto_open,
                 auto_play=False,
                 link_text="",
-                include_plotlyjs=cast(bool, include_plotlyjs),
+                include_plotlyjs=cast("bool", include_plotlyjs),
                 config=fig["config"],
                 output_type=output_type,
             )
@@ -860,14 +868,14 @@ class _CommonModel(BaseModel):
                 fig=figure,
                 config=fig["config"],
                 auto_play=False,
-                include_plotlyjs=cast(bool, include_plotlyjs),
+                include_plotlyjs=cast("bool", include_plotlyjs),
                 full_html=False,
                 div_id=div_id,
             )

         return figure, string_output

-    def plot_series(  # noqa: C901
+    def plot_series(
         self: Self,
         mode: LiteralLinePlotMode = "lines",
         tick_fmt: str | None = None,
@@ -918,7 +926,7 @@ class _CommonModel(BaseModel):
         if labels:
             if len(labels) != self.tsdf.shape[1]:
                 msg = "Must provide same number of labels as items in frame."
-                raise ValueError(msg)
+                raise NumberOfItemsAndLabelsNotSameError(msg)
         else:
             labels = list(self.tsdf.columns.get_level_values(0))

@@ -979,7 +987,7 @@ class _CommonModel(BaseModel):
                 auto_open=auto_open,
                 auto_play=False,
                 link_text="",
-                include_plotlyjs=cast(bool, include_plotlyjs),
+                include_plotlyjs=cast("bool", include_plotlyjs),
                 config=fig["config"],
                 output_type=output_type,
             )
@@ -990,7 +998,7 @@ class _CommonModel(BaseModel):
                 fig=figure,
                 config=fig["config"],
                 auto_play=False,
-                include_plotlyjs=cast(bool, include_plotlyjs),
+                include_plotlyjs=cast("bool", include_plotlyjs),
                 full_html=False,
                 div_id=div_id,
             )
@@ -1035,13 +1043,13 @@ class _CommonModel(BaseModel):
         else:
             fraction = (later - earlier).days / 365.25
             how_many = self.tsdf.loc[
-                cast(int, earlier) : cast(int, later),
+                cast("int", earlier) : cast("int", later),
                 self.tsdf.columns.to_numpy()[0],
             ].count()
             time_factor = how_many / fraction

         result = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .mean()
@@ -1097,15 +1105,17 @@ class _CommonModel(BaseModel):
         else:
             fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast(int, earlier) : cast(int, later)].count().iloc[0]
+                self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+                .count()
+                .iloc[0]
             )
             time_factor = how_many / fraction

-        data = self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+        data = self.tsdf.loc[cast("int", earlier) : cast("int", later)]
         result = data.ffill().pct_change().std().mul(sqrt(time_factor))

         if self.tsdf.shape[1] == 1:
-            return float(cast(SupportsFloat, result.iloc[0]))
+            return float(cast("SupportsFloat", result.iloc[0]))
         return Series(
             data=result,
             index=self.tsdf.columns,
@@ -1292,21 +1302,23 @@ class _CommonModel(BaseModel):
         else:
             fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast(int, earlier) : cast(int, later)].count().iloc[0]
+                self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+                .count()
+                .iloc[0]
             )
             time_factor = how_many / fraction
         if drift_adjust:
             imp_vol = (-sqrt(time_factor) / norm.ppf(level)) * (
-                self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+                self.tsdf.loc[cast("int", earlier) : cast("int", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
-                - self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+                - self.tsdf.loc[cast("int", earlier) : cast("int", later)]
                 .ffill()
                 .pct_change()
                 .sum()
                 / len(
-                    self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+                    self.tsdf.loc[cast("int", earlier) : cast("int", later)]
                     .ffill()
                     .pct_change(),
                 )
@@ -1314,7 +1326,7 @@ class _CommonModel(BaseModel):
         else:
             imp_vol = (
                 -sqrt(time_factor)
-                * self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+                * self.tsdf.loc[cast("int", earlier) : cast("int", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
@@ -1334,7 +1346,7 @@ class _CommonModel(BaseModel):
             label = f"Imp vol from VaR {level:.0%}"

         if self.tsdf.shape[1] == 1:
-            return float(cast(SupportsFloat, result.iloc[0]))
+            return float(cast("SupportsFloat", result.iloc[0]))
         return Series(
             data=result,
             index=self.tsdf.columns,
@@ -1376,22 +1388,22 @@ class _CommonModel(BaseModel):
             from_dt=from_date,
             to_dt=to_date,
         )
-        cvar_df = self.tsdf.loc[cast(int, earlier) : cast(int, later)].copy(deep=True)
+        cvar_df = self.tsdf.loc[cast("int", earlier) : cast("int", later)].copy(
+            deep=True
+        )
         result = [
-            cvar_df.loc[:, x]  # type: ignore[call-overload,index]
+            cvar_df.loc[:, x]  # type: ignore[call-overload,index,unused-ignore]
             .ffill()
             .pct_change()
             .sort_values()
             .iloc[
-                : int(
-                    ceil(
-                        (1 - level)
-                        * cvar_df.loc[:, x]  # type: ignore[index]
-                        .ffill()
-                        .pct_change()
-                        .count(),
-                    ),
-                )
+                : ceil(
+                    (1 - level)
+                    * cvar_df.loc[:, x]  # type: ignore[index,unused-ignore]
+                    .ffill()
+                    .pct_change()
+                    .count(),
+                ),
             ]
             .mean()
             for x in self.tsdf
@@ -1447,7 +1459,7 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         how_many = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .count(numeric_only=True)
@@ -1463,7 +1475,7 @@ class _CommonModel(BaseModel):
             time_factor = how_many.div(fraction)

         dddf = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .sub(min_accepted_return / time_factor)
@@ -1516,13 +1528,18 @@ class _CommonModel(BaseModel):
         )
         fraction = (later - earlier).days / 365.25

-        any_below_zero = any(self.tsdf.loc[[earlier, later]].lt(0.0).any().to_numpy())
+        any_below_zero = any(
+            self.tsdf.loc[[earlier, later]]  # type: ignore[index,unused-ignore]
+            .lt(0.0)
+            .any()
+            .to_numpy()
+        )
         if zero in self.tsdf.loc[earlier].to_numpy() or any_below_zero:
             msg = (
                 "Geometric return cannot be calculated due to "
                 "an initial value being zero or a negative value."
             )
-            raise ValueError(msg)
+            raise InitialValueZeroError(msg)

         result = (self.tsdf.loc[later] / self.tsdf.loc[earlier]) ** (1 / fraction) - 1

@@ -1567,7 +1584,7 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         result: NDArray[float64] = skew(
-            a=self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            a=self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .to_numpy(),
@@ -1616,7 +1633,11 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         result: NDArray[float64] = kurtosis(
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)].ffill().pct_change(),
+            a=(
+                self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+                .ffill()
+                .pct_change()
+            ),
             fisher=True,
             bias=True,
             nan_policy="omit",
@@ -1666,8 +1687,8 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
-            / self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+            / self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .expanding(min_periods=min_periods)
             .max()
         ).min() - 1
@@ -1711,10 +1732,10 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         pos = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()[1:][
-                self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+                self.tsdf.loc[cast("int", earlier) : cast("int", later)]
                 .ffill()
                 .pct_change()[1:]
                 > zero
@@ -1722,7 +1743,7 @@ class _CommonModel(BaseModel):
             .count()
         )
         tot = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .count()
@@ -1791,7 +1812,7 @@ class _CommonModel(BaseModel):
         )

         if self.tsdf.shape[1] == 1:
-            return float(cast(float64, ratio.iloc[0]))
+            return float(cast("float64", ratio.iloc[0]))
         return Series(
             data=ratio,
             index=self.tsdf.columns,
@@ -1857,7 +1878,7 @@ class _CommonModel(BaseModel):
         )

         if self.tsdf.shape[1] == 1:
-            return float(cast(float64, ratio.iloc[0]))
+            return float(cast("float64", ratio.iloc[0]))
         return Series(
             data=ratio,
             index=self.tsdf.columns,
@@ -1903,14 +1924,16 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         retdf = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)].ffill().pct_change()
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+            .ffill()
+            .pct_change()
         )
         pos = retdf[retdf > min_accepted_return].sub(min_accepted_return).sum()
         neg = retdf[retdf < min_accepted_return].sub(min_accepted_return).sum()
         ratio = pos / -neg

         if self.tsdf.shape[1] == 1:
-            return float(cast(float64, ratio.iloc[0]))
+            return float(cast("float64", ratio.iloc[0]))
         return Series(
             data=ratio,
             index=self.tsdf.columns,
@@ -1953,7 +1976,7 @@ class _CommonModel(BaseModel):
                 "Simple return cannot be calculated due to "
                 f"an initial value being zero. ({self.tsdf.head(3)})"
             )
-            raise ValueError(msg)
+            raise InitialValueZeroError(msg)

         result = self.tsdf.loc[later] / self.tsdf.loc[earlier] - 1

@@ -2043,7 +2066,7 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .quantile(1 - level, interpolation=interpolation)
@@ -2092,7 +2115,7 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)]
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
             .ffill()
             .pct_change()
             .rolling(observations, min_periods=observations)
@@ -2141,7 +2164,9 @@ class _CommonModel(BaseModel):
             to_dt=to_date,
         )
         zscframe = (
-            self.tsdf.loc[cast(int, earlier) : cast(int, later)].ffill().pct_change()
+            self.tsdf.loc[cast("int", earlier) : cast("int", later)]
+            .ffill()
+            .pct_change()
         )
         result = (zscframe.iloc[-1] - zscframe.mean()) / zscframe.std()

@@ -2177,7 +2202,7 @@ class _CommonModel(BaseModel):
         Calculate rolling annualized downside CVaR

         """
-        cvar_label = cast(tuple[str], self.tsdf.iloc[:, column].name)[0]
+        cvar_label = cast("tuple[str]", self.tsdf.iloc[:, column].name)[0]
         cvarseries = (
             self.tsdf.iloc[:, column]
             .rolling(observations, min_periods=observations)
@@ -2208,7 +2233,7 @@ class _CommonModel(BaseModel):
         Calculate rolling returns

         """
-        ret_label = cast(tuple[str], self.tsdf.iloc[:, column].name)[0]
+        ret_label = cast("tuple[str]", self.tsdf.iloc[:, column].name)[0]
         retseries = (
             self.tsdf.iloc[:, column]
             .ffill()
@@ -2247,7 +2272,7 @@ class _CommonModel(BaseModel):
         Calculate rolling annualized downside Value At Risk "VaR"

         """
-        var_label = cast(tuple[str], self.tsdf.iloc[:, column].name)[0]
+        var_label = cast("tuple[str]", self.tsdf.iloc[:, column].name)[0]
         varseries = (
             self.tsdf.iloc[:, column]
             .rolling(observations, min_periods=observations)
@@ -2288,7 +2313,7 @@ class _CommonModel(BaseModel):
             time_factor = float(periods_in_a_year_fixed)
         else:
             time_factor = self.periods_in_a_year
-        vol_label = cast(tuple[str, ValueType], self.tsdf.iloc[:, column].name)[0]
+        vol_label = cast("tuple[str, ValueType]", self.tsdf.iloc[:, column].name)[0]
         dframe = self.tsdf.iloc[:, column].ffill().pct_change()
         volseries = dframe.rolling(
             observations,
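
Note: two changes repeat throughout this file. Every cast() now names its target type as a string literal, so types imported only under the if TYPE_CHECKING: block (Worksheet, ValueType, NDArray and the Literal aliases) never need a runtime import, and the generic ValueError raises are replaced with dedicated exception classes from .owntypes. A minimal sketch of what the exception change means for calling code follows; series is assumed to be an already constructed OpenTimeSeries, and because owntypes itself is not part of this diff it is not visible whether the new classes still subclass ValueError, so callers relying on except ValueError should re-check.

    import datetime as dt

    from openseries.owntypes import DateAlignmentError, InitialValueZeroError

    try:
        earlier, later = series.calc_range(from_dt=dt.date(1900, 1, 1))
    except DateAlignmentError:
        ...  # requested window starts before the first observation in the series

    try:
        compounded = series.geo_ret
    except InitialValueZeroError:
        ...  # series starts at zero or turns negative, so geometric return is undefined
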
openseries/_risk.py CHANGED
@@ -14,7 +14,7 @@ from numpy import (
 from pandas import DataFrame, Series

 if TYPE_CHECKING:
-    from .types import LiteralQuantileInterp  # pragma: no cover
+    from .owntypes import LiteralQuantileInterp  # pragma: no cover


 def _cvar_down_calc(
@@ -44,7 +44,7 @@ def _cvar_down_calc(
         clean = nan_to_num(data)
         ret = clean[1:] / clean[:-1] - 1
         array = sort(ret)
-    return cast(float, mean(array[: int(ceil(len(array) * (1 - level)))]))
+    return cast("float", mean(array[: ceil(len(array) * (1 - level))]))


 def _var_down_calc(
@@ -77,4 +77,4 @@ def _var_down_calc(
     else:
         clean = nan_to_num(data)
         ret = clean[1:] / clean[:-1] - 1
-    return cast(float, quantile(ret, 1 - level, method=interpolation))
+    return cast("float", quantile(ret, 1 - level, method=interpolation))
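
Note: besides the .types to .owntypes import move, the slice bound in _cvar_down_calc drops the int(...) wrapper around ceil(...). That is behaviour preserving only if ceil here is math.ceil, which already returns an int; the import block is outside this hunk, so treat the check below as an assumption about the module rather than a statement of fact.

    from math import ceil

    level = 0.95
    observations = 252
    # math.ceil already returns an int, so the old int(...) wrapper was redundant
    assert ceil(observations * (1 - level)) == int(ceil(observations * (1 - level))) == 13
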
openseries/datefixer.py CHANGED
@@ -21,8 +21,14 @@ from pandas import (
 )
 from pandas.tseries.offsets import CustomBusinessDay

+from .owntypes import (
+    BothStartAndEndError,
+    CountriesNotStringNorListStrError,
+    TradingDaysNotAboveZeroError,
+)
+
 if TYPE_CHECKING:
-    from .types import (  # pragma: no cover
+    from .owntypes import (  # pragma: no cover
         CountriesType,
         DateType,
         HolidayType,
@@ -92,7 +98,7 @@ def holiday_calendar(
             "Argument countries must be a string country code or "
             "a list of string country codes according to ISO 3166-1 alpha-2."
         )
-        raise ValueError(msg)
+        raise CountriesNotStringNorListStrError(msg)

     return busdaycalendar(holidays=hols)

@@ -114,7 +120,7 @@ def date_fix(

     """
     msg = f"Unknown date format {fixerdate!s} of type {type(fixerdate)!s} encountered"
-    if isinstance(fixerdate, (Timestamp, dt.datetime)):
+    if isinstance(fixerdate, Timestamp | dt.datetime):
         return fixerdate.date()
     if isinstance(fixerdate, dt.date):
         return fixerdate
@@ -296,7 +302,7 @@ def offset_business_days(

     idx = where(array(local_bdays) == ddate)[0]

-    return cast(dt.date, local_bdays[idx[0] + days])
+    return cast("dt.date", local_bdays[idx[0] + days])


 def generate_calendar_date_range(
@@ -326,7 +332,7 @@ def generate_calendar_date_range(
     """
     if trading_days < 1:
         msg = "Argument trading_days must be greater than zero."
-        raise ValueError(msg)
+        raise TradingDaysNotAboveZeroError(msg)

     if start and not end:
         tmp_range = date_range(
@@ -368,7 +374,7 @@ def generate_calendar_date_range(
         "Provide one of start or end date, but not both. "
         "Date range is inferred from number of trading days."
     )
-    raise ValueError(msg)
+    raise BothStartAndEndError(msg)


 def _do_resample_to_business_period_ends(
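
Note: the isinstance check in date_fix now uses the PEP 604 union syntax (Timestamp | dt.datetime). Passing such a union to isinstance is only supported on Python 3.10 and later, so this hunk implies a runtime floor of Python 3.10 for the module. A standalone illustration of the equivalence (not openseries code):

    import datetime as dt

    value = dt.datetime(2024, 3, 28, 12, 0)

    # tuple form used in 1.7.8
    assert isinstance(value, (dt.datetime, dt.date))
    # PEP 604 union form used in 1.8.1; needs Python 3.10+ at runtime
    assert isinstance(value, dt.datetime | dt.date)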