openseries 1.9.5-py3-none-any.whl → 1.9.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
openseries/owntypes.py CHANGED
@@ -12,14 +12,22 @@ from __future__ import annotations
  import datetime as dt
  from enum import Enum
  from pprint import pformat
- from typing import Annotated, ClassVar, Literal, Union
+ from typing import TYPE_CHECKING, Annotated, ClassVar, Literal, TypeAlias, TypeVar

+ from annotated_types import MinLen
  from numpy import datetime64
- from pandas import Timestamp
- from pydantic import BaseModel, Field, StringConstraints, conlist, conset
+ from pandas import Series, Timestamp
+ from pydantic import BaseModel, Field, StringConstraints
+
+ if TYPE_CHECKING:
+     from pandas import Series as _Series
+
+     SeriesFloat = _Series[float]
+ else:
+     SeriesFloat = Series

  try:
-     from typing import Self  # type: ignore[attr-defined,unused-ignore]
+     from typing import Self
  except ImportError:  # pragma: no cover
      from typing_extensions import Self

@@ -27,6 +35,9 @@ except ImportError: # pragma: no cover
  __all__ = ["Self", "ValueType"]


+ SeriesOrFloat_co = TypeVar("SeriesOrFloat_co", float, SeriesFloat, covariant=True)
+
+
  CountryStringType = Annotated[
      str,
      StringConstraints(
@@ -38,14 +49,11 @@ CountryStringType = Annotated[
          strict=True,
      ),
  ]
- CountryListType = conset(
-     item_type=CountryStringType,
-     min_length=1,
- )
- CountriesType = Union[CountryListType, CountryStringType]  # type: ignore[valid-type]  # noqa: UP007
+ CountrySetType: TypeAlias = Annotated[set[CountryStringType], MinLen(1)]
+ CountriesType: TypeAlias = CountrySetType | CountryStringType


- class Countries(BaseModel):  # type: ignore[misc]
+ class Countries(BaseModel):
      """Declare Countries."""

      countryinput: CountriesType
@@ -64,69 +72,62 @@ CurrencyStringType = Annotated[
  ]


- class Currency(BaseModel):  # type: ignore[misc]
+ class Currency(BaseModel):
      """Declare Currency."""

      ccy: CurrencyStringType


- DateListType = Annotated[
-     list[str],
-     conset(
-         Annotated[
-             str,
-             StringConstraints(
-                 pattern=r"^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])$",
-                 strip_whitespace=True,
-                 strict=True,
-                 min_length=10,
-                 max_length=10,
-             ),
-         ],
-         min_length=1,
+ DateStringType = Annotated[
+     str,
+     StringConstraints(
+         pattern=r"^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01])$",
+         strip_whitespace=True,
+         strict=True,
+         min_length=10,
+         max_length=10,
      ),
  ]
+ DateListType: TypeAlias = Annotated[list[DateStringType], MinLen(1)]

- ValueListType = Annotated[list[float], conlist(float, min_length=1)]
+ ValueListType: TypeAlias = Annotated[list[float], MinLen(1)]

  DaysInYearType = Annotated[int, Field(strict=True, ge=1, le=366)]

  DateType = str | dt.date | dt.datetime | datetime64 | Timestamp

- PlotlyLayoutType = dict[
-     str,
+ PlotlyConfigType = (
      str
      | int
      | float
      | bool
      | list[str]
-     | dict[str, str | int | float | bool | list[str]],
- ]
+     | dict[str, str | int | float | bool | list[str]]
+ )
+
+ PlotlyLayoutType = dict[str, PlotlyConfigType]

  CaptorLogoType = dict[str, str | float]

  LiteralJsonOutput = Literal["values", "tsdf"]
  LiteralTrunc = Literal["before", "after", "both"]
- LiteralLinePlotMode = Literal[
-     "lines",
-     "markers",
-     "lines+markers",
-     "lines+text",
-     "markers+text",
-     "lines+markers+text",
-     None,
- ]
+ LiteralLinePlotMode = (
+     Literal[
+         "lines",
+         "markers",
+         "lines+markers",
+         "lines+text",
+         "markers+text",
+         "lines+markers+text",
+     ]
+     | None
+ )
  LiteralHowMerge = Literal["outer", "inner"]
  LiteralQuantileInterp = Literal["linear", "lower", "higher", "midpoint", "nearest"]
  LiteralBizDayFreq = Literal["B", "BME", "BQE", "BYE"]
- LiteralPandasReindexMethod = Literal[
-     None,
-     "pad",
-     "ffill",
-     "backfill",
-     "bfill",
-     "nearest",
- ]
+ LiteralPandasReindexMethod = (
+     Literal["pad", "ffill", "backfill", "bfill", "nearest"] | None
+ )
  LiteralNanMethod = Literal["fill", "drop"]
  LiteralCaptureRatio = Literal["up", "down", "both"]
  LiteralBarPlotMode = Literal["stack", "group", "overlay", "relative"]
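The reworked aliases above replace pydantic's conset/conlist helpers with plain Annotated types constrained by annotated_types.MinLen. A minimal sketch of how such an alias behaves under pydantic v2 validation; the TypeAdapter usage is illustrative only and not part of openseries:

# Illustrative only: validating a CountrySetType-style alias with pydantic's TypeAdapter.
from typing import Annotated

from annotated_types import MinLen
from pydantic import TypeAdapter, ValidationError

CountrySet = Annotated[set[str], MinLen(1)]  # same shape as CountrySetType above

adapter = TypeAdapter(CountrySet)
print(adapter.validate_python({"SE", "US"}))  # passes: {'SE', 'US'}

try:
    adapter.validate_python(set())  # an empty set violates MinLen(1)
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # e.g. "too_short"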
@@ -16,13 +16,13 @@ from typing import TYPE_CHECKING, cast
  from numpy import (
      append,
      array,
+     einsum,
      float64,
      inf,
      isnan,
      linspace,
      nan,
      sqrt,
-     zeros,
  )
  from numpy import (
      sum as npsum,
@@ -48,12 +48,9 @@ from .owntypes import (
      ValueType,
  )
  from .series import OpenTimeSeries
-
- # noinspection PyProtectedMember
  from .simulation import _random_generator

  if TYPE_CHECKING:  # pragma: no cover
-     # noinspection PyUnresolvedReferences
      from collections.abc import Callable

      from numpy.typing import NDArray
@@ -106,25 +103,16 @@ def simulate_portfolios(

      log_ret.columns = log_ret.columns.droplevel(level=1)

-     randomizer = _random_generator(seed=seed)
-
-     all_weights = zeros((num_ports, simframe.item_count))
-     ret_arr = zeros(num_ports)
-     vol_arr = zeros(num_ports)
-     sharpe_arr = zeros(num_ports)
-
-     for x in range(num_ports):
-         weights = array(randomizer.random(simframe.item_count))
-         weights = weights / npsum(weights)
-         all_weights[x, :] = weights
+     cov_matrix = log_ret.cov() * simframe.periods_in_a_year
+     mean_returns = log_ret.mean() * simframe.periods_in_a_year

-         vol_arr[x] = sqrt(
-             weights.T @ (log_ret.cov() * simframe.periods_in_a_year @ weights),
-         )
-
-         ret_arr[x] = npsum(log_ret.mean() * weights * simframe.periods_in_a_year)
+     randomizer = _random_generator(seed=seed)
+     all_weights = randomizer.random((num_ports, simframe.item_count))
+     all_weights = all_weights / all_weights.sum(axis=1, keepdims=True)

-         sharpe_arr[x] = ret_arr[x] / vol_arr[x]
+     ret_arr = all_weights @ mean_returns
+     vol_arr = sqrt(einsum("ij,jk,ik->i", all_weights, cov_matrix, all_weights))
+     sharpe_arr = ret_arr / vol_arr

      simdf = concat(
          [
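The rewritten simulate_portfolios above drops the per-portfolio Python loop: weights are drawn as one (num_ports, item_count) matrix, normalised row-wise, and each portfolio's volatility sqrt(w.T @ cov @ w) is computed for all rows at once with a single einsum call. A small self-contained check of that identity, illustrative only and not openseries code:

# Check that einsum("ij,jk,ik->i", W, C, W) equals the looped quadratic form per weight row.
import numpy as np

rng = np.random.default_rng(seed=71)
weights = rng.random((5, 3))
weights /= weights.sum(axis=1, keepdims=True)  # each weight row sums to 1
cov = np.cov(rng.standard_normal((3, 250)))    # a 3x3 sample covariance matrix

vec_vol = np.sqrt(np.einsum("ij,jk,ik->i", weights, cov, weights))
loop_vol = np.sqrt(np.array([w @ cov @ w for w in weights]))

assert np.allclose(vec_vol, loop_vol)
print(vec_vol)  # one volatility per simulated weight row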
@@ -137,7 +125,6 @@ def simulate_portfolios(
      return simdf.dropna()


- # noinspection PyUnusedLocal
  def efficient_frontier(
      eframe: OpenFrame,
      num_ports: int = 5000,
@@ -230,7 +217,7 @@
          _get_ret_vol_sr(
              lg_ret=log_ret,
              weights=weights,
-             per_in_yr=eframe.periods_in_a_year,
+             per_in_yr=copi.periods_in_a_year,
          )[2]
          * -1,
      )
@@ -243,7 +230,7 @@
          _get_ret_vol_sr(
              lg_ret=log_ret,
              weights=weights,
-             per_in_yr=eframe.periods_in_a_year,
+             per_in_yr=copi.periods_in_a_year,
          )[1],
      )

@@ -263,7 +250,7 @@
      optimal = _get_ret_vol_sr(
          lg_ret=log_ret,
          weights=opt_results.x,
-         per_in_yr=eframe.periods_in_a_year,
+         per_in_yr=copi.periods_in_a_year,
      )

      frontier_y = linspace(start=frontier_min, stop=frontier_max, num=frontier_points)
@@ -280,7 +267,7 @@
              "fun": lambda w, poss_return=possible_return: _diff_return(
                  lg_ret=log_ret,
                  weights=w,
-                 per_in_yr=eframe.periods_in_a_year,
+                 per_in_yr=copi.periods_in_a_year,
                  poss_return=poss_return,
              ),
          },
@@ -366,7 +353,7 @@
      if not bounds:
          bounds = tuple((0.0, 1.0) for _ in range(data.item_count))

-     front_frame, sim_frame, optimal = efficient_frontier(
+     front_frame, _, _ = efficient_frontier(
          eframe=data,
          num_ports=simulations,
          frontier_points=curve_points,
@@ -375,7 +362,6 @@
      )

      condition_least_ret = front_frame.ret > serie.arithmetic_ret
-     # noinspection PyArgumentList
      least_ret_frame = front_frame[condition_least_ret].sort_values(by="stdev")
      least_ret_port: Series[float] = least_ret_frame.iloc[0]
      least_ret_port_name = f"Minimize vol & target return of {portfolioname}"
@@ -386,7 +372,6 @@
      resleast = OpenTimeSeries.from_df(lr_frame.make_portfolio(least_ret_port_name))

      condition_most_vol = front_frame.stdev < serie.vol
-     # noinspection PyArgumentList
      most_vol_frame = front_frame[condition_most_vol].sort_values(
          by="ret",
          ascending=False,
@@ -440,14 +425,13 @@
          for wgt, nm in zip(optimized[3:], assets.columns_lvl_zero, strict=True)
      ]
      opt_text = "<br><br>Weights:<br>" + "<br>".join(opt_text_list)
-     vol = cast("Series[float]", assets.vol)
      plotframe = DataFrame(
          data=[
              assets.arithmetic_ret,
-             vol,
+             assets.vol,
              Series(
                  data=[""] * assets.item_count,
-                 index=vol.index,
+                 index=assets.vol.index,
              ),
          ],
          index=["ret", "stdev", "text"],
openseries/py.typed ADDED
File without changes (py.typed is an empty PEP 561 marker file; shipping it tells type checkers that openseries provides inline type information)
openseries/report.py CHANGED
@@ -75,7 +75,7 @@ def calendar_period_returns(
          cldr.index = Index([d.year for d in cldr.index])
      elif freq.upper() == "BQE":
          cldr.index = Index(
-             [Timestamp(d).to_period("Q").strftime("Q%q %Y") for d in cldr.index]
+             [Timestamp(d).to_period("Q").strftime("Q%q %Y") for d in cldr.index],
          )
      else:
          cldr.index = Index([d.strftime("%b %y") for d in cldr.index])
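For reference, the %q directive used above is supported by pandas Period.strftime, so each business-quarter-end index value renders as a compact quarter label. A one-line illustration (not openseries code):

from pandas import Timestamp

print(Timestamp("2024-09-30").to_period("Q").strftime("Q%q %Y"))  # prints "Q3 2024"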
@@ -264,9 +264,8 @@ def report_html(
          "{:.2f}",
      ]

-     # noinspection PyTypeChecker
      rpt_df = copied.all_properties(
-         properties=cast("list[LiteralFrameProps]", properties)
+         properties=cast("list[LiteralFrameProps]", properties),
      )
      alpha_frame = copied.from_deepcopy()
      alpha_frame.to_cumret()
@@ -282,7 +281,9 @@ def report_html(
      ]
      alphas.append("")
      ar = DataFrame(
-         data=alphas, index=copied.tsdf.columns, columns=["Jensen's Alpha"]
+         data=alphas,
+         index=copied.tsdf.columns,
+         columns=["Jensen's Alpha"],
      ).T
      rpt_df = concat([rpt_df, ar])
      ir = copied.info_ratio_func()
@@ -294,7 +295,7 @@ def report_html(
      te_frame.resample("7D")
      with catch_warnings():
          simplefilter("ignore")
-         te = te_frame.tracking_error_func()
+         te: Series[float] | Series[str] = te_frame.tracking_error_func()
      if te.hasnans:
          te = Series(
              data=[""] * te_frame.item_count,
@@ -318,7 +319,7 @@ def report_html(
      with catch_warnings():
          simplefilter("ignore")
          try:
-             cru = crm.capture_ratio_func(ratio="both")
+             cru: Series[float] | Series[str] = crm.capture_ratio_func(ratio="both")
          except ZeroDivisionError as exc:  # pragma: no cover
              msg = f"Capture ratio calculation error: {exc!s}"  # pragma: no cover
              logger.warning(msg=msg)  # pragma: no cover
@@ -351,21 +352,18 @@ def report_html(

      for item, f in zip(rpt_df.index, formats, strict=False):
          rpt_df.loc[item] = rpt_df.loc[item].apply(
-             lambda x, fmt=f: x if (isinstance(x, str) or x is None) else fmt.format(x),
+             lambda x, fmt=f: str(x)
+             if (isinstance(x, str) or x is None)
+             else fmt.format(x),
          )

      rpt_df.index = Index(labels_init)

      this_year = copied.last_idx.year
      this_month = copied.last_idx.month
-     ytd = cast("Series[float]", copied.value_ret_calendar_period(year=this_year)).map(
-         "{:.2%}".format
-     )
+     ytd = copied.value_ret_calendar_period(year=this_year).map("{:.2%}".format)
      ytd.name = "Year-to-Date"
-     mtd = cast(
-         "Series[float]",
-         copied.value_ret_calendar_period(year=this_year, month=this_month),
-     ).map(
+     mtd = copied.value_ret_calendar_period(year=this_year, month=this_month).map(
          "{:.2%}".format,
      )
      mtd.name = "Month-to-Date"
@@ -430,7 +428,8 @@ def report_html(

      figure.update_layout(fig.get("layout"))
      colorway: list[str] = cast("dict[str, list[str]]", fig["layout"]).get(
-         "colorway", []
+         "colorway",
+         [],
      )

      if vertical_legend:
@@ -457,7 +456,7 @@ def report_html(
      figure.update_xaxes(gridcolor="#EEEEEE", automargin=True, tickangle=-45)
      figure.update_yaxes(tickformat=".2%", gridcolor="#EEEEEE", automargin=True)

-     if isinstance(title, str):
+     if title:
          figure.update_layout(
              {"title": {"text": f"<b>{title}</b><br>", "font": {"size": 36}}},
          )
openseries/series.py CHANGED
@@ -15,12 +15,15 @@ from typing import TYPE_CHECKING, Any, TypeVar, cast

  if TYPE_CHECKING:  # pragma: no cover
      import datetime as dt
-     from collections.abc import Callable
+
+     from numpy.typing import NDArray
+     from pandas import Timestamp

  from numpy import (
      append,
      array,
      cumprod,
+     float64,
      insert,
      isnan,
      log,
@@ -37,7 +40,7 @@ from pandas import (
  )
  from pydantic import field_validator, model_validator

- from ._common_model import _CommonModel
+ from ._common_model import _calculate_time_factor, _CommonModel
  from .datefixer import _do_resample_to_business_period_ends, date_fix
  from .owntypes import (
      Countries,
@@ -59,9 +62,6 @@ from .owntypes import (
      ValueType,
  )

- FieldValidator = cast("Callable[..., Callable[..., Any]]", field_validator)
- ModelValidator = cast("Callable[..., Callable[..., Any]]", model_validator)
-
  logger = getLogger(__name__)

  __all__ = ["OpenTimeSeries", "timeseries_chain"]
@@ -69,8 +69,7 @@ __all__ = ["OpenTimeSeries", "timeseries_chain"]
  TypeOpenTimeSeries = TypeVar("TypeOpenTimeSeries", bound="OpenTimeSeries")


- # noinspection PyUnresolvedReferences,PyNestedDecorators
- class OpenTimeSeries(_CommonModel):
+ class OpenTimeSeries(_CommonModel[float]):
      """OpenTimeSeries objects are at the core of the openseries package.

      The intended use is to allow analyses of financial timeseries.
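OpenTimeSeries now derives from a parametrised _CommonModel[float], which pairs with the covariant SeriesOrFloat_co TypeVar added in owntypes.py. The body of _CommonModel is not included in this diff; the sketch below only illustrates, under that assumption, the general pattern such a constrained covariant TypeVar enables, namely scalar results for a single series and Series results for a multi-column frame:

# Sketch of the assumed pattern only; _CommonModel's real definition is not shown in this diff.
from typing import Generic, TypeVar

from pandas import Series

SeriesOrFloat_co = TypeVar("SeriesOrFloat_co", float, Series, covariant=True)

class CommonBase(Generic[SeriesOrFloat_co]):
    """Shared analytics whose result type narrows per subclass."""

    def some_measure(self) -> SeriesOrFloat_co:  # hypothetical stand-in for the real properties
        raise NotImplementedError

class SingleSeries(CommonBase[float]):  # OpenTimeSeries-like: plain float results
    def some_measure(self) -> float:
        return 0.05

class MultiSeries(CommonBase[Series]):  # OpenFrame-like: one value per column
    def some_measure(self) -> Series:
        return Series({"a": 0.05, "b": 0.07})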
@@ -126,24 +125,25 @@ class OpenTimeSeries(_CommonModel):
      isin: str | None = None
      label: str | None = None

-     @FieldValidator("domestic", mode="before")
+     @field_validator("domestic", mode="before")
      @classmethod
      def _validate_domestic(cls, value: CurrencyStringType) -> CurrencyStringType:
          """Pydantic validator to ensure domestic field is validated."""
          _ = Currency(ccy=value)
          return value

-     @FieldValidator("countries", mode="before")
+     @field_validator("countries", mode="before")
      @classmethod
      def _validate_countries(cls, value: CountriesType) -> CountriesType:
          """Pydantic validator to ensure countries field is validated."""
          _ = Countries(countryinput=value)
          return value

-     @FieldValidator("markets", mode="before")
+     @field_validator("markets", mode="before")
      @classmethod
      def _validate_markets(
-         cls, value: list[str] | str | None
+         cls,
+         value: list[str] | str | None,
      ) -> list[str] | str | None:
          """Pydantic validator to ensure markets field is validated."""
          msg = (
@@ -159,7 +159,7 @@ class OpenTimeSeries(_CommonModel):
              raise MarketsNotStringNorListStrError(item_msg)
          raise MarketsNotStringNorListStrError(msg)

-     @ModelValidator(mode="after")
+     @model_validator(mode="after")
      def _dates_and_values_validate(self: Self) -> Self:
          """Pydantic validator to ensure dates and values are validated."""
          values_list_length = len(self.values)
@@ -168,9 +168,6 @@ class OpenTimeSeries(_CommonModel):
          if dates_list_length != dates_set_length:
              msg = "Dates are not unique"
              raise ValueError(msg)
-         if values_list_length < 1:
-             msg = "There must be at least 1 value"
-             raise ValueError(msg)
          if (
              (dates_list_length != values_list_length)
              or (len(self.tsdf.index) != self.tsdf.shape[0])
@@ -280,7 +277,7 @@ class OpenTimeSeries(_CommonModel):
                  label, _ = dframe.name
              else:
                  label = dframe.name
-             values = cast("list[float]", dframe.to_numpy().tolist())
+             values = dframe.to_numpy().tolist()
          elif isinstance(dframe, DataFrame):
              values = dframe.iloc[:, column_nmbr].to_list()
              if isinstance(dframe.columns, MultiIndex):
@@ -299,12 +296,11 @@ class OpenTimeSeries(_CommonModel):
                      msg = f"valuetype missing. Adding: {valuetype.value}"
                      logger.warning(msg=msg)
                  else:
-                     valuetype = cast(
-                         "ValueType",
-                         dframe.columns.get_level_values(1).to_numpy()[column_nmbr],
-                     )
+                     valuetype = dframe.columns.get_level_values(1).to_numpy()[
+                         column_nmbr
+                     ]
              else:
-                 label = cast("MultiIndex", dframe.columns).to_numpy()[column_nmbr]
+                 label = dframe.columns.to_numpy()[column_nmbr]
          else:
              raise TypeError(msg)

@@ -475,7 +471,7 @@ class OpenTimeSeries(_CommonModel):
          self.valuetype = ValueType.RTRN
          arrays = [[self.label], [self.valuetype]]
          returns.columns = MultiIndex.from_arrays(
-             arrays=arrays  # type: ignore[arg-type]
+             arrays=arrays,  # type: ignore[arg-type]
          )
          self.tsdf = returns.copy()
          return self
@@ -550,11 +546,16 @@ class OpenTimeSeries(_CommonModel):
          An OpenTimeSeries object

          """
-         arr = array(self.values) / divider
+         arr: NDArray[float64] = array(self.values) / divider

          deltas = array([i.days for i in self.tsdf.index[1:] - self.tsdf.index[:-1]])
-         arr = cumprod(
-             a=insert(arr=1.0 + deltas * arr[:-1] / days_in_year, obj=0, values=1.0)
+         arr = cast(
+             "NDArray[float64]",
+             cumprod(
+                 a=insert(
+                     arr=1.0 + deltas * arr[:-1] / days_in_year, obj=0, values=1.0
+                 ),
+             ),
          )

          self.dates = [d.strftime("%Y-%m-%d") for d in self.tsdf.index]
@@ -672,32 +673,35 @@ class OpenTimeSeries(_CommonModel):

          """
          earlier, later = self.calc_range(
-             months_offset=months_from_last, from_dt=from_date, to_dt=to_date
+             months_offset=months_from_last,
+             from_dt=from_date,
+             to_dt=to_date,
+         )
+         time_factor = _calculate_time_factor(
+             data=self.tsdf.loc[
+                 cast("Timestamp", earlier) : cast("Timestamp", later)
+             ].iloc[:, 0],
+             earlier=earlier,
+             later=later,
+             periods_in_a_year_fixed=periods_in_a_year_fixed,
          )
-         if periods_in_a_year_fixed:
-             time_factor = float(periods_in_a_year_fixed)
-         else:
-             how_many = self.tsdf.loc[
-                 cast("int", earlier) : cast("int", later),
-                 self.tsdf.columns.to_numpy()[0],
-             ].count()
-             fraction = (later - earlier).days / 365.25
-             time_factor = cast("int", how_many) / fraction

-         data = self.tsdf.loc[cast("int", earlier) : cast("int", later)].copy()
+         data = self.tsdf.loc[
+             cast("Timestamp", earlier) : cast("Timestamp", later)
+         ].copy()

          data[self.label, ValueType.RTRN] = log(
-             data.loc[:, self.tsdf.columns.to_numpy()[0]]
+             data.loc[:, self.tsdf.columns.to_numpy()[0]],
          ).diff()

          rawdata = [
-             data.loc[:, cast("int", (self.label, ValueType.RTRN))]
+             data[(self.label, ValueType.RTRN)]
              .iloc[1:day_chunk]
              .std(ddof=dlta_degr_freedms)
              * sqrt(time_factor),
          ]

-         for item in data.loc[:, cast("int", (self.label, ValueType.RTRN))].iloc[1:]:
+         for item in data[(self.label, ValueType.RTRN)].iloc[1:]:
              prev = rawdata[-1]
              rawdata.append(
                  sqrt(
@@ -744,7 +748,6 @@ class OpenTimeSeries(_CommonModel):
          ra_df = ra_df.dropna()

          prev = self.first_idx
-         # noinspection PyTypeChecker
          dates: list[dt.date] = [prev]

          for idx, row in ra_df.iterrows():
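The rolling-volatility hunk above also swaps the inline periods-per-year estimate for the shared _calculate_time_factor helper imported from ._common_model. The helper's body is not part of this diff, but the removed inline code shows the arithmetic it centralises; a standalone sketch of that calculation, using a hypothetical function name:

# Sketch of the removed inline logic (keyword names taken from the call site above;
# the helper's actual implementation is not shown in this diff).
import datetime as dt

def periods_per_year(observations: int, earlier: dt.date, later: dt.date,
                     periods_in_a_year_fixed: int | None = None) -> float:
    if periods_in_a_year_fixed:
        return float(periods_in_a_year_fixed)
    fraction = (later - earlier).days / 365.25  # elapsed calendar years
    return observations / fraction

# About 253 business-day observations over one calendar year gives roughly 256 periods per year.
print(periods_per_year(253, dt.date(2023, 1, 2), dt.date(2023, 12, 29)))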