openseries-1.9.5-py3-none-any.whl → openseries-1.9.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openseries/_common_model.py +371 -442
- openseries/datefixer.py +4 -2
- openseries/frame.py +103 -113
- openseries/owntypes.py +48 -47
- openseries/portfoliotools.py +3 -4
- openseries/report.py +12 -14
- openseries/series.py +33 -19
- openseries/simulation.py +1 -1
- {openseries-1.9.5.dist-info → openseries-1.9.6.dist-info}/METADATA +4 -3
- openseries-1.9.6.dist-info/RECORD +17 -0
- {openseries-1.9.5.dist-info → openseries-1.9.6.dist-info}/WHEEL +1 -1
- openseries-1.9.5.dist-info/RECORD +0 -17
- {openseries-1.9.5.dist-info → openseries-1.9.6.dist-info/licenses}/LICENSE.md +0 -0
openseries/_common_model.py
CHANGED
@@ -16,22 +16,25 @@ from math import ceil
 from pathlib import Path
 from secrets import choice
 from string import ascii_letters
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Generic, Literal, cast

-from numpy import float64, inf, isnan, log, maximum, sqrt
+from numpy import asarray, float64, inf, isnan, log, maximum, sqrt

 from .owntypes import (
+    CaptorLogoType,
+    Combo_co,
     DateAlignmentError,
     InitialValueZeroError,
     NumberOfItemsAndLabelsNotSameError,
+    PlotlyConfigType,
     ResampleDataLossError,
     Self,
     ValueType,
 )

 if TYPE_CHECKING:  # pragma: no cover
-    from numpy.typing import NDArray
     from openpyxl.worksheet.worksheet import Worksheet
+    from pandas import Timestamp

     from .owntypes import (
         CountriesType,
@@ -85,7 +88,7 @@ from .load_plotly import load_plotly_dict


 # noinspection PyTypeChecker
-class _CommonModel(BaseModel
+class _CommonModel(BaseModel, Generic[Combo_co]):
     """Declare _CommonModel."""

     tsdf: DataFrame = DataFrame(dtype="float64")
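Note: `_CommonModel` is now parametrized so that single-series subclasses can type their statistics as `float` while frame subclasses keep `Series[float]`. A minimal sketch of what a covariant type variable like `Combo_co` could look like; this is hypothetical, the real definition lives in openseries/owntypes.py and may differ:

    # Hypothetical sketch only; the actual Combo_co is defined in owntypes.py.
    from typing import TypeVar

    from pandas import Series

    Combo_co = TypeVar("Combo_co", bound="float | Series[float]", covariant=True)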
@@ -96,6 +99,20 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         revalidate_instances="always",
     )

+    def _coerce_result(self: Self, result: Series[float], name: str) -> Combo_co:
+        if self.tsdf.shape[1] == 1:
+            arr = float(asarray(a=result, dtype=float64).squeeze())
+            return cast("Combo_co", arr)  # type: ignore[redundant-cast]
+        return cast(
+            "Combo_co",
+            Series(
+                data=result,
+                index=self.tsdf.columns,
+                name=name,
+                dtype="float64",
+            ),
+        )
+
     @property
     def length(self: Self) -> int:
         """Number of observations.
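Note: the new `_coerce_result` helper centralizes the pattern every statistic previously repeated, collapsing a one-column result to a plain float and otherwise returning a named float64 Series. A standalone sketch of the same rule, assuming only pandas and numpy; the helper name `coerce` is illustrative, not part of the library:

    from numpy import asarray, float64
    from pandas import DataFrame, Series

    def coerce(tsdf: DataFrame, result: Series, name: str) -> float | Series:
        # One column -> scalar float, several columns -> named float64 Series.
        if tsdf.shape[1] == 1:
            return float(asarray(result, dtype=float64).squeeze())
        return Series(data=result, index=tsdf.columns, name=name, dtype="float64")

    frame = DataFrame({"a": [1.0, 1.1, 1.2]})
    print(coerce(frame, frame.pct_change().min(), "Worst"))  # prints a float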
@@ -170,17 +187,17 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.length / self.yearfrac

     @property
-    def max_drawdown_cal_year(self: Self) ->
+    def max_drawdown_cal_year(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown in a single calendar year.

         """
         years = Index(d.year for d in self.tsdf.index)
-
+        result = (
             self.tsdf.groupby(years)
             .apply(
                 lambda prices: (prices / prices.expanding(min_periods=1).max()).min()
@@ -188,53 +205,46 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             )
             .min()
         )
-
-            return float(mddc.iloc[0])
-        return Series(
-            data=mddc,
-            index=self.tsdf.columns,
-            name="Max drawdown in cal yr",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Max drawdown in cal yr")

     @property
-    def geo_ret(self: Self) ->
+    def geo_ret(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/c/cagr.asp.

         Returns:
         -------
-
+        Combo_co
             Compounded Annual Growth Rate (CAGR)

         """
         return self.geo_ret_func()

     @property
-    def arithmetic_ret(self: Self) ->
+    def arithmetic_ret(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/a/arithmeticmean.asp.

         Returns:
         -------
-
+        Combo_co
             Annualized arithmetic mean of returns

         """
         return self.arithmetic_ret_func()

     @property
-    def value_ret(self: Self) ->
+    def value_ret(self: Self) -> Combo_co:
         """Simple return.

         Returns:
         -------
-
+        Combo_co
             Simple return

         """
         return self.value_ret_func()

     @property
-    def vol(self: Self) ->
+    def vol(self: Self) -> Combo_co:
         """Annualized volatility.

         Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -242,14 +252,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized volatility

         """
         return self.vol_func()

     @property
-    def downside_deviation(self: Self) ->
+    def downside_deviation(self: Self) -> Combo_co:
         """Downside Deviation.

         Standard deviation of returns that are below a Minimum Accepted Return
@@ -258,23 +268,24 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside deviation

         """
         min_accepted_return: float = 0.0
         order: Literal[2, 3] = 2
         return self.lower_partial_moment_func(
-            min_accepted_return=min_accepted_return,
+            min_accepted_return=min_accepted_return,
+            order=order,
         )

     @property
-    def ret_vol_ratio(self: Self) ->
+    def ret_vol_ratio(self: Self) -> Combo_co:
         """Ratio of annualized arithmetic mean of returns and annualized volatility.

         Returns:
         -------
-
+        Combo_co
             Ratio of the annualized arithmetic mean of returns and annualized
             volatility.

@@ -283,12 +294,12 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.ret_vol_ratio_func(riskfree_rate=riskfree_rate)

     @property
-    def sortino_ratio(self: Self) ->
+    def sortino_ratio(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/s/sortinoratio.asp.

         Returns:
         -------
-
+        Combo_co
             Sortino ratio calculated as the annualized arithmetic mean of returns
             / downside deviation. The ratio implies that the riskfree asset has zero
             volatility, and a minimum acceptable return of zero.
@@ -302,7 +313,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         )

     @property
-    def kappa3_ratio(self: Self) ->
+    def kappa3_ratio(self: Self) -> Combo_co:
         """Kappa-3 ratio.

         The Kappa-3 ratio is a generalized downside-risk ratio defined as
@@ -313,7 +324,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Kappa-3 ratio calculation with the riskfree rate and
             Minimum Acceptable Return (MAR) both set to zero.

@@ -328,12 +339,12 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         )

     @property
-    def omega_ratio(self: Self) ->
+    def omega_ratio(self: Self) -> Combo_co:
         """https://en.wikipedia.org/wiki/Omega_ratio.

         Returns:
         -------
-
+        Combo_co
             Omega ratio calculation

         """
@@ -341,24 +352,24 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.omega_ratio_func(min_accepted_return=minimum_accepted_return)

     @property
-    def z_score(self: Self) ->
+    def z_score(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/z/zscore.asp.

         Returns:
         -------
-
+        Combo_co
             Z-score as (last return - mean return) / standard deviation of returns.

         """
         return self.z_score_func()

     @property
-    def max_drawdown(self: Self) ->
+    def max_drawdown(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown without any limit on date range

         """
@@ -390,12 +401,12 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         ).dt.date

     @property
-    def worst(self: Self) ->
+    def worst(self: Self) -> Combo_co:
         """Most negative percentage change.

         Returns:
         -------
-
+        Combo_co
             Most negative percentage change

         """
@@ -403,7 +414,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.worst_func(observations=observations)

     @property
-    def worst_month(self: Self) ->
+    def worst_month(self: Self) -> Combo_co:
         """Most negative month.

         Returns:
@@ -418,8 +429,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             countries = self.countries
             markets = self.markets
         except AttributeError:
-            countries = self.constituents[0].countries
-            markets = self.constituents[0].markets
+            countries = self.constituents[0].countries  # type: ignore[attr-defined]
+            markets = self.constituents[0].markets  # type: ignore[attr-defined]

         wmdf = self.tsdf.copy()

@@ -444,58 +455,51 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         result = wmdf.ffill().pct_change().min()

-
-            return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Worst month",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Worst month")

     @property
-    def positive_share(self: Self) ->
+    def positive_share(self: Self) -> Combo_co:
         """The share of percentage changes that are greater than zero.

         Returns:
         -------
-
+        Combo_co
             The share of percentage changes that are greater than zero

         """
         return self.positive_share_func()

     @property
-    def skew(self: Self) ->
+    def skew(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/s/skewness.asp.

         Returns:
         -------
-
+        Combo_co
             Skew of the return distribution

         """
         return self.skew_func()

     @property
-    def kurtosis(self: Self) ->
+    def kurtosis(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/k/kurtosis.asp.

         Returns:
         -------
-
+        Combo_co
             Kurtosis of the return distribution

         """
         return self.kurtosis_func()

     @property
-    def cvar_down(self: Self) ->
+    def cvar_down(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.

         Returns:
         -------
-
+        Combo_co
             Downside 95% Conditional Value At Risk "CVaR"

         """
@@ -503,7 +507,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.cvar_down_func(level=level)

     @property
-    def var_down(self: Self) ->
+    def var_down(self: Self) -> Combo_co:
         """Downside 95% Value At Risk (VaR).

         The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -511,7 +515,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside 95% Value At Risk (VaR)

         """
@@ -520,14 +524,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         return self.var_down_func(level=level, interpolation=interpolation)

     @property
-    def vol_from_var(self: Self) ->
+    def vol_from_var(self: Self) -> Combo_co:
         """Implied annualized volatility from Downside 95% Value at Risk.

         Assumes that returns are normally distributed.

         Returns:
         -------
-
+        Combo_co
             Implied annualized volatility from the Downside 95% VaR using the
             assumption that returns are normally distributed.

@@ -625,25 +629,25 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             try:
                 self.countries = countries
             except ValidationError:
-                for serie in self.constituents:
+                for serie in self.constituents:  # type: ignore[attr-defined]
                     serie.countries = countries
         else:
             try:
                 countries = self.countries
             except AttributeError:
-                countries = self.constituents[0].countries
+                countries = self.constituents[0].countries  # type: ignore[attr-defined]

         if markets:
             try:
                 self.markets = markets
             except ValidationError:
-                for serie in self.constituents:
+                for serie in self.constituents:  # type: ignore[attr-defined]
                     serie.markets = markets
         else:
             try:
                 markets = self.markets
             except AttributeError:
-                markets = self.constituents[0].markets
+                markets = self.constituents[0].markets  # type: ignore[attr-defined]

         calendar = holiday_calendar(
             startyear=startyear,
@@ -658,7 +662,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             for d in date_range(
                 start=cast("dt.date", self.tsdf.first_valid_index()),
                 end=cast("dt.date", self.tsdf.last_valid_index()),
-                freq=CustomBusinessDay(calendar=calendar)
+                freq=CustomBusinessDay(calendar=calendar)
+                if any([countries, markets, custom_holidays])
+                else None,
             )
         ]
         self.tsdf = self.tsdf.reindex(labels=d_range, method=method, copy=False)
@@ -859,9 +865,103 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         return str(sheetfile)

+    @staticmethod
+    def _ensure_labels(
+        ncols: int,
+        labels: list[str] | None,
+        default_labels: list[str],
+    ) -> list[str]:
+        """Validate or infer labels for plotting."""
+        if labels:
+            if len(labels) != ncols:
+                msg = "Must provide same number of labels as items in frame."
+                raise NumberOfItemsAndLabelsNotSameError(msg)
+            return labels
+        return default_labels
+
+    @staticmethod
+    def _resolve_dir(directory: DirectoryPath | None) -> Path:
+        """Resolve output directory for plot files."""
+        if directory:
+            return Path(directory).resolve()
+        if (Path.home() / "Documents").exists():
+            return Path.home() / "Documents"
+        return Path(stack()[2].filename).parent
+
+    @staticmethod
+    def _hover_xy(tick_fmt: str | None) -> str:
+        """Create hovertemplate for y-value and date x-axis."""
+        return (
+            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
+            if tick_fmt
+            else "%{y}<br>%{x|%Y-%m-%d}"
+        )
+
+    @staticmethod
+    def _hover_hist(x_fmt: str | None, y_fmt: str | None) -> str:
+        """Create hovertemplate for histogram plots."""
+        y = f"%{{y:{y_fmt}}}" if y_fmt else "%{y}"
+        x = f"%{{x:{x_fmt}}}" if x_fmt else "%{x}"
+        return f"Count: {y}<br>{x}"
+
+    @staticmethod
+    def _apply_title_logo(
+        figure: Figure,
+        logo: CaptorLogoType,
+        title: str | None,
+        *,
+        add_logo: bool,
+    ) -> None:
+        """Apply optional title and logo to a Plotly Figure."""
+        if add_logo:
+            figure.add_layout_image(logo)
+        if title:
+            figure.update_layout(
+                {"title": {"text": f"<b>{title}</b><br>", "font": {"size": 36}}},
+            )
+
+    @staticmethod
+    def _emit_output(
+        figure: Figure,
+        fig_config: PlotlyConfigType,
+        output_type: LiteralPlotlyOutput,
+        plotfile: Path,
+        filename: str,
+        *,
+        include_plotlyjs_bool: bool,
+        auto_open: bool,
+    ) -> str:
+        """Write a file or return inline HTML string from a Plotly Figure."""
+        if output_type == "file":
+            plot(
+                figure_or_data=figure,
+                filename=str(plotfile),
+                auto_open=auto_open,
+                auto_play=False,
+                link_text="",
+                include_plotlyjs=include_plotlyjs_bool,
+                config=fig_config,
+                output_type=output_type,
+            )
+            return str(plotfile)
+
+        div_id = filename.rsplit(".", 1)[0]
+        return cast(
+            "str",
+            to_html(
+                fig=figure,
+                config=fig_config,
+                auto_play=False,
+                include_plotlyjs=include_plotlyjs_bool,
+                full_html=False,
+                div_id=div_id,
+            ),
+        )
+
     def plot_bars(
         self: Self,
         mode: LiteralBarPlotMode = "group",
+        title: str | None = None,
         tick_fmt: str | None = None,
         filename: str | None = None,
         directory: DirectoryPath | None = None,
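Note: the plotting boilerplate that plot_bars, plot_series and plot_histogram used to duplicate now lives in the static helpers above. The hovertemplates are plain Plotly format strings; a small standalone illustration of what `_hover_xy` produces (Plotly substitutes the %{y:...} and %{x|...} placeholders itself; the function name here is illustrative):

    def hover_xy(tick_fmt: str | None) -> str:
        # Mirrors _hover_xy: optional y tick format plus an ISO date for x.
        if tick_fmt:
            return f"%{{y:{tick_fmt}}}<br>%{{x|%Y-%m-%d}}"
        return "%{y}<br>%{x|%Y-%m-%d}"

    print(hover_xy(".2%"))  # %{y:.2%}<br>%{x|%Y-%m-%d}
    print(hover_xy(None))   # %{y}<br>%{x|%Y-%m-%d}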
@@ -880,6 +980,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             The timeseries self.tsdf
         mode: LiteralBarPlotMode
             The type of bar to use
+        title: str, optional
+            A title above the plot
         tick_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show
         filename: str, optional
@@ -904,34 +1006,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-        plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig, logo = load_plotly_dict()
         figure = Figure(fig)

         opacity = 0.7 if mode == "overlay" else None
-
-        hovertemplate = (
-            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
-            if tick_fmt
-            else "%{y}<br>%{x|%Y-%m-%d}"
-        )
+        hovertemplate = self._hover_xy(tick_fmt=tick_fmt)

         for item in range(self.tsdf.shape[1]):
             figure.add_bar(
@@ -943,37 +1033,29 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             )
         figure.update_layout(barmode=mode, yaxis={"tickformat": tick_fmt})

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.split(sep=".")[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

     def plot_series(
         self: Self,
         mode: LiteralLinePlotMode = "lines",
+        title: str | None = None,
         tick_fmt: str | None = None,
         filename: str | None = None,
         directory: DirectoryPath | None = None,
@@ -993,6 +1075,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             The timeseries self.tsdf
         mode: LiteralLinePlotMode, default: "lines"
             The type of scatter to use
+        title: str, optional
+            A title above the plot
         tick_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show
         filename: str, optional
@@ -1019,32 +1103,21 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-        plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig, logo = load_plotly_dict()
         figure = Figure(fig)

-        hovertemplate = (
-            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
-            if tick_fmt
-            else "%{y}<br>%{x|%Y-%m-%d}"
-        )
+        hovertemplate = self._hover_xy(tick_fmt=tick_fmt)

         for item in range(self.tsdf.shape[1]):
             figure.add_scatter(
@@ -1059,7 +1132,6 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         if show_last:
             txt = f"Last {{:{tick_fmt}}}" if tick_fmt else "Last {}"
-
             for item in range(self.tsdf.shape[1]):
                 figure.add_scatter(
                     x=[Series(self.tsdf.iloc[:, item]).index[-1]],
@@ -1073,31 +1145,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
                     textposition="top center",
                 )

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.split(sep=".")[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

@@ -1111,6 +1174,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         bargap: float = 0.0,
         bargroupgap: float = 0.0,
         curve_type: LiteralPlotlyHistogramCurveType = "kde",
+        title: str | None = None,
         x_fmt: str | None = None,
         y_fmt: str | None = None,
         filename: str | None = None,
@@ -1144,6 +1208,8 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             Sets the gap between bar “groups” at the same location coordinate
         curve_type: LiteralPlotlyHistogramCurveType, default: kde
             Specifies the type of distribution curve to overlay on the histogram
+        title: str, optional
+            A title above the plot
         y_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show on the y-axis
         x_fmt: str, optional
@@ -1174,32 +1240,19 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-        plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig_dict, logo = load_plotly_dict()
-
-        hovertemplate = f"Count: %{{y:{y_fmt}}}" if y_fmt else "Count: %{y}"
-
-        if x_fmt:
-            hovertemplate += f"<br>%{{x:{x_fmt}}}"
-        else:
-            hovertemplate += "<br>%{x}"
+        hovertemplate = self._hover_hist(x_fmt=x_fmt, y_fmt=y_fmt)

         msg = "plot_type must be 'bars' or 'lines'."
         if plot_type == "bars":
@@ -1221,10 +1274,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
                 bargroupgap=bargroupgap,
             )
         elif plot_type == "lines":
-            hist_data = [
-                cast("Series[float]", self.tsdf.loc[:, ds]).dropna().tolist()
-                for ds in self.tsdf
-            ]
+            hist_data = [self.tsdf[col] for col in self.tsdf.columns]
             figure = create_distplot(
                 hist_data=hist_data,
                 curve_type=curve_type,
@@ -1238,35 +1288,25 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             raise TypeError(msg)

         figure.update_layout(xaxis={"tickformat": x_fmt}, yaxis={"tickformat": y_fmt})
-
         figure.update_xaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")
         figure.update_yaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.rsplit(".", 1)[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig_dict["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig_dict["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

@@ -1276,7 +1316,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """https://www.investopedia.com/terms/a/arithmeticmean.asp.

         Parameters
@@ -1294,7 +1334,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized arithmetic mean of returns

         """
@@ -1306,29 +1346,23 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         if periods_in_a_year_fixed:
             time_factor = float(periods_in_a_year_fixed)
         else:
+            how_many = (
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+                .count()
+                .iloc[0]
+            )
             fraction = (later - earlier).days / 365.25
-
-                cast("int", earlier) : cast("int", later),
-                self.tsdf.columns.to_numpy()[0],
-            ].count()
-            time_factor = cast("int", how_many) / fraction
+            time_factor = how_many / fraction

         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .mean()
             * time_factor
         )

-
-            return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Arithmetic return",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Arithmetic return")

     def vol_func(
         self: Self,
@@ -1336,7 +1370,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Annualized volatility.

         Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -1356,7 +1390,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized volatility

         """
@@ -1368,25 +1402,18 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         if periods_in_a_year_fixed:
             time_factor = float(periods_in_a_year_fixed)
         else:
-            fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .count()
                 .iloc[0]
             )
+            fraction = (later - earlier).days / 365.25
             time_factor = how_many / fraction

-        data = self.tsdf.loc[cast("
+        data = self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
         result = data.ffill().pct_change().std().mul(sqrt(time_factor))

-
-            return float(cast("SupportsFloat", result.iloc[0]))
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Volatility",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Volatility")

     def vol_from_var_func(
         self: Self,
@@ -1398,7 +1425,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Implied annualized volatility.

         Implied annualized volatility from the Downside VaR using the assumption
@@ -1425,7 +1452,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Implied annualized volatility from the Downside VaR using the
             assumption that returns are normally distributed.

@@ -1453,7 +1480,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Target weight from VaR.

         A position weight multiplier from the ratio between a VaR implied
@@ -1486,7 +1513,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             A position weight multiplier from the ratio between a VaR implied
             volatility and a given target volatility. Multiplier = 1.0 -> target met

@@ -1517,7 +1544,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Volatility implied from VaR or Target Weight.

         The function returns a position weight multiplier from the ratio between
@@ -1552,7 +1579,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Target volatility if target_vol is provided otherwise the VaR
             implied volatility.

@@ -1567,23 +1594,25 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         else:
             fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .count()
                 .iloc[0]
             )
             time_factor = how_many / fraction
         if drift_adjust:
             imp_vol = (-sqrt(time_factor) / norm.ppf(level)) * (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
-                - self.tsdf.loc[cast("
+                - self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .sum()
                 / len(
-                    self.tsdf.loc[
+                    self.tsdf.loc[
+                        cast("Timestamp", earlier) : cast("Timestamp", later)
+                    ]
                     .ffill()
                     .pct_change(),
                 )
@@ -1591,7 +1620,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         else:
             imp_vol = (
                 -sqrt(time_factor)
-                * self.tsdf.loc[cast("
+                * self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
@@ -1610,14 +1639,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             result = imp_vol
             label = f"Imp vol from VaR {level:.0%}"

-
-            return float(cast("SupportsFloat", result.iloc[0]))
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name=label,
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name=label)

     def cvar_down_func(
         self: Self,
@@ -1625,7 +1647,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Downside Conditional Value At Risk "CVaR".

         https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.
@@ -1644,7 +1666,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside Conditional Value At Risk "CVaR"

         """
@@ -1653,32 +1675,19 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        cvar_df = self.tsdf.loc[
-
-        )
+        cvar_df = self.tsdf.loc[
+            cast("Timestamp", earlier) : cast("Timestamp", later)
+        ].copy(deep=True)
         result = [
-            cvar_df
-
-            .
-            .
-            .iloc[
-                : ceil(
-                    cast(
-                        "int",
-                        (1 - level) * cvar_df.loc[:, x].ffill().pct_change().count(),
-                    )
-                ),
-            ]
-            .mean()
-            for x in self.tsdf
+            (r := cvar_df[col].ffill().pct_change().sort_values())[
+                : ceil((1 - level) * r.count())
+            ].mean()
+            for col in cvar_df.columns
         ]
-
-
-
-            data=result,
-            index=self.tsdf.columns,
+
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name=f"CVaR {level:.1%}",
-            dtype="float64",
         )

     def lower_partial_moment_func(
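Note: the rewritten cvar_down_func sorts each column's returns and averages the worst (1 - level) share of them. A toy check of that tail-average expression, assuming a one-column DataFrame of daily prices:

    from math import ceil

    from pandas import DataFrame

    prices = DataFrame({"fund": [100.0, 101.0, 99.0, 98.5, 99.5, 97.0, 98.0]})
    level = 0.95
    cvar = [
        # Walrus expression keeps the sorted return series for the slice bound.
        (r := prices[col].ffill().pct_change().sort_values())[
            : ceil((1 - level) * r.count())
        ].mean()
        for col in prices.columns
    ]
    print(cvar)  # mean of the worst ~5% of daily returns per column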
@@ -1689,7 +1698,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Downside Deviation if order set to 2.

         If order is set to 2 the function calculates the standard
@@ -1716,7 +1725,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside deviation if order set to 2

         """
@@ -1732,7 +1741,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         )

         how_many = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .count(numeric_only=True)
@@ -1749,7 +1758,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         per_period_mar = min_accepted_return / time_factor
         diff = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .sub(per_period_mar)
@@ -1762,13 +1771,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         dd_order = 2

-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=result,
             name="Downside deviation" if order == dd_order else f"LPM{order}",
-            dtype="float64",
         )

     def geo_ret_func(
@@ -1776,7 +1781,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Compounded Annual Growth Rate (CAGR).

         https://www.investopedia.com/terms/c/cagr.asp.
@@ -1793,7 +1798,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Compounded Annual Growth Rate (CAGR)

         """
@@ -1815,13 +1820,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         result = (self.tsdf.loc[later] / self.tsdf.loc[earlier]) ** (1 / fraction) - 1

-
-
-        return Series(
-            data=result.to_numpy(),
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name="Geometric return",
-            dtype="float64",
         )

     def skew_func(
@@ -1829,7 +1830,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Skew of the return distribution.

         https://www.investopedia.com/terms/s/skewness.asp.
@@ -1846,7 +1847,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Skew of the return distribution

         """
@@ -1855,30 +1856,24 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        result
-            a=
-
-
-
+        result = skew(
+            a=(
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+                .ffill()
+                .pct_change()
+            ),
             bias=True,
             nan_policy="omit",
         )

-
-            return float(result[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Skew",
-            dtype="float64",
-        )
+        return self._coerce_result(result=cast("Series[float]", result), name="Skew")

     def kurtosis_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Kurtosis of the return distribution.

         https://www.investopedia.com/terms/k/kurtosis.asp.
@@ -1895,7 +1890,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Kurtosis of the return distribution

         """
@@ -1904,9 +1899,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        result
+        result = kurtosis(
             a=(
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
             ),
@@ -1915,13 +1910,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             nan_policy="omit",
         )

-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name="Kurtosis",
-            dtype="float64",
         )

     def max_drawdown_func(
@@ -1930,7 +1921,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         min_periods: int = 1,
-    ) ->
+    ) -> Combo_co:
         """Maximum drawdown without any limit on date range.

         https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.
@@ -1949,7 +1940,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown without any limit on date range

         """
@@ -1959,26 +1950,20 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
-            / self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+            / self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .expanding(min_periods=min_periods)
             .max()
         ).min() - 1
-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Max drawdown",
-            dtype="float64",
-        )
+
+        return self._coerce_result(result=result, name="Max drawdown")

     def positive_share_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate share of percentage changes that are greater than zero.

         Parameters
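Note: max_drawdown_func keeps the same expanding-maximum formula; only the slicing casts and the result coercion changed. A small numeric check of the formula itself on toy data:

    from pandas import DataFrame

    prices = DataFrame({"fund": [100.0, 110.0, 99.0, 104.5, 93.0]})
    # Lowest ratio of price to its running peak, minus one.
    max_dd = (prices / prices.expanding(min_periods=1).max()).min() - 1
    print(float(max_dd.iloc[0]))  # about -0.1545, the 93.0 trough versus the 110.0 peak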
@@ -1993,7 +1978,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate share of percentage changes that are greater than zero

         """
@@ -2004,10 +1989,10 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         pos = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()[1:][
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()[1:]
                 > zero
@@ -2015,20 +2000,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             .count()
         )
         tot = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .count()
         )
-
-
-
-        return Series(
-            data=share,
-            index=self.tsdf.columns,
-            name="Positive share",
-            dtype="float64",
-        )
+        result = pos / tot
+
+        return self._coerce_result(result=result, name="Positive share")

     def ret_vol_ratio_func(
         self: Self,
@@ -2037,7 +2016,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Ratio between arithmetic mean of returns and annualized volatility.

         The ratio of annualized arithmetic mean of returns and annualized
@@ -2063,12 +2042,12 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Ratio of the annualized arithmetic mean of returns and annualized
             volatility or, if risk-free return provided, Sharpe ratio

         """
-
+        result = Series(
             self.arithmetic_ret_func(
                 months_from_last=months_from_last,
                 from_date=from_date,
@@ -2083,14 +2062,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             periods_in_a_year_fixed=periods_in_a_year_fixed,
         )

-
-            return float(cast("float64", ratio.iloc[0]))
-        return Series(
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Return vol ratio",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Return vol ratio")

     def sortino_ratio_func(
         self: Self,
@@ -2101,7 +2073,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Sortino Ratio or Kappa3 Ratio.

         The Sortino ratio calculated as ( return - risk free return )
@@ -2134,12 +2106,12 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Sortino ratio calculated as ( return - riskfree return ) /
             downside deviation (std dev of returns below MAR)

         """
-
+        result = Series(
             self.arithmetic_ret_func(
                 months_from_last=months_from_last,
                 from_date=from_date,
@@ -2157,14 +2129,9 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         )

         sortino_order = 2
-        if
-
-        return
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Sortino ratio" if order == sortino_order else "Kappa-3 ratio",
-            dtype="float64",
-        )
+        name = "Sortino ratio" if order == sortino_order else "Kappa-3 ratio"
+
+        return self._coerce_result(result=result, name=name)

     def omega_ratio_func(
         self: Self,
@@ -2172,7 +2139,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Omega Ratio.

         The Omega Ratio compares returns above a certain target level
@@ -2194,7 +2161,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Omega ratio calculation

         """
@@ -2204,29 +2171,22 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         retdf = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
         )
         pos = retdf[retdf > min_accepted_return].sub(min_accepted_return).sum()
         neg = retdf[retdf < min_accepted_return].sub(min_accepted_return).sum()
-
+        result = pos / -neg

-
-            return float(cast("float64", ratio.iloc[0]))
-        return Series(
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Omega ratio",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Omega ratio")

     def value_ret_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate simple return.

         Parameters
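Note: the Omega ratio step above divides the summed gains over the minimum accepted return by the absolute value of the summed shortfalls. A worked example of that pos / -neg step on a toy return frame with MAR = 0:

    from pandas import DataFrame

    returns = DataFrame({"fund": [0.01, -0.02, 0.03, -0.01, 0.02]})
    mar = 0.0
    pos = returns[returns > mar].sub(mar).sum()
    neg = returns[returns < mar].sub(mar).sum()
    print(float((pos / -neg).iloc[0]))  # 0.06 / 0.03 = 2.0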
@@ -2241,7 +2201,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate simple return

         """
@@ -2258,22 +2218,18 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             )
             raise InitialValueZeroError(msg)

-        result =
-
-
-            return float(result.iloc[0])
-        return Series(
-            data=result.to_numpy(),
-            index=self.tsdf.columns,
-            name="Simple return",
-            dtype="float64",
+        result = cast(
+            "Series[float]",
+            self.tsdf.loc[later] / self.tsdf.loc[earlier] - 1,
         )

+        return self._coerce_result(result=result, name="Simple return")
+
     def value_ret_calendar_period(
         self: Self,
         year: int,
         month: int | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate simple return for a specific calendar period.

         Parameters
@@ -2285,7 +2241,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate simple return for a specific calendar period

         """
@@ -2296,16 +2252,10 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         vrdf = self.tsdf.copy()
         vrdf.index = DatetimeIndex(vrdf.index)
         resultdf = DataFrame(vrdf.ffill().pct_change())
-
-
-
-
-        return Series(
-            data=cal_period,
-            index=self.tsdf.columns,
-            name=period,
-            dtype="float64",
-        )
+        plus_one = resultdf.loc[period] + 1
+        result = plus_one.cumprod(axis="index").iloc[-1] - 1
+
+        return self._coerce_result(result=result, name=period)

     def var_down_func(
         self: Self,
@@ -2314,7 +2264,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         interpolation: LiteralQuantileInterp = "lower",
-    ) ->
+    ) -> Combo_co:
         """Downside Value At Risk, "VaR".

         The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -2336,7 +2286,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside Value At Risk

         """
@@ -2346,20 +2296,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .quantile(1 - level, interpolation=interpolation)
         )

-
-            return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name=f"VaR {level:.1%}",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name=f"VaR {level:.1%}")

     def worst_func(
         self: Self,
@@ -2367,7 +2310,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Most negative percentage change over a rolling number of observations.

         Parameters
@@ -2384,7 +2327,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Most negative percentage change over a rolling number of observations
             within a chosen date range

@@ -2395,7 +2338,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .rolling(observations, min_periods=observations)
@@ -2403,21 +2346,14 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             .min()
         )

-
-            return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Worst",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Worst")

     def z_score_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Z-score as (last return - mean return) / standard deviation of returns.

         https://www.investopedia.com/terms/z/zscore.asp.
@@ -2434,7 +2370,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Z-score as (last return - mean return) / standard deviation of returns

         """
@@ -2444,20 +2380,13 @@ class _CommonModel(BaseModel): # type: ignore[misc]
             to_dt=to_date,
         )
         zscframe = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
         )
         result = (zscframe.iloc[-1] - zscframe.mean()) / zscframe.std()

-
-            return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Z-score",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Z-score")

     def rolling_cvar_down(
         self: Self,
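Note: z_score_func still reports how far the latest return sits from the sample mean in units of standard deviation; only the slice casts and the final coercion changed. The formula on a bare return series:

    from pandas import Series

    rets = Series([0.01, -0.005, 0.002, 0.015, -0.01])
    z_score = (rets.iloc[-1] - rets.mean()) / rets.std()
    print(round(z_score, 3))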
@@ -2601,7 +2530,7 @@ class _CommonModel(BaseModel): # type: ignore[misc]

         s = log(self.tsdf.iloc[:, column]).diff()
         volseries = s.rolling(window=observations, min_periods=observations).std(
-            ddof=dlta_degr_freedms
+            ddof=dlta_degr_freedms,
         ) * sqrt(time_factor)

         voldf = volseries.dropna().to_frame()