openseries-1.9.4-py3-none-any.whl → openseries-1.9.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openseries/_common_model.py +373 -451
- openseries/datefixer.py +8 -6
- openseries/frame.py +110 -93
- openseries/owntypes.py +48 -47
- openseries/plotly_layouts.json +1 -1
- openseries/portfoliotools.py +10 -12
- openseries/report.py +66 -61
- openseries/series.py +51 -37
- openseries/simulation.py +3 -3
- {openseries-1.9.4.dist-info → openseries-1.9.6.dist-info}/METADATA +4 -3
- openseries-1.9.6.dist-info/RECORD +17 -0
- {openseries-1.9.4.dist-info → openseries-1.9.6.dist-info}/WHEEL +1 -1
- openseries-1.9.4.dist-info/RECORD +0 -17
- {openseries-1.9.4.dist-info → openseries-1.9.6.dist-info/licenses}/LICENSE.md +0 -0
openseries/_common_model.py
CHANGED
@@ -7,7 +7,6 @@ https://github.com/CaptorAB/openseries/blob/master/LICENSE.md
 SPDX-License-Identifier: BSD-3-Clause
 """

-# mypy: disable-error-code="no-any-return"
 from __future__ import annotations

 import datetime as dt
@@ -17,22 +16,25 @@ from math import ceil
 from pathlib import Path
 from secrets import choice
 from string import ascii_letters
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Generic, Literal, cast

-from numpy import float64, inf, isnan, log, maximum, sqrt
+from numpy import asarray, float64, inf, isnan, log, maximum, sqrt

 from .owntypes import (
+    CaptorLogoType,
+    Combo_co,
     DateAlignmentError,
     InitialValueZeroError,
     NumberOfItemsAndLabelsNotSameError,
+    PlotlyConfigType,
     ResampleDataLossError,
     Self,
     ValueType,
 )

 if TYPE_CHECKING:  # pragma: no cover
-    from numpy.typing import NDArray
     from openpyxl.worksheet.worksheet import Worksheet
+    from pandas import Timestamp

     from .owntypes import (
         CountriesType,
@@ -67,7 +69,7 @@ from plotly.graph_objs import Figure  # type: ignore[import-untyped]
 from plotly.io import to_html  # type: ignore[import-untyped]
 from plotly.offline import plot  # type: ignore[import-untyped]
 from pydantic import BaseModel, ConfigDict, DirectoryPath, ValidationError
-from scipy.stats import (
+from scipy.stats import (
     kurtosis,
     norm,
     skew,
@@ -86,7 +88,7 @@ from .load_plotly import load_plotly_dict


 # noinspection PyTypeChecker
-class _CommonModel(BaseModel
+class _CommonModel(BaseModel, Generic[Combo_co]):
     """Declare _CommonModel."""

     tsdf: DataFrame = DataFrame(dtype="float64")
@@ -97,6 +99,20 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         revalidate_instances="always",
     )

+    def _coerce_result(self: Self, result: Series[float], name: str) -> Combo_co:
+        if self.tsdf.shape[1] == 1:
+            arr = float(asarray(a=result, dtype=float64).squeeze())
+            return cast("Combo_co", arr)  # type: ignore[redundant-cast]
+        return cast(
+            "Combo_co",
+            Series(
+                data=result,
+                index=self.tsdf.columns,
+                name=name,
+                dtype="float64",
+            ),
+        )
+
     @property
     def length(self: Self) -> int:
         """Number of observations.
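The `_coerce_result` helper added above is what lets every measure in `_CommonModel` return either a plain `float` (one column in `tsdf`) or a labelled `Series` (several columns). Below is a minimal standalone sketch of that behaviour, using made-up column names rather than anything from the package:

```python
from numpy import asarray, float64
from pandas import DataFrame, Series


def coerce_result(tsdf: DataFrame, result: Series, name: str) -> float | Series:
    """Collapse a one-column result to a float, otherwise return a named Series."""
    if tsdf.shape[1] == 1:
        return float(asarray(result, dtype=float64).squeeze())
    return Series(data=result, index=tsdf.columns, name=name, dtype="float64")


one_col = DataFrame({"fund_a": [100.0, 101.0, 99.5]})
two_col = DataFrame({"fund_a": [100.0, 101.0], "fund_b": [100.0, 98.0]})

print(coerce_result(one_col, one_col.iloc[-1], "Last value"))  # plain float
print(coerce_result(two_col, two_col.iloc[-1], "Last value"))  # labelled Series
```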
@@ -171,17 +187,17 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.length / self.yearfrac

     @property
-    def max_drawdown_cal_year(self: Self) ->
+    def max_drawdown_cal_year(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown in a single calendar year.

         """
         years = Index(d.year for d in self.tsdf.index)
-
+        result = (
             self.tsdf.groupby(years)
             .apply(
                 lambda prices: (prices / prices.expanding(min_periods=1).max()).min()
@@ -189,53 +205,46 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             )
             .min()
         )
-
-        return float(mddc.iloc[0])
-        return Series(
-            data=mddc,
-            index=self.tsdf.columns,
-            name="Max drawdown in cal yr",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Max drawdown in cal yr")

     @property
-    def geo_ret(self: Self) ->
+    def geo_ret(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/c/cagr.asp.

         Returns:
         -------
-
+        Combo_co
             Compounded Annual Growth Rate (CAGR)

         """
         return self.geo_ret_func()

     @property
-    def arithmetic_ret(self: Self) ->
+    def arithmetic_ret(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/a/arithmeticmean.asp.

         Returns:
         -------
-
+        Combo_co
             Annualized arithmetic mean of returns

         """
         return self.arithmetic_ret_func()

     @property
-    def value_ret(self: Self) ->
+    def value_ret(self: Self) -> Combo_co:
         """Simple return.

         Returns:
         -------
-
+        Combo_co
             Simple return

         """
         return self.value_ret_func()

     @property
-    def vol(self: Self) ->
+    def vol(self: Self) -> Combo_co:
         """Annualized volatility.

         Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -243,14 +252,14 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized volatility

         """
         return self.vol_func()

     @property
-    def downside_deviation(self: Self) ->
+    def downside_deviation(self: Self) -> Combo_co:
         """Downside Deviation.

         Standard deviation of returns that are below a Minimum Accepted Return
@@ -259,23 +268,24 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside deviation

         """
         min_accepted_return: float = 0.0
         order: Literal[2, 3] = 2
         return self.lower_partial_moment_func(
-            min_accepted_return=min_accepted_return,
+            min_accepted_return=min_accepted_return,
+            order=order,
         )

     @property
-    def ret_vol_ratio(self: Self) ->
+    def ret_vol_ratio(self: Self) -> Combo_co:
         """Ratio of annualized arithmetic mean of returns and annualized volatility.

         Returns:
         -------
-
+        Combo_co
             Ratio of the annualized arithmetic mean of returns and annualized
             volatility.

@@ -284,12 +294,12 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.ret_vol_ratio_func(riskfree_rate=riskfree_rate)

     @property
-    def sortino_ratio(self: Self) ->
+    def sortino_ratio(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/s/sortinoratio.asp.

         Returns:
         -------
-
+        Combo_co
             Sortino ratio calculated as the annualized arithmetic mean of returns
             / downside deviation. The ratio implies that the riskfree asset has zero
             volatility, and a minimum acceptable return of zero.
@@ -303,7 +313,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         )

     @property
-    def kappa3_ratio(self: Self) ->
+    def kappa3_ratio(self: Self) -> Combo_co:
         """Kappa-3 ratio.

         The Kappa-3 ratio is a generalized downside-risk ratio defined as
@@ -314,7 +324,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Kappa-3 ratio calculation with the riskfree rate and
             Minimum Acceptable Return (MAR) both set to zero.

@@ -329,12 +339,12 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         )

     @property
-    def omega_ratio(self: Self) ->
+    def omega_ratio(self: Self) -> Combo_co:
         """https://en.wikipedia.org/wiki/Omega_ratio.

         Returns:
         -------
-
+        Combo_co
             Omega ratio calculation

         """
@@ -342,24 +352,24 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.omega_ratio_func(min_accepted_return=minimum_accepted_return)

     @property
-    def z_score(self: Self) ->
+    def z_score(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/z/zscore.asp.

         Returns:
         -------
-
+        Combo_co
             Z-score as (last return - mean return) / standard deviation of returns.

         """
         return self.z_score_func()

     @property
-    def max_drawdown(self: Self) ->
+    def max_drawdown(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown without any limit on date range

         """
@@ -391,12 +401,12 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         ).dt.date

     @property
-    def worst(self: Self) ->
+    def worst(self: Self) -> Combo_co:
         """Most negative percentage change.

         Returns:
         -------
-
+        Combo_co
             Most negative percentage change

         """
@@ -404,7 +414,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.worst_func(observations=observations)

     @property
-    def worst_month(self: Self) ->
+    def worst_month(self: Self) -> Combo_co:
         """Most negative month.

         Returns:
@@ -419,8 +429,8 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             countries = self.countries
             markets = self.markets
         except AttributeError:
-            countries = self.constituents[0].countries
-            markets = self.constituents[0].markets
+            countries = self.constituents[0].countries  # type: ignore[attr-defined]
+            markets = self.constituents[0].markets  # type: ignore[attr-defined]

         wmdf = self.tsdf.copy()

@@ -445,58 +455,51 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         result = wmdf.ffill().pct_change().min()

-
-        return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Worst month",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Worst month")

     @property
-    def positive_share(self: Self) ->
+    def positive_share(self: Self) -> Combo_co:
         """The share of percentage changes that are greater than zero.

         Returns:
         -------
-
+        Combo_co
             The share of percentage changes that are greater than zero

         """
         return self.positive_share_func()

     @property
-    def skew(self: Self) ->
+    def skew(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/s/skewness.asp.

         Returns:
         -------
-
+        Combo_co
             Skew of the return distribution

         """
         return self.skew_func()

     @property
-    def kurtosis(self: Self) ->
+    def kurtosis(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/k/kurtosis.asp.

         Returns:
         -------
-
+        Combo_co
             Kurtosis of the return distribution

         """
         return self.kurtosis_func()

     @property
-    def cvar_down(self: Self) ->
+    def cvar_down(self: Self) -> Combo_co:
         """https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.

         Returns:
         -------
-
+        Combo_co
             Downside 95% Conditional Value At Risk "CVaR"

         """
@@ -504,7 +507,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.cvar_down_func(level=level)

     @property
-    def var_down(self: Self) ->
+    def var_down(self: Self) -> Combo_co:
         """Downside 95% Value At Risk (VaR).

         The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -512,7 +515,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside 95% Value At Risk (VaR)

         """
@@ -521,14 +524,14 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         return self.var_down_func(level=level, interpolation=interpolation)

     @property
-    def vol_from_var(self: Self) ->
+    def vol_from_var(self: Self) -> Combo_co:
         """Implied annualized volatility from Downside 95% Value at Risk.

         Assumes that returns are normally distributed.

         Returns:
         -------
-
+        Combo_co
             Implied annualized volatility from the Downside 95% VaR using the
             assumption that returns are normally distributed.

@@ -626,25 +629,25 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             try:
                 self.countries = countries
             except ValidationError:
-                for serie in self.constituents:
+                for serie in self.constituents:  # type: ignore[attr-defined]
                     serie.countries = countries
         else:
             try:
                 countries = self.countries
             except AttributeError:
-                countries = self.constituents[0].countries
+                countries = self.constituents[0].countries  # type: ignore[attr-defined]

         if markets:
             try:
                 self.markets = markets
             except ValidationError:
-                for serie in self.constituents:
+                for serie in self.constituents:  # type: ignore[attr-defined]
                     serie.markets = markets
         else:
             try:
                 markets = self.markets
             except AttributeError:
-                markets = self.constituents[0].markets
+                markets = self.constituents[0].markets  # type: ignore[attr-defined]

         calendar = holiday_calendar(
             startyear=startyear,
@@ -659,7 +662,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             for d in date_range(
                 start=cast("dt.date", self.tsdf.first_valid_index()),
                 end=cast("dt.date", self.tsdf.last_valid_index()),
-                freq=CustomBusinessDay(calendar=calendar)
+                freq=CustomBusinessDay(calendar=calendar)
+                if any([countries, markets, custom_holidays])
+                else None,
             )
         ]
         self.tsdf = self.tsdf.reindex(labels=d_range, method=method, copy=False)
@@ -860,9 +865,103 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         return str(sheetfile)

+    @staticmethod
+    def _ensure_labels(
+        ncols: int,
+        labels: list[str] | None,
+        default_labels: list[str],
+    ) -> list[str]:
+        """Validate or infer labels for plotting."""
+        if labels:
+            if len(labels) != ncols:
+                msg = "Must provide same number of labels as items in frame."
+                raise NumberOfItemsAndLabelsNotSameError(msg)
+            return labels
+        return default_labels
+
+    @staticmethod
+    def _resolve_dir(directory: DirectoryPath | None) -> Path:
+        """Resolve output directory for plot files."""
+        if directory:
+            return Path(directory).resolve()
+        if (Path.home() / "Documents").exists():
+            return Path.home() / "Documents"
+        return Path(stack()[2].filename).parent
+
+    @staticmethod
+    def _hover_xy(tick_fmt: str | None) -> str:
+        """Create hovertemplate for y-value and date x-axis."""
+        return (
+            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
+            if tick_fmt
+            else "%{y}<br>%{x|%Y-%m-%d}"
+        )
+
+    @staticmethod
+    def _hover_hist(x_fmt: str | None, y_fmt: str | None) -> str:
+        """Create hovertemplate for histogram plots."""
+        y = f"%{{y:{y_fmt}}}" if y_fmt else "%{y}"
+        x = f"%{{x:{x_fmt}}}" if x_fmt else "%{x}"
+        return f"Count: {y}<br>{x}"
+
+    @staticmethod
+    def _apply_title_logo(
+        figure: Figure,
+        logo: CaptorLogoType,
+        title: str | None,
+        *,
+        add_logo: bool,
+    ) -> None:
+        """Apply optional title and logo to a Plotly Figure."""
+        if add_logo:
+            figure.add_layout_image(logo)
+        if title:
+            figure.update_layout(
+                {"title": {"text": f"<b>{title}</b><br>", "font": {"size": 36}}},
+            )
+
+    @staticmethod
+    def _emit_output(
+        figure: Figure,
+        fig_config: PlotlyConfigType,
+        output_type: LiteralPlotlyOutput,
+        plotfile: Path,
+        filename: str,
+        *,
+        include_plotlyjs_bool: bool,
+        auto_open: bool,
+    ) -> str:
+        """Write a file or return inline HTML string from a Plotly Figure."""
+        if output_type == "file":
+            plot(
+                figure_or_data=figure,
+                filename=str(plotfile),
+                auto_open=auto_open,
+                auto_play=False,
+                link_text="",
+                include_plotlyjs=include_plotlyjs_bool,
+                config=fig_config,
+                output_type=output_type,
+            )
+            return str(plotfile)
+
+        div_id = filename.rsplit(".", 1)[0]
+        return cast(
+            "str",
+            to_html(
+                fig=figure,
+                config=fig_config,
+                auto_play=False,
+                include_plotlyjs=include_plotlyjs_bool,
+                full_html=False,
+                div_id=div_id,
+            ),
+        )
+
     def plot_bars(
         self: Self,
         mode: LiteralBarPlotMode = "group",
+        title: str | None = None,
         tick_fmt: str | None = None,
         filename: str | None = None,
         directory: DirectoryPath | None = None,
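The block above extracts the label checking, output-directory resolution, hover-template construction, and file-versus-div emission that the plotting methods previously repeated inline, and each plot method also gains an optional `title` argument. As a rough illustration of the kind of string the extracted hover-template helper builds for Plotly, here is a standalone re-implementation for illustration only, not the package's private method:

```python
def hover_xy(tick_fmt: str | None) -> str:
    # Formatted y-value followed by an ISO-formatted date, in Plotly hovertemplate syntax.
    return f"%{{y:{tick_fmt}}}<br>%{{x|%Y-%m-%d}}" if tick_fmt else "%{y}<br>%{x|%Y-%m-%d}"


print(hover_xy(".2%"))  # %{y:.2%}<br>%{x|%Y-%m-%d}
print(hover_xy(None))   # %{y}<br>%{x|%Y-%m-%d}
```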
@@ -881,6 +980,8 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             The timeseries self.tsdf
         mode: LiteralBarPlotMode
             The type of bar to use
+        title: str, optional
+            A title above the plot
         tick_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show
         filename: str, optional
@@ -905,34 +1006,22 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-            plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig, logo = load_plotly_dict()
         figure = Figure(fig)

         opacity = 0.7 if mode == "overlay" else None
-
-        hovertemplate = (
-            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
-            if tick_fmt
-            else "%{y}<br>%{x|%Y-%m-%d}"
-        )
+        hovertemplate = self._hover_xy(tick_fmt=tick_fmt)

         for item in range(self.tsdf.shape[1]):
             figure.add_bar(
@@ -944,37 +1033,29 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             )
         figure.update_layout(barmode=mode, yaxis={"tickformat": tick_fmt})

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.split(sep=".")[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

     def plot_series(
         self: Self,
         mode: LiteralLinePlotMode = "lines",
+        title: str | None = None,
         tick_fmt: str | None = None,
         filename: str | None = None,
         directory: DirectoryPath | None = None,
@@ -994,6 +1075,8 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             The timeseries self.tsdf
         mode: LiteralLinePlotMode, default: "lines"
             The type of scatter to use
+        title: str, optional
+            A title above the plot
         tick_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show
         filename: str, optional
@@ -1020,32 +1103,21 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-            plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig, logo = load_plotly_dict()
         figure = Figure(fig)

-        hovertemplate = (
-            f"%{{y:{tick_fmt}}}<br>%{{x|{'%Y-%m-%d'}}}"
-            if tick_fmt
-            else "%{y}<br>%{x|%Y-%m-%d}"
-        )
+        hovertemplate = self._hover_xy(tick_fmt=tick_fmt)

         for item in range(self.tsdf.shape[1]):
             figure.add_scatter(
@@ -1060,7 +1132,6 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         if show_last:
             txt = f"Last {{:{tick_fmt}}}" if tick_fmt else "Last {}"
-
             for item in range(self.tsdf.shape[1]):
                 figure.add_scatter(
                     x=[Series(self.tsdf.iloc[:, item]).index[-1]],
@@ -1074,31 +1145,22 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
                     textposition="top center",
                 )

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.split(sep=".")[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

@@ -1112,6 +1174,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         bargap: float = 0.0,
         bargroupgap: float = 0.0,
         curve_type: LiteralPlotlyHistogramCurveType = "kde",
+        title: str | None = None,
         x_fmt: str | None = None,
         y_fmt: str | None = None,
         filename: str | None = None,
@@ -1145,6 +1208,8 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             Sets the gap between bar “groups” at the same location coordinate
         curve_type: LiteralPlotlyHistogramCurveType, default: kde
             Specifies the type of distribution curve to overlay on the histogram
+        title: str, optional
+            A title above the plot
         y_fmt: str, optional
             None, '%', '.1%' depending on number of decimals to show on the y-axis
         x_fmt: str, optional
@@ -1175,32 +1240,19 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             Plotly Figure and a div section or a html filename with location

         """
-
-
-
-
-
-        labels = list(self.tsdf.columns.get_level_values(0))
-
-        if directory:
-            dirpath = Path(directory).resolve()
-        elif Path.home().joinpath("Documents").exists():
-            dirpath = Path.home().joinpath("Documents")
-        else:
-            dirpath = Path(stack()[1].filename).parent
+        labels = self._ensure_labels(
+            ncols=self.tsdf.shape[1],
+            labels=labels,
+            default_labels=list(self.tsdf.columns.get_level_values(0)),
+        )

+        dirpath = self._resolve_dir(directory=directory)
         if not filename:
-            filename = "
-            plotfile = dirpath
+            filename = f"{''.join(choice(ascii_letters) for _ in range(6))}.html"
+        plotfile = dirpath / filename

         fig_dict, logo = load_plotly_dict()
-
-        hovertemplate = f"Count: %{{y:{y_fmt}}}" if y_fmt else "Count: %{y}"
-
-        if x_fmt:
-            hovertemplate += f"<br>%{{x:{x_fmt}}}"
-        else:
-            hovertemplate += "<br>%{x}"
+        hovertemplate = self._hover_hist(x_fmt=x_fmt, y_fmt=y_fmt)

         msg = "plot_type must be 'bars' or 'lines'."
         if plot_type == "bars":
@@ -1222,10 +1274,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
                 bargroupgap=bargroupgap,
             )
         elif plot_type == "lines":
-            hist_data = [
-                cast("Series[float]", self.tsdf.loc[:, ds]).dropna().tolist()
-                for ds in self.tsdf
-            ]
+            hist_data = [self.tsdf[col] for col in self.tsdf.columns]
             figure = create_distplot(
                 hist_data=hist_data,
                 curve_type=curve_type,
@@ -1239,35 +1288,25 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             raise TypeError(msg)

         figure.update_layout(xaxis={"tickformat": x_fmt}, yaxis={"tickformat": y_fmt})
-
         figure.update_xaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")
         figure.update_yaxes(zeroline=True, zerolinewidth=2, zerolinecolor="lightgrey")

-
-        figure
+        self._apply_title_logo(
+            figure=figure,
+            title=title,
+            add_logo=add_logo,
+            logo=logo,
+        )

-
-
-
-
-
-
-
-
-
-                output_type=output_type,
-            )
-            string_output = str(plotfile)
-        else:
-            div_id = filename.rsplit(".", 1)[0]
-            string_output = to_html(
-                fig=figure,
-                config=fig_dict["config"],
-                auto_play=False,
-                include_plotlyjs=cast("bool", include_plotlyjs),
-                full_html=False,
-                div_id=div_id,
-            )
+        string_output = self._emit_output(
+            figure=figure,
+            fig_config=fig_dict["config"],
+            include_plotlyjs_bool=cast("bool", include_plotlyjs),
+            output_type=output_type,
+            auto_open=auto_open,
+            plotfile=plotfile,
+            filename=filename,
+        )

         return figure, string_output

@@ -1277,7 +1316,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """https://www.investopedia.com/terms/a/arithmeticmean.asp.

         Parameters
@@ -1295,7 +1334,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized arithmetic mean of returns

         """
@@ -1307,29 +1346,23 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         if periods_in_a_year_fixed:
             time_factor = float(periods_in_a_year_fixed)
         else:
+            how_many = (
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+                .count()
+                .iloc[0]
+            )
             fraction = (later - earlier).days / 365.25
-
-                cast("int", earlier) : cast("int", later),
-                self.tsdf.columns.to_numpy()[0],
-            ].count()
-            time_factor = cast("int", how_many) / fraction
+            time_factor = how_many / fraction

         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .mean()
             * time_factor
         )

-
-        return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Arithmetic return",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Arithmetic return")

     def vol_func(
         self: Self,
@@ -1337,7 +1370,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Annualized volatility.

         Based on Pandas .std() which is the equivalent of stdev.s([...]) in MS Excel.
@@ -1357,7 +1390,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Annualized volatility

         """
@@ -1369,25 +1402,18 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         if periods_in_a_year_fixed:
             time_factor = float(periods_in_a_year_fixed)
         else:
-            fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .count()
                 .iloc[0]
             )
+            fraction = (later - earlier).days / 365.25
             time_factor = how_many / fraction

-        data = self.tsdf.loc[cast("
+        data = self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
         result = data.ffill().pct_change().std().mul(sqrt(time_factor))

-
-        return float(cast("SupportsFloat", result.iloc[0]))
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Volatility",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Volatility")

     def vol_from_var_func(
         self: Self,
@@ -1399,7 +1425,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Implied annualized volatility.

         Implied annualized volatility from the Downside VaR using the assumption
@@ -1426,7 +1452,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Implied annualized volatility from the Downside VaR using the
             assumption that returns are normally distributed.

@@ -1454,7 +1480,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Target weight from VaR.

         A position weight multiplier from the ratio between a VaR implied
@@ -1487,7 +1513,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             A position weight multiplier from the ratio between a VaR implied
             volatility and a given target volatility. Multiplier = 1.0 -> target met

@@ -1518,7 +1544,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         periods_in_a_year_fixed: DaysInYearType | None = None,
         *,
         drift_adjust: bool = False,
-    ) ->
+    ) -> Combo_co:
         """Volatility implied from VaR or Target Weight.

         The function returns a position weight multiplier from the ratio between
@@ -1553,7 +1579,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Target volatility if target_vol is provided otherwise the VaR
             implied volatility.

@@ -1568,23 +1594,25 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         else:
             fraction = (later - earlier).days / 365.25
             how_many = (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .count()
                 .iloc[0]
             )
             time_factor = how_many / fraction
         if drift_adjust:
             imp_vol = (-sqrt(time_factor) / norm.ppf(level)) * (
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
-                - self.tsdf.loc[cast("
+                - self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .sum()
                 / len(
-                    self.tsdf.loc[
+                    self.tsdf.loc[
+                        cast("Timestamp", earlier) : cast("Timestamp", later)
+                    ]
                     .ffill()
                     .pct_change(),
                 )
@@ -1592,7 +1620,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         else:
             imp_vol = (
                 -sqrt(time_factor)
-                * self.tsdf.loc[cast("
+                * self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
                 .quantile(1 - level, interpolation=interpolation)
@@ -1611,14 +1639,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             result = imp_vol
             label = f"Imp vol from VaR {level:.0%}"

-
-        return float(cast("SupportsFloat", result.iloc[0]))
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name=label,
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name=label)

     def cvar_down_func(
         self: Self,
@@ -1626,7 +1647,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Downside Conditional Value At Risk "CVaR".

         https://www.investopedia.com/terms/c/conditional_value_at_risk.asp.
@@ -1645,7 +1666,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside Conditional Value At Risk "CVaR"

         """
@@ -1654,33 +1675,19 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        cvar_df = self.tsdf.loc[
-
-        )
+        cvar_df = self.tsdf.loc[
+            cast("Timestamp", earlier) : cast("Timestamp", later)
+        ].copy(deep=True)
         result = [
-            cvar_df
-
-            .
-            .
-            .iloc[
-                : ceil(
-                    (1 - level)
-                    * cvar_df.loc[:, x]  # type: ignore[index]
-                    .ffill()
-                    .pct_change()
-                    .count(),
-                ),
-            ]
-            .mean()
-            for x in self.tsdf
+            (r := cvar_df[col].ffill().pct_change().sort_values())[
+                : ceil((1 - level) * r.count())
+            ].mean()
+            for col in cvar_df.columns
         ]
-
-
-
-            data=result,
-            index=self.tsdf.columns,
+
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name=f"CVaR {level:.1%}",
-            dtype="float64",
         )

     def lower_partial_moment_func(
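The rewritten CVaR loop above sorts each column's returns with a walrus expression and averages the worst (1 - level) share of observations. The same quantity computed on a made-up return series, skipping the price-to-return step the method performs on `tsdf`:

```python
from math import ceil

from pandas import Series

level = 0.95
returns = Series([0.01, -0.02, 0.004, -0.03, 0.015, -0.01, 0.02, -0.005])

# Mean of the worst (1 - level) share of observations.
tail = ceil((1 - level) * returns.count())
cvar_95 = returns.sort_values()[:tail].mean()
print(cvar_95)  # -0.03: with eight observations the tail holds a single value
```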
@@ -1691,7 +1698,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Downside Deviation if order set to 2.

         If order is set to 2 the function calculates the standard
@@ -1718,7 +1725,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside deviation if order set to 2

         """
@@ -1734,7 +1741,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         )

         how_many = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .count(numeric_only=True)
@@ -1751,7 +1758,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         per_period_mar = min_accepted_return / time_factor
         diff = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .sub(per_period_mar)
@@ -1764,13 +1771,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         dd_order = 2

-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=result,
             name="Downside deviation" if order == dd_order else f"LPM{order}",
-            dtype="float64",
         )

     def geo_ret_func(
@@ -1778,7 +1781,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Compounded Annual Growth Rate (CAGR).

         https://www.investopedia.com/terms/c/cagr.asp.
@@ -1795,7 +1798,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Compounded Annual Growth Rate (CAGR)

         """
@@ -1807,12 +1810,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         )
         fraction = (later - earlier).days / 365.25

-        any_below_zero = any(
-            self.tsdf.loc[[earlier, later]]  # type: ignore[index]
-            .lt(0.0)
-            .any()
-            .to_numpy()
-        )
+        any_below_zero = any(self.tsdf.loc[[earlier, later]].lt(0.0).any().to_numpy())
         if zero in self.tsdf.loc[earlier].to_numpy() or any_below_zero:
             msg = (
                 "Geometric return cannot be calculated due to "
@@ -1822,13 +1820,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         result = (self.tsdf.loc[later] / self.tsdf.loc[earlier]) ** (1 / fraction) - 1

-
-
-        return Series(
-            data=result.to_numpy(),
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name="Geometric return",
-            dtype="float64",
         )

     def skew_func(
@@ -1836,7 +1830,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Skew of the return distribution.

         https://www.investopedia.com/terms/s/skewness.asp.
@@ -1853,7 +1847,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Skew of the return distribution

         """
@@ -1862,30 +1856,24 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        result
-            a=
-
-
-
+        result = skew(
+            a=(
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+                .ffill()
+                .pct_change()
+            ),
             bias=True,
             nan_policy="omit",
         )

-
-        return float(result[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Skew",
-            dtype="float64",
-        )
+        return self._coerce_result(result=cast("Series[float]", result), name="Skew")

     def kurtosis_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Kurtosis of the return distribution.

         https://www.investopedia.com/terms/k/kurtosis.asp.
@@ -1902,7 +1890,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Kurtosis of the return distribution

         """
@@ -1911,9 +1899,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             from_dt=from_date,
             to_dt=to_date,
         )
-        result
+        result = kurtosis(
             a=(
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()
             ),
@@ -1922,13 +1910,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             nan_policy="omit",
         )

-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
+        return self._coerce_result(
+            result=cast("Series[float]", result),
             name="Kurtosis",
-            dtype="float64",
         )

     def max_drawdown_func(
@@ -1937,7 +1921,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         min_periods: int = 1,
-    ) ->
+    ) -> Combo_co:
         """Maximum drawdown without any limit on date range.

         https://www.investopedia.com/terms/m/maximum-drawdown-mdd.asp.
@@ -1956,7 +1940,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Maximum drawdown without any limit on date range

         """
@@ -1966,26 +1950,20 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
-            / self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
+            / self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .expanding(min_periods=min_periods)
             .max()
         ).min() - 1
-
-
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Max drawdown",
-            dtype="float64",
-        )
+
+        return self._coerce_result(result=result, name="Max drawdown")

     def positive_share_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate share of percentage changes that are greater than zero.

         Parameters
@@ -2000,7 +1978,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate share of percentage changes that are greater than zero

         """
@@ -2011,10 +1989,10 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         pos = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()[1:][
-                self.tsdf.loc[cast("
+                self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
                 .ffill()
                 .pct_change()[1:]
                 > zero
@@ -2022,20 +2000,14 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             .count()
         )
         tot = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .count()
         )
-
-
-
-        return Series(
-            data=share,
-            index=self.tsdf.columns,
-            name="Positive share",
-            dtype="float64",
-        )
+        result = pos / tot
+
+        return self._coerce_result(result=result, name="Positive share")

     def ret_vol_ratio_func(
         self: Self,
@@ -2044,7 +2016,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Ratio between arithmetic mean of returns and annualized volatility.

         The ratio of annualized arithmetic mean of returns and annualized
@@ -2070,12 +2042,12 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Ratio of the annualized arithmetic mean of returns and annualized
             volatility or, if risk-free return provided, Sharpe ratio

         """
-
+        result = Series(
             self.arithmetic_ret_func(
                 months_from_last=months_from_last,
                 from_date=from_date,
@@ -2090,14 +2062,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
                 periods_in_a_year_fixed=periods_in_a_year_fixed,
             )

-
-        return float(cast("float64", ratio.iloc[0]))
-        return Series(
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Return vol ratio",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Return vol ratio")

     def sortino_ratio_func(
         self: Self,
@@ -2108,7 +2073,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         periods_in_a_year_fixed: DaysInYearType | None = None,
-    ) ->
+    ) -> Combo_co:
         """Sortino Ratio or Kappa3 Ratio.

         The Sortino ratio calculated as ( return - risk free return )
@@ -2141,12 +2106,12 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Sortino ratio calculated as ( return - riskfree return ) /
             downside deviation (std dev of returns below MAR)

         """
-
+        result = Series(
             self.arithmetic_ret_func(
                 months_from_last=months_from_last,
                 from_date=from_date,
@@ -2164,14 +2129,9 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         )

         sortino_order = 2
-        if
-
-        return
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Sortino ratio" if order == sortino_order else "Kappa-3 ratio",
-            dtype="float64",
-        )
+        name = "Sortino ratio" if order == sortino_order else "Kappa-3 ratio"
+
+        return self._coerce_result(result=result, name=name)

     def omega_ratio_func(
         self: Self,
@@ -2179,7 +2139,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Omega Ratio.

         The Omega Ratio compares returns above a certain target level
@@ -2201,7 +2161,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Omega ratio calculation

         """
@@ -2211,29 +2171,22 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         retdf = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
         )
         pos = retdf[retdf > min_accepted_return].sub(min_accepted_return).sum()
         neg = retdf[retdf < min_accepted_return].sub(min_accepted_return).sum()
-
+        result = pos / -neg

-
-        return float(cast("float64", ratio.iloc[0]))
-        return Series(
-            data=ratio,
-            index=self.tsdf.columns,
-            name="Omega ratio",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Omega ratio")

     def value_ret_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate simple return.

         Parameters
@@ -2248,7 +2201,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate simple return

         """
@@ -2265,22 +2218,18 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             )
             raise InitialValueZeroError(msg)

-        result =
-
-
-        return float(result.iloc[0])
-        return Series(
-            data=result.to_numpy(),
-            index=self.tsdf.columns,
-            name="Simple return",
-            dtype="float64",
+        result = cast(
+            "Series[float]",
+            self.tsdf.loc[later] / self.tsdf.loc[earlier] - 1,
         )

+        return self._coerce_result(result=result, name="Simple return")
+
     def value_ret_calendar_period(
         self: Self,
         year: int,
         month: int | None = None,
-    ) ->
+    ) -> Combo_co:
         """Calculate simple return for a specific calendar period.

         Parameters
@@ -2292,7 +2241,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Calculate simple return for a specific calendar period

         """
@@ -2303,16 +2252,10 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         vrdf = self.tsdf.copy()
         vrdf.index = DatetimeIndex(vrdf.index)
         resultdf = DataFrame(vrdf.ffill().pct_change())
-
-
-
-
-        return Series(
-            data=cal_period,
-            index=self.tsdf.columns,
-            name=period,
-            dtype="float64",
-        )
+        plus_one = resultdf.loc[period] + 1
+        result = plus_one.cumprod(axis="index").iloc[-1] - 1
+
+        return self._coerce_result(result=result, name=period)

     def var_down_func(
         self: Self,
@@ -2321,7 +2264,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
         interpolation: LiteralQuantileInterp = "lower",
-    ) ->
+    ) -> Combo_co:
         """Downside Value At Risk, "VaR".

         The equivalent of percentile.inc([...], 1-level) over returns in MS Excel.
@@ -2343,7 +2286,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Downside Value At Risk

         """
@@ -2353,20 +2296,13 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .quantile(1 - level, interpolation=interpolation)
         )

-
-        return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name=f"VaR {level:.1%}",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name=f"VaR {level:.1%}")

     def worst_func(
         self: Self,
@@ -2374,7 +2310,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Most negative percentage change over a rolling number of observations.

         Parameters
@@ -2391,7 +2327,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Most negative percentage change over a rolling number of observations
             within a chosen date range

@@ -2402,7 +2338,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         result = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
             .rolling(observations, min_periods=observations)
@@ -2410,21 +2346,14 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             .min()
         )

-
-        return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Worst",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Worst")

     def z_score_func(
         self: Self,
         months_from_last: int | None = None,
         from_date: dt.date | None = None,
         to_date: dt.date | None = None,
-    ) ->
+    ) -> Combo_co:
         """Z-score as (last return - mean return) / standard deviation of returns.

         https://www.investopedia.com/terms/z/zscore.asp.
@@ -2441,7 +2370,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         Returns:
         -------
-
+        Combo_co
             Z-score as (last return - mean return) / standard deviation of returns

         """
@@ -2451,20 +2380,13 @@ class _CommonModel(BaseModel):  # type: ignore[misc]
             to_dt=to_date,
         )
         zscframe = (
-            self.tsdf.loc[cast("
+            self.tsdf.loc[cast("Timestamp", earlier) : cast("Timestamp", later)]
             .ffill()
             .pct_change()
         )
         result = (zscframe.iloc[-1] - zscframe.mean()) / zscframe.std()

-
-        return float(result.iloc[0])
-        return Series(
-            data=result,
-            index=self.tsdf.columns,
-            name="Z-score",
-            dtype="float64",
-        )
+        return self._coerce_result(result=result, name="Z-score")

     def rolling_cvar_down(
         self: Self,
@@ -2608,7 +2530,7 @@ class _CommonModel(BaseModel):  # type: ignore[misc]

         s = log(self.tsdf.iloc[:, column]).diff()
         volseries = s.rolling(window=observations, min_periods=observations).std(
-            ddof=dlta_degr_freedms
+            ddof=dlta_degr_freedms,
         ) * sqrt(time_factor)

         voldf = volseries.dropna().to_frame()