bullishpy 0.9.0__py3-none-any.whl → 0.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bullishpy might be problematic.
- bullish/analysis/analysis.py +23 -4
- bullish/analysis/filter.py +26 -1
- bullish/analysis/functions.py +46 -3
- bullish/analysis/indicators.py +89 -44
- bullish/analysis/predefined_filters.py +225 -41
- bullish/app/app.py +35 -16
- bullish/cli.py +3 -0
- bullish/database/alembic/versions/17e51420e7ad_.py +85 -0
- bullish/database/alembic/versions/d663166c531d_.py +56 -0
- bullish/database/crud.py +28 -1
- bullish/figures/figures.py +17 -4
- bullish/interface/interface.py +7 -0
- bullish/jobs/tasks.py +12 -3
- bullish/utils/checks.py +2 -0
- {bullishpy-0.9.0.dist-info → bullishpy-0.11.0.dist-info}/METADATA +2 -1
- {bullishpy-0.9.0.dist-info → bullishpy-0.11.0.dist-info}/RECORD +18 -16
- {bullishpy-0.9.0.dist-info → bullishpy-0.11.0.dist-info}/WHEEL +0 -0
- {bullishpy-0.9.0.dist-info → bullishpy-0.11.0.dist-info}/entry_points.txt +0 -0
bullish/analysis/analysis.py
CHANGED
@@ -116,12 +116,21 @@ TechnicalAnalysisModels = [*IndicatorModels, TechnicalAnalysisBase]
 class TechnicalAnalysis(*TechnicalAnalysisModels):  # type: ignore
 
     @classmethod
-    def from_data(cls, prices: pd.DataFrame) -> "TechnicalAnalysis":
+    def from_data(cls, prices: pd.DataFrame, ticker: Ticker) -> "TechnicalAnalysis":
+        if "close" not in prices.columns:
+            logger.warning(
+                f"Ticker {ticker.symbol} does not have valid 'close' values.",
+                exc_info=True,
+            )
+            return cls()
         try:
             res = Indicators().to_dict(prices)
             return cls(last_price=prices.close.iloc[-1], **res)
         except Exception as e:
-            logger.error(
+            logger.error(
+                f"Failing to calculate technical analysis for {ticker.symbol}: {e}",
+                exc_info=True,
+            )
         return cls()
 
 
@@ -426,10 +435,20 @@ class AnalysisView(BaseModel):
         Optional[str],
         Field(None, description="Full name of the company"),
     ]
+    price_per_earning_ratio: Optional[float] = None
+    last_price: Annotated[
+        Optional[float],
+        BeforeValidator(to_float),
+        Field(
+            default=None,
+        ),
+    ]
+    median_yearly_growth: Optional[float] = None
+    median_weekly_growth: Optional[float] = None
+    median_monthly_growth: Optional[float] = None
 
 
 class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis):  # type: ignore
-    price_per_earning_ratio: Optional[float] = None
 
     @classmethod
     def from_ticker(cls, bearish_db: BearishDbBase, ticker: Ticker) -> "Analysis":
@@ -443,7 +462,7 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
         financials = Financials.from_ticker(bearish_db, ticker)
         fundamental_analysis = FundamentalAnalysis.from_financials(financials, ticker)
         prices = Prices.from_ticker(bearish_db, ticker)
-        technical_analysis = TechnicalAnalysis.from_data(prices.to_dataframe())
+        technical_analysis = TechnicalAnalysis.from_data(prices.to_dataframe(), ticker)
         return cls.model_validate(
             equity.model_dump()
             | fundamental_analysis.model_dump()
bullish/analysis/filter.py
CHANGED
@@ -11,6 +11,8 @@ from bullish.analysis.analysis import (
     YearlyFundamentalAnalysis,
     QuarterlyFundamentalAnalysis,
     TechnicalAnalysisModels,
+    TechnicalAnalysis,
+    AnalysisView,
 )
 
 Industry = Literal[
@@ -455,6 +457,16 @@ def _get_fundamental_analysis_boolean_fields() -> List[str]:
     ]
 
 
+def _get_technical_analysis_float_fields() -> List[str]:
+    return [
+        name
+        for name, info in (
+            TechnicalAnalysis.model_fields | AnalysisView.model_fields
+        ).items()
+        if info.annotation == Optional[float]
+    ]
+
+
 def get_boolean_field_group(group: str) -> List[str]:
     groups = FUNDAMENTAL_ANALYSIS_GROUP.copy()
     groups.remove(group)
@@ -484,6 +496,8 @@ GROUP_MAPPING: Dict[str, List[str]] = {
     "industry_group": list(get_args(IndustryGroup)),
     "sector": list(get_args(Sector)),
     "symbol": [],
+    "order_by_asc": _get_technical_analysis_float_fields(),
+    "order_by_desc": _get_technical_analysis_float_fields(),
 }
 
 
@@ -552,11 +566,14 @@ FundamentalAnalysisFilters = _create_fundamental_analysis_models()
 
 class GeneralFilter(BaseModel):
     country: Optional[List[str]] = None
+    order_by_asc: Optional[str] = None
+    order_by_desc: Optional[str] = None
     industry: Optional[List[str]] = None
     industry_group: Optional[List[str]] = None
     sector: Optional[List[str]] = None
     symbol: Optional[List[str]] = None
     market_capitalization: Optional[List[float]] = Field(default=[5e8, 1e12])
+    price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])
 
 
 class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysisFilters):  # type: ignore
@@ -572,6 +589,8 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
     def to_query(self) -> str:
         parameters = self.model_dump(exclude_defaults=True, exclude_unset=True)
         query = []
+        order_by_desc = ""
+        order_by_asc = ""
        for parameter, value in parameters.items():
             if not value:
                 continue
@@ -582,6 +601,12 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
                 and parameter not in GeneralFilter.model_fields
             ):
                 query.append(" AND ".join([f"{v}=1" for v in value]))
+            elif (
+                isinstance(value, str) and bool(value) and parameter == "order_by_desc"
+            ):
+                order_by_desc = f"ORDER BY {value} DESC"
+            elif isinstance(value, str) and bool(value) and parameter == "order_by_asc":
+                order_by_asc = f"ORDER BY {value} ASC"
             elif (
                 isinstance(value, list)
                 and len(value) == SIZE_RANGE
@@ -604,7 +629,7 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
             else:
                 raise NotImplementedError
         query_ = " AND ".join(query)
-        return query_
+        return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
 
 
 class FilterQueryStored(FilterQuery): ...
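The ordering support added to to_query boils down to appending at most one ORDER BY suffix to the AND-joined filter terms. A standalone sketch of that assembly (build_query is a hypothetical helper, not part of FilterQuery):

def build_query(terms: list[str], order_by_asc: str = "", order_by_desc: str = "") -> str:
    # WHERE terms are AND-joined, then the optional ORDER BY suffixes are
    # appended and surplus whitespace stripped, mirroring FilterQuery.to_query.
    where = " AND ".join(terms)
    desc = f"ORDER BY {order_by_desc} DESC" if order_by_desc else ""
    asc = f"ORDER BY {order_by_asc} ASC" if order_by_asc else ""
    return f"{where} {desc} {asc}".strip()


print(build_query(["market_capitalization >= 500000000.0"], order_by_desc="last_price"))
# market_capitalization >= 500000000.0 ORDER BY last_price DESC

Note that, as written in the diff, populating both order_by_asc and order_by_desc would emit two ORDER BY clauses.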
bullish/analysis/functions.py
CHANGED
@@ -1,7 +1,9 @@
+import datetime
 import logging
 from datetime import date
-from typing import Optional, Callable
+from typing import Optional, Callable, cast
 
+import numpy as np
 import pandas as pd
 import pandas_ta as ta  # type: ignore
 
@@ -19,7 +21,7 @@ def cross(
 ) -> Optional[date]:
     crossing = ta.cross(series_a=series_a, series_b=series_b, above=above)
     if not crossing[crossing == 1].index.empty:
-        return
+        return crossing[crossing == 1].last_valid_index().date()  # type: ignore
     return None
 
 
@@ -27,6 +29,15 @@ def cross_value(series: pd.Series, number: int, above: bool = True) -> Optional[
     return cross(series, pd.Series(number, index=series.index), above=above)
 
 
+def cross_value_series(
+    series_a: pd.Series, number: int, above: bool = True
+) -> pd.Series:
+    crossing = ta.cross(
+        series_a=series_a, series_b=pd.Series(number, index=series_a.index), above=above
+    )
+    return crossing  # type: ignore
+
+
 def compute_adx(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["ADX_14"] = talib.ADX(data.high, data.low, close=data.close)  # type: ignore
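Both cross and the new cross_value_series delegate to pandas_ta's cross, which yields a 0/1 series marking the bars where series_a moves across series_b. A toy illustration (assuming pandas_ta behaves as this module already relies on):

import pandas as pd
import pandas_ta as ta  # same dependency the module imports

rsi = pd.Series([38.0, 42.0, 44.0, 47.0, 43.0, 46.0])
threshold = pd.Series(45, index=rsi.index)

# 1 flags a bar where RSI closes above 45 after sitting below it on the previous
# bar (positions 3 and 5 in this toy series); 0 everywhere else.
crossings = ta.cross(series_a=rsi, series_b=threshold, above=True)
last_crossing = crossings[crossings == 1].last_valid_index()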
@@ -69,12 +80,14 @@ def compute_pandas_ta_macd(data: pd.DataFrame) -> pd.DataFrame:
 def compute_rsi(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["RSI"] = talib.RSI(data.close)  # type: ignore
+    results["CLOSE"] = data.close
     return results
 
 
 def compute_pandas_ta_rsi(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["RSI"] = ta.rsi(data.close, length=14)
+    results["CLOSE"] = data.close
     return results
 
 
@@ -244,6 +257,12 @@ def compute_patterns(data: pd.DataFrame) -> pd.DataFrame:
     return results
 
 
+def perc(data: pd.Series) -> float:
+    if len(data) < 2 or data.iloc[0] == 0:
+        return np.nan
+    return cast(float, ((data.iloc[-1] - data.iloc[0]) / data.iloc[0]) * 100)
+
+
 def compute_price(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["200_DAY_HIGH"] = data.close.rolling(window=200).max()
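perc is a plain first-to-last percent change over whatever slice it is handed, which is what lets compute_price (next hunk) broadcast weekly, monthly, and yearly growth back onto every daily row through resample(...).transform(perc). A small self-contained illustration:

import numpy as np
import pandas as pd


def perc(data: pd.Series) -> float:
    # First-to-last percent change of a group; NaN for degenerate groups.
    if len(data) < 2 or data.iloc[0] == 0:
        return np.nan
    return float((data.iloc[-1] - data.iloc[0]) / data.iloc[0] * 100)


idx = pd.date_range("2024-01-01", periods=14, freq="D")
close = pd.Series(np.linspace(100.0, 113.0, len(idx)), index=idx)

# transform() writes each week's growth onto every row of that week, so the
# result stays aligned with the daily price index.
weekly_growth = close.resample("W").transform(perc)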
@@ -251,9 +270,30 @@ def compute_price(data: pd.DataFrame) -> pd.DataFrame:
     results["20_DAY_HIGH"] = data.close.rolling(window=20).max()
     results["20_DAY_LOW"] = data.close.rolling(window=20).min()
     results["LAST_PRICE"] = data.close
+    results["WEEKLY_GROWTH"] = data.close.resample("W").transform(perc)  # type: ignore
+    results["MONTHLY_GROWTH"] = data.close.resample("ME").transform(perc)  # type: ignore
+    results["YEARLY_GROWTH"] = data.close.resample("YE").transform(perc)  # type: ignore
     return results
 
 
+def compute_percentile_return_after_rsi_crossover(
+    data: pd.DataFrame, rsi_threshold: int = 45, period: int = 90
+) -> float:
+    data_ = cross_value_series(data.RSI, rsi_threshold)
+    values = []
+    for crossing_date in data_[data_ == 1].index:
+        data_crossed = data[
+            (data.index >= crossing_date)
+            & (data.index <= crossing_date + datetime.timedelta(days=period))
+        ]
+        v = (
+            data_crossed.CLOSE.pct_change(periods=len(data_crossed.CLOSE) - 1).iloc[-1]
+            * 100
+        )
+        values.append(v)
+    return float(np.percentile(values, 30))
+
+
 class IndicatorFunction(BaseModel):
     expected_columns: list[str]
     functions: list[Callable[[pd.DataFrame], pd.DataFrame]]
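compute_percentile_return_after_rsi_crossover summarises what historically followed each RSI-45 crossover: for every crossover date it takes the close-to-close return over the next 90 days, then reports the 30th percentile of those returns as a conservative forward-return estimate. A simplified pure-pandas sketch of the same idea (the helper name and the use of pd.Timedelta are illustrative, not the package's code):

import numpy as np
import pandas as pd


def forward_return_percentile(
    close: pd.Series, crossover_dates: pd.DatetimeIndex, days: int = 90, q: int = 30
) -> float:
    # For each crossover, percent change from the crossover bar to the last bar
    # inside the forward window, then a low percentile across all crossovers.
    values = []
    for start in crossover_dates:
        window = close[(close.index >= start) & (close.index <= start + pd.Timedelta(days=days))]
        if len(window) > 1:
            values.append((window.iloc[-1] / window.iloc[0] - 1.0) * 100)
    return float(np.percentile(values, q)) if values else float("nan")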
@@ -265,7 +305,7 @@ class IndicatorFunction(BaseModel):
                 data_ = function(data)
                 break
             except Exception as e:
-                logger.
+                logger.error(f"Fail to compute function {function.__name__}: {e}")
         if data_ is None:
             raise ValueError("No data returned from indicator functions.")
         if not set(self.expected_columns).issubset(set(data_.columns)):
@@ -347,6 +387,9 @@ PRICE = IndicatorFunction(
         "20_DAY_HIGH",
         "20_DAY_LOW",
         "LAST_PRICE",
+        "WEEKLY_GROWTH",
+        "MONTHLY_GROWTH",
+        "YEARLY_GROWTH",
     ],
     functions=[compute_price],
 )
bullish/analysis/indicators.py
CHANGED
@@ -1,10 +1,10 @@
 import logging
 from datetime import date
-from typing import Optional, List, Callable, Any, Literal, Dict, Union
+from typing import Optional, List, Callable, Any, Literal, Dict, Union
 
 import numpy as np
 import pandas as pd
-from pydantic import BaseModel, Field, PrivateAttr, create_model
+from pydantic import BaseModel, Field, PrivateAttr, create_model
 
 from bullish.analysis.functions import (
     cross,
@@ -19,6 +19,7 @@ from bullish.analysis.functions import (
     SMA,
     ADOSC,
     PRICE,
+    compute_percentile_return_after_rsi_crossover,
 )
 
 logger = logging.getLogger(__name__)
@@ -49,14 +50,6 @@ class Signal(BaseModel):
         else:
             self.value = self.function(data)  # type: ignore
 
-    @model_validator(mode="after")
-    def _validate(self) -> Self:
-        if self.type == Optional[float] and self.range is None:
-            raise ValueError(
-                "Range must be defined for signals of type Optional[float]"
-            )
-        return self
-
 
 class Indicator(BaseModel):
     name: str
@@ -100,7 +93,7 @@ def indicators_factory() -> List[Indicator]:
                     type=Optional[date],
                     function=lambda d: d[
                         (d.ADX_14 > 20) & (d.PLUS_DI > d.MINUS_DI)
-                    ].
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="ADX_14_SHORT",
@@ -109,7 +102,7 @@ def indicators_factory() -> List[Indicator]:
                     type=Optional[date],
                     function=lambda d: d[
                         (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI)
-                    ].
+                    ].last_valid_index(),
                 ),
             ],
         ),
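Most date-typed signals in this file now share one pattern: filter the indicator frame on a boolean condition and take last_valid_index(), i.e. the most recent date on which the condition held, or None if it never did. For instance:

import pandas as pd

idx = pd.date_range("2024-01-01", periods=5, freq="D")
frame = pd.DataFrame({"MFI": [15, 25, 18, 30, 35]}, index=idx)

# Last date the oversold condition was true -> Timestamp('2024-01-03');
# an empty filtered frame would instead return None.
last_oversold = frame[frame.MFI < 20].last_valid_index()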
@@ -156,16 +149,30 @@ def indicators_factory() -> List[Indicator]:
         Indicator(
             name="RSI",
             description="Relative Strength Index",
-            expected_columns=
+            expected_columns=RSI.expected_columns,
             function=RSI.call,
             signals=[
                 Signal(
-                    name="
+                    name="RSI_BULLISH_CROSSOVER_30",
                     description="RSI Bullish Crossover",
                     type_info="Long",
                     type=Optional[date],
                     function=lambda d: cross_value(d.RSI, 30),
                 ),
+                Signal(
+                    name="RSI_BULLISH_CROSSOVER_40",
+                    description="RSI Bullish Crossover 40",
+                    type_info="Long",
+                    type=Optional[date],
+                    function=lambda d: cross_value(d.RSI, 40),
+                ),
+                Signal(
+                    name="RSI_BULLISH_CROSSOVER_45",
+                    description="RSI Bullish Crossover 45",
+                    type_info="Long",
+                    type=Optional[date],
+                    function=lambda d: cross_value(d.RSI, 45),
+                ),
                 Signal(
                     name="RSI_BEARISH_CROSSOVER",
                     description="RSI Bearish Crossover",
@@ -178,21 +185,32 @@ def indicators_factory() -> List[Indicator]:
                     description="RSI Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].
+                    function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].last_valid_index(),
                 ),
                 Signal(
                     name="RSI_OVERBOUGHT",
                     description="RSI Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.RSI < 100) & (d.RSI > 70)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="RSI_NEUTRAL",
                     description="RSI Neutral Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.RSI < 60) & (d.RSI > 40)
+                    ].last_valid_index(),
+                ),
+                Signal(
+                    name="RETURN_AFTER_RSI_CROSSOVER_45_PERIOD_90",
+                    description="Percentile 30 return after RSI crossover 45 in the next 90 days",
+                    type_info="Long",
+                    type=Optional[float],
+                    function=lambda d: compute_percentile_return_after_rsi_crossover(d),
                 ),
             ],
         ),
@@ -207,14 +225,18 @@ def indicators_factory() -> List[Indicator]:
                     description="Stoch Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.SLOW_K < 20) & (d.SLOW_K > 0)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="STOCH_OVERBOUGHT",
                     description="Stoch Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.SLOW_K < 100) & (d.SLOW_K > 80)
+                    ].last_valid_index(),
                 ),
             ],
         ),
@@ -229,14 +251,14 @@ def indicators_factory() -> List[Indicator]:
                     description="MFI Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[(d.MFI < 20)].
+                    function=lambda d: d[(d.MFI < 20)].last_valid_index(),
                 ),
                 Signal(
                     name="MFI_OVERBOUGHT",
                     description="MFI Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[(d.MFI > 80)].
+                    function=lambda d: d[(d.MFI > 80)].last_valid_index(),
                 ),
             ],
         ),
@@ -273,18 +295,39 @@ def indicators_factory() -> List[Indicator]:
                     description="Current price is lower than the 200-day high",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[
-
-                    ],
+                    function=lambda d: d[
+                        0.6 * d["200_DAY_HIGH"] > d.LAST_PRICE
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="LOWER_THAN_20_DAY_HIGH",
                     description="Current price is lower than the 20-day high",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[
-
-                    ],
+                    function=lambda d: d[
+                        0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE
+                    ].last_valid_index(),
+                ),
+                Signal(
+                    name="MEDIAN_WEEKLY_GROWTH",
+                    description="Median weekly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.WEEKLY_GROWTH.unique()),
+                ),
+                Signal(
+                    name="MEDIAN_MONTHLY_GROWTH",
+                    description="Median monthly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.MONTHLY_GROWTH.unique()),
+                ),
+                Signal(
+                    name="MEDIAN_YEARLY_GROWTH",
+                    description="Median yearly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.YEARLY_GROWTH.unique()),
                 ),
             ],
         ),
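Because WEEKLY_GROWTH (and the monthly and yearly columns) repeat a single growth figure across every row of their period, the new median signals call .unique() before np.median so each period is counted exactly once. A tiny numeric check:

import numpy as np
import pandas as pd

# Three weeks of daily rows, each carrying its week's growth value.
weekly_growth = pd.Series([2.0, 2.0, 2.0, -1.0, -1.0, 3.5, 3.5])
median_weekly_growth = float(np.median(weekly_growth.unique()))  # -> 2.0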
@@ -299,7 +342,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median daily Rate of Change of the last 30 days",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_1.tolist()[-30:]),
                 ),
                 Signal(
@@ -307,7 +349,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median weekly Rate of Change of the last 4 weeks",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_7.tolist()[-4:]),
                 ),
                 Signal(
@@ -315,7 +356,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median weekly Rate of Change of the last 12 weeks",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_7.tolist()[-12:]),
                 ),
                 Signal(
@@ -323,7 +363,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median monthly Rate of Change of the last 12 Months",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_30.tolist()[-12:]),
                 ),
                 Signal(
@@ -331,7 +370,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="30-day Rate of Change",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: d.ROC_30.tolist()[-1],
                 ),
                 Signal(
@@ -339,7 +377,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="7-day Rate of Change",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: d.ROC_7.tolist()[-1],
                 ),
             ],
@@ -362,9 +399,9 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Oversold",
                     description="20-day breakout confirmed by positive ADOSC",
                     type=Optional[date],
-                    function=lambda d: d[
-
-                    ],
+                    function=lambda d: d[
+                        (d.ADOSC_SIGNAL == True)  # noqa: E712
+                    ].last_valid_index(),
                 ),
             ],
         ),
@@ -387,49 +424,53 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Long",
                     description="Morning Star Candlestick Pattern",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLMORNINGSTAR == 100)].
+                    function=lambda d: d[(d.CDLMORNINGSTAR == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDL3LINESTRIKE",
                     description="3 Line Strike Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDL3LINESTRIKE == 100)].
+                    function=lambda d: d[(d.CDL3LINESTRIKE == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDL3WHITESOLDIERS",
                     description="3 White Soldiers Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.CDL3WHITESOLDIERS == 100)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="CDLABANDONEDBABY",
                     description="Abandoned Baby Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[
+                    function=lambda d: d[
+                        (d.CDLABANDONEDBABY == 100)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="CDLTASUKIGAP",
                     description="Tasukigap Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLTASUKIGAP == 100)].
+                    function=lambda d: d[(d.CDLTASUKIGAP == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDLPIERCING",
                     description="Piercing Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLPIERCING == 100)].
+                    function=lambda d: d[(d.CDLPIERCING == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDLENGULFING",
                     description="Engulfing Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLENGULFING == 100)].
+                    function=lambda d: d[(d.CDLENGULFING == 100)].last_valid_index(),
                 ),
             ],
         ),
@@ -441,7 +482,11 @@ class Indicators(BaseModel):
 
     def compute(self, data: pd.DataFrame) -> None:
         for indicator in self.indicators:
-
+            try:
+                indicator.compute(data)
+            except Exception as e:
+                logger.error(f"Failed to compute indicator {indicator.name}: {e}")
+                continue
             logger.info(
                 f"Computed {indicator.name} with {len(indicator.signals)} signals"
             )