bullishpy 0.4.0__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic. Click here for more details.

@@ -0,0 +1,450 @@
1
+ import logging
2
+ from datetime import date
3
+ from typing import Optional, List, Callable, Any, Literal, Dict, Union, Self
4
+
5
+ import numpy as np
6
+ import pandas as pd
7
+ from pydantic import BaseModel, Field, PrivateAttr, create_model, model_validator
8
+
9
+ from bullish.analysis.functions import (
10
+ cross,
11
+ cross_value,
12
+ ADX,
13
+ MACD,
14
+ RSI,
15
+ STOCH,
16
+ MFI,
17
+ ROC,
18
+ CANDLESTOCK_PATTERNS,
19
+ SMA,
20
+ ADOSC,
21
+ )
22
+
23
# Module-level logger for indicator/signal computation diagnostics.
logger = logging.getLogger(__name__)
# Classification of a signal: directional ("Long"/"Short"), oscillator
# boundary ("Oversold"/"Overbought"), or a plain numeric "Value".
SignalType = Literal["Short", "Long", "Oversold", "Overbought", "Value"]
25
+
26
+
27
class Signal(BaseModel):
    """A single trading signal derived from an indicator's output.

    A signal is either date-valued (``type == Optional[date]``: the most
    recent date on which its condition held) or float-valued
    (``type == Optional[float]``: a bounded numeric reading).
    """

    name: str
    type_info: SignalType
    type: Any  # Optional[date] or Optional[float]; drives is_date()
    range: Optional[List[float]] = None  # [min, max]; required for float signals
    function: Callable[[pd.DataFrame], Optional[Union[date, float]]]
    description: str
    date: Optional[date] = None  # filled by compute() for date-valued signals
    value: Optional[float] = None  # filled by compute() for float-valued signals

    def is_date(self) -> bool:
        """Return True for date-valued signals, False for float-valued ones.

        Raises:
            NotImplementedError: if ``type`` is neither ``Optional[date]``
                nor ``Optional[float]``.
        """
        if self.type == Optional[date]:
            return True
        if self.type == Optional[float]:
            return False
        raise NotImplementedError(f"Unsupported signal type: {self.type!r}")

    def compute(self, data: pd.DataFrame) -> None:
        """Evaluate ``function`` on *data* and store the result in either
        ``date`` or ``value`` depending on the signal type."""
        if self.is_date():
            self.date = self.function(data)  # type: ignore
        else:
            self.value = self.function(data)  # type: ignore

    @model_validator(mode="after")
    def _validate(self) -> Self:
        # Float-valued signals must declare a [min, max] range so the
        # generated query models can carry ge/le constraints; validate the
        # shape here instead of failing later with an IndexError.
        if self.type == Optional[float]:
            if self.range is None:
                raise ValueError(
                    "Range must be defined for signals of type Optional[float]"
                )
            if len(self.range) != 2:
                raise ValueError("Range must contain exactly two values: [min, max]")
        return self
58
+
59
+
60
class Indicator(BaseModel):
    """A technical indicator: a computation over price data plus the
    signals derived from its output columns."""

    name: str
    description: str
    expected_columns: List[str]  # columns that function() must produce
    function: Callable[[pd.DataFrame], pd.DataFrame]
    _data: pd.DataFrame = PrivateAttr(default=pd.DataFrame())
    signals: List[Signal] = Field(default_factory=list)

    def compute(self, data: pd.DataFrame) -> None:
        """Run the indicator on *data*, validate the output columns and
        evaluate every attached signal.

        Raises:
            ValueError: when the indicator output is missing one of
                ``expected_columns``.
        """
        frame = self.function(data)
        if set(self.expected_columns) - set(frame.columns):
            raise ValueError(
                f"Expected columns {self.expected_columns}, but got {frame.columns.tolist()}"
            )
        self._data = frame
        self._signals()

    def _signals(self) -> None:
        # Each signal is evaluated independently: a failure is logged and
        # skipped so that one bad signal cannot abort the whole indicator.
        for signal in self.signals:
            try:
                signal.compute(self._data)
            except Exception as e:  # noqa: PERF203
                logger.error(
                    f"Fail to compute signal {signal.name} for indicator {self.name}: {e}"
                )
85
+
86
+
87
def indicators_factory() -> List[Indicator]:
    """Build the default set of technical indicators and their signals.

    Date-valued signals record the most recent date on which their
    condition held; float-valued signals record a bounded numeric reading
    and therefore declare a ``range``.
    """
    return [
        Indicator(
            name="ADX_14",
            description="Average Directional Movement Index",
            expected_columns=["ADX_14", "MINUS_DI", "PLUS_DI"],
            function=ADX.call,
            signals=[
                Signal(
                    name="ADX_14_LONG",
                    description="ADX 14 Long Signal",
                    type_info="Long",
                    type=Optional[date],
                    # ADX > 20 indicates a trending market; +DI above -DI
                    # marks the trend as upward.
                    function=lambda d: d[
                        (d.ADX_14 > 20) & (d.PLUS_DI > d.MINUS_DI)
                    ].index[-1],
                ),
                Signal(
                    name="ADX_14_SHORT",
                    description="ADX 14 Short Signal",
                    type_info="Short",
                    type=Optional[date],
                    function=lambda d: d[
                        (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI)
                    ].index[-1],
                ),
            ],
        ),
        Indicator(
            name="MACD_12_26_9",
            description="Moving Average Convergence/Divergence",
            expected_columns=[
                "MACD_12_26_9",
                "MACD_12_26_9_SIGNAL",
                "MACD_12_26_9_HIST",
            ],
            function=MACD.call,
            signals=[
                Signal(
                    name="MACD_12_26_9_BULLISH_CROSSOVER",
                    description="MACD 12-26-9 Bullish Crossover",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: cross(d.MACD_12_26_9, d.MACD_12_26_9_SIGNAL),
                ),
                Signal(
                    name="MACD_12_26_9_BEARISH_CROSSOVER",
                    description="MACD 12-26-9 Bearish Crossover",
                    type_info="Short",
                    type=Optional[date],
                    function=lambda d: cross(d.MACD_12_26_9_SIGNAL, d.MACD_12_26_9),
                ),
                Signal(
                    name="MACD_12_26_9_ZERO_LINE_CROSS_UP",
                    description="MACD 12-26-9 Zero Line Cross Up",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: cross_value(d.MACD_12_26_9, 0),
                ),
                Signal(
                    name="MACD_12_26_9_ZERO_LINE_CROSS_DOWN",
                    description="MACD 12-26-9 Zero Line Cross Down",
                    # Fixed: a downward zero-line cross is bearish; it was
                    # mislabeled "Long" (copy-paste from the CROSS_UP signal).
                    type_info="Short",
                    type=Optional[date],
                    function=lambda d: cross_value(d.MACD_12_26_9, 0, above=False),
                ),
            ],
        ),
        Indicator(
            name="RSI",
            description="Relative Strength Index",
            expected_columns=["RSI"],
            function=RSI.call,
            signals=[
                Signal(
                    name="RSI_BULLISH_CROSSOVER",
                    description="RSI Bullish Crossover",
                    type_info="Long",
                    type=Optional[date],
                    # RSI rising back through 30 (out of oversold territory).
                    function=lambda d: cross_value(d.RSI, 30),
                ),
                Signal(
                    name="RSI_BEARISH_CROSSOVER",
                    description="RSI Bearish Crossover",
                    type_info="Short",
                    type=Optional[date],
                    function=lambda d: cross_value(d.RSI, 70, above=False),
                ),
                Signal(
                    name="RSI_OVERSOLD",
                    description="RSI Oversold Signal",
                    type_info="Oversold",
                    type=Optional[date],
                    function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].index[-1],
                ),
                Signal(
                    name="RSI_OVERBOUGHT",
                    description="RSI Overbought Signal",
                    type_info="Overbought",
                    type=Optional[date],
                    function=lambda d: d[(d.RSI < 100) & (d.RSI > 70)].index[-1],
                ),
            ],
        ),
        Indicator(
            name="STOCH",
            description="Stochastic",
            expected_columns=["SLOW_K", "SLOW_D"],
            function=STOCH.call,
            signals=[
                Signal(
                    name="STOCH_OVERSOLD",
                    description="Stoch Oversold Signal",
                    type_info="Oversold",
                    type=Optional[date],
                    function=lambda d: d[(d.SLOW_K < 20) & (d.SLOW_K > 0)].index[-1],
                ),
                Signal(
                    name="STOCH_OVERBOUGHT",
                    description="Stoch Overbought Signal",
                    type_info="Overbought",
                    type=Optional[date],
                    function=lambda d: d[(d.SLOW_K < 100) & (d.SLOW_K > 80)].index[-1],
                ),
            ],
        ),
        Indicator(
            name="MFI",
            description="Money Flow Index",
            expected_columns=["MFI"],
            function=MFI.call,
            signals=[
                Signal(
                    name="MFI_OVERSOLD",
                    description="MFI Oversold Signal",
                    type_info="Oversold",
                    type=Optional[date],
                    function=lambda d: d[(d.MFI < 20)].index[-1],
                ),
                Signal(
                    name="MFI_OVERBOUGHT",
                    description="MFI Overbought Signal",
                    type_info="Overbought",
                    type=Optional[date],
                    function=lambda d: d[(d.MFI > 80)].index[-1],
                ),
            ],
        ),
        Indicator(
            name="SMA",
            # Fixed: description was a copy-paste of "Money Flow Index".
            description="Simple Moving Average",
            expected_columns=["SMA_50", "SMA_200"],
            function=SMA.call,
            signals=[
                Signal(
                    name="GOLDEN_CROSS",
                    description="Golden cross: SMA 50 crosses above SMA 200",
                    # Fixed: golden/death crosses are trend signals, not
                    # oscillator boundaries; they were mislabeled
                    # Oversold/Overbought.
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: cross(d.SMA_50, d.SMA_200),
                ),
                Signal(
                    name="DEATH_CROSS",
                    description="Death cross: SMA 50 crosses below SMA 200",
                    type_info="Short",
                    type=Optional[date],
                    function=lambda d: cross(d.SMA_50, d.SMA_200, above=False),
                ),
            ],
        ),
        Indicator(
            name="ROC",
            description="Rate Of Change",
            expected_columns=["ROC_7", "ROC_30", "ROC_1"],
            function=ROC.call,
            signals=[
                Signal(
                    name="MEDIAN_RATE_OF_CHANGE_1",
                    type_info="Value",
                    description="Median daily Rate of Change of the last 30 days",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: np.median(d.ROC_1.tolist()[-30:]),
                ),
                Signal(
                    name="MEDIAN_RATE_OF_CHANGE_7_4",
                    type_info="Value",
                    description="Median weekly Rate of Change of the last 4 weeks",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: np.median(d.ROC_7.tolist()[-4:]),
                ),
                Signal(
                    name="MEDIAN_RATE_OF_CHANGE_7_12",
                    type_info="Value",
                    description="Median weekly Rate of Change of the last 12 weeks",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: np.median(d.ROC_7.tolist()[-12:]),
                ),
                Signal(
                    name="MEDIAN_RATE_OF_CHANGE_30",
                    type_info="Value",
                    description="Median monthly Rate of Change of the last 12 Months",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: np.median(d.ROC_30.tolist()[-12:]),
                ),
                Signal(
                    name="RATE_OF_CHANGE_30",
                    type_info="Value",
                    description="30-day Rate of Change",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: d.ROC_30.tolist()[-1],
                ),
                Signal(
                    name="RATE_OF_CHANGE_7",
                    type_info="Value",
                    description="7-day Rate of Change",
                    type=Optional[float],
                    range=[-100, 100],
                    function=lambda d: d.ROC_7.tolist()[-1],
                ),
            ],
        ),
        Indicator(
            name="ADOSC",
            description="Chaikin A/D Oscillator",
            expected_columns=["ADOSC", "ADOSC_SIGNAL"],
            function=ADOSC.call,
            signals=[
                Signal(
                    name="ADOSC_CROSSES_ABOVE_0",
                    # Fixed: bullish money-flow momentum is a "Long" signal;
                    # it was mislabeled "Oversold".
                    type_info="Long",
                    description="Bullish momentum in money flow",
                    type=Optional[date],
                    function=lambda d: cross_value(d.ADOSC, 0, above=True),
                ),
                Signal(
                    name="POSITIVE_ADOSC_20_DAY_BREAKOUT",
                    type_info="Long",
                    description="20-day breakout confirmed by positive ADOSC",
                    type=Optional[date],
                    function=lambda d: d[(d.ADOSC_SIGNAL == True)].index[  # noqa: E712
                        -1
                    ],
                ),
            ],
        ),
        Indicator(
            name="CANDLESTICKS",
            description="Candlestick Patterns",
            expected_columns=[
                "CDLMORNINGSTAR",
                "CDL3LINESTRIKE",
                "CDL3WHITESOLDIERS",
                "CDLABANDONEDBABY",
                "CDLTASUKIGAP",
                "CDLPIERCING",
                "CDLENGULFING",
            ],
            function=CANDLESTOCK_PATTERNS.call,
            signals=[
                # TA-Lib pattern functions emit 100 for a bullish match,
                # hence the "== 100" condition on every pattern column.
                Signal(
                    name="CDLMORNINGSTAR",
                    type_info="Long",
                    description="Morning Star Candlestick Pattern",
                    type=Optional[date],
                    function=lambda d: d[(d.CDLMORNINGSTAR == 100)].index[-1],
                ),
                Signal(
                    name="CDL3LINESTRIKE",
                    description="3 Line Strike Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDL3LINESTRIKE == 100)].index[-1],
                ),
                Signal(
                    name="CDL3WHITESOLDIERS",
                    description="3 White Soldiers Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDL3WHITESOLDIERS == 100)].index[-1],
                ),
                Signal(
                    name="CDLABANDONEDBABY",
                    description="Abandoned Baby Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDLABANDONEDBABY == 100)].index[-1],
                ),
                Signal(
                    name="CDLTASUKIGAP",
                    description="Tasukigap Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDLTASUKIGAP == 100)].index[-1],
                ),
                Signal(
                    name="CDLPIERCING",
                    description="Piercing Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDLPIERCING == 100)].index[-1],
                ),
                Signal(
                    name="CDLENGULFING",
                    description="Engulfing Candlestick Pattern",
                    type_info="Long",
                    type=Optional[date],
                    function=lambda d: d[(d.CDLENGULFING == 100)].index[-1],
                ),
            ],
        ),
    ]
403
+
404
+
405
class Indicators(BaseModel):
    """Container that runs every registered indicator over a price
    DataFrame and exposes the results as a flat dict or as dynamically
    generated pydantic models."""

    # Defaults to the full catalogue built by indicators_factory().
    indicators: List[Indicator] = Field(default_factory=indicators_factory)

    def compute(self, data: pd.DataFrame) -> None:
        """Compute every indicator (and its attached signals) on *data*."""
        for indicator in self.indicators:
            indicator.compute(data)
            logger.info(
                f"Computed {indicator.name} with {len(indicator.signals)} signals"
            )

    def to_dict(self, data: pd.DataFrame) -> Dict[str, Any]:
        """Compute all indicators on *data* and return a flat mapping of
        lower-cased signal name -> signal date (date-valued signals) or
        numeric value (float-valued signals)."""
        self.compute(data)
        res = {}
        for indicator in self.indicators:
            for signal in indicator.signals:
                res[signal.name.lower()] = (
                    signal.date if signal.is_date() else signal.value
                )
        return res

    def create_indicator_models(self) -> List[type[BaseModel]]:
        """Dynamically build one pydantic model per indicator, with one
        optional field per signal (bounded with ge/le when the signal
        declares a range)."""
        models = []
        for indicator in self.indicators:
            model_parameters = {}
            for signal in indicator.signals:
                range_ = {}
                if signal.range:
                    # Translate [min, max] into pydantic Field constraints.
                    range_ = {"ge": signal.range[0], "le": signal.range[1]}
                model_parameters[signal.name.lower()] = (
                    signal.type,
                    Field(  # type: ignore
                        None,
                        **range_,
                        # description is a required Signal field, so the
                        # name-derived fallback is effectively dead code.
                        description=(
                            signal.description
                            or " ".join(signal.name.lower().capitalize().split("_"))
                        ),
                    ),
                )
            model = create_model(indicator.name, **model_parameters)  # type: ignore
            # Stash the human-readable description on the generated class.
            model._description = indicator.description
            models.append(model)
        return models
448
+
449
+
450
# Pre-built pydantic models (one per indicator), generated once at import
# time for use as filter/query schemas elsewhere in the package.
IndicatorModels = Indicators().create_indicator_models()
@@ -0,0 +1,87 @@
1
+ import datetime
2
+ from typing import Dict, Any, Optional
3
+
4
+ from bullish.analysis.filter import FilterQuery
5
+ from pydantic import BaseModel, Field
6
+
7
+
8
class NamedFilterQuery(FilterQuery):
    """A FilterQuery carrying a human-readable name (and an optional
    description) so it can be listed in a catalogue of predefined filters."""

    name: str
    description: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Return only the filter criteria that were explicitly set.

        Presentation-only metadata is stripped: fixed a bug where only
        ``name`` was excluded, so a set ``description`` leaked into the
        criteria dict even though it is not a FilterQuery field.
        """
        return self.model_dump(
            exclude_unset=True,
            exclude_none=True,
            exclude_defaults=True,
            exclude={"name", "description"},
        )
19
+
20
+
21
# Large caps with consistently strong income, cash flow, EPS and returns.
STRONG_FUNDAMENTALS = NamedFilterQuery(
    name="Strong Fundamentals",
    income=[
        "positive_operating_income",
        "growing_operating_income",
        "positive_net_income",
        "growing_net_income",
    ],
    cash_flow=["positive_free_cash_flow", "growing_operating_cash_flow"],
    eps=["positive_diluted_eps", "growing_diluted_eps"],
    properties=[
        "operating_cash_flow_is_higher_than_net_income",
        "positive_return_on_equity",
        "positive_return_on_assets",
        "positive_debt_to_equity",
    ],
    market_capitalization=[1e10, 1e12],  # 10 billion to 1 trillion
)
39
+
40
# Relaxed variant of STRONG_FUNDAMENTALS: positive (but not necessarily
# growing) income, cash flow and EPS.
GOOD_FUNDAMENTALS = NamedFilterQuery(
    name="Good Fundamentals",
    income=[
        "positive_operating_income",
        "positive_net_income",
    ],
    cash_flow=["positive_free_cash_flow"],
    eps=["positive_diluted_eps"],
    properties=[
        "positive_return_on_equity",
        "positive_return_on_assets",
        "positive_debt_to_equity",
    ],
    market_capitalization=[1e10, 1e12],  # 10 billion to 1 trillion
)
55
+
56
# NOTE(review): date.today() is evaluated once at import time, so these
# 5-day windows go stale in a long-running process — confirm intended.
MICRO_CAP_EVENT_SPECULATION = NamedFilterQuery(
    name="Micro-Cap Event Speculation",
    description="seeks tiny names where unusual volume and price gaps hint at "
    "pending corporate events (patent win, FDA news, buy-out rumors).",
    # ADOSC-confirmed 20-day breakout within the last 5 days.
    positive_adosc_20_day_breakout=[
        datetime.date.today() - datetime.timedelta(days=5),
        datetime.date.today(),
    ],
    # Tasuki gap candlestick pattern within the last 5 days.
    cdltasukigap=[
        datetime.date.today() - datetime.timedelta(days=5),
        datetime.date.today(),
    ],
    rate_of_change_30=[20, 100],  # 20% to 100% gain over the last 30 days
    market_capitalization=[0, 5e8],  # micro caps: below 500 million
)
71
+
72
+
73
def predefined_filters() -> list[NamedFilterQuery]:
    """Return the catalogue of built-in named filter queries."""
    catalogue = [
        STRONG_FUNDAMENTALS,
        GOOD_FUNDAMENTALS,
        MICRO_CAP_EVENT_SPECULATION,
    ]
    return catalogue
75
+
76
+
77
class PredefinedFilters(BaseModel):
    """Catalogue of the built-in named filters, addressable by name."""

    filters: list[NamedFilterQuery] = Field(default_factory=predefined_filters)

    def get_predefined_filter_names(self) -> list[str]:
        """Return the names of all predefined filters, in catalogue order."""
        # Renamed the loop variable: the original shadowed the builtin
        # `filter`.
        return [query.name for query in self.filters]

    def get_predefined_filter(self, name: str) -> Dict[str, Any]:
        """Return the criteria dict of the predefined filter called *name*.

        Raises:
            ValueError: if no predefined filter has that name.
        """
        for query in self.filters:
            if query.name == name:
                return query.to_dict()
        raise ValueError(f"Filter with name '{name}' not found.")