bullishpy 0.8.0__py3-none-any.whl → 0.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic.

@@ -116,12 +116,21 @@ TechnicalAnalysisModels = [*IndicatorModels, TechnicalAnalysisBase]
 class TechnicalAnalysis(*TechnicalAnalysisModels):  # type: ignore
 
     @classmethod
-    def from_data(cls, prices: pd.DataFrame) -> "TechnicalAnalysis":
+    def from_data(cls, prices: pd.DataFrame, ticker: Ticker) -> "TechnicalAnalysis":
+        if "close" not in prices.columns:
+            logger.warning(
+                f"Ticker {ticker.symbol} does not have valid 'close' values.",
+                exc_info=True,
+            )
+            return cls()
         try:
             res = Indicators().to_dict(prices)
             return cls(last_price=prices.close.iloc[-1], **res)
         except Exception as e:
-            logger.error(f"Failing to calculate technical analysis: {e}", exc_info=True)
+            logger.error(
+                f"Failing to calculate technical analysis for {ticker.symbol}: {e}",
+                exc_info=True,
+            )
             return cls()
 
 
@@ -426,10 +435,20 @@ class AnalysisView(BaseModel):
         Optional[str],
         Field(None, description="Full name of the company"),
     ]
+    price_per_earning_ratio: Optional[float] = None
+    last_price: Annotated[
+        Optional[float],
+        BeforeValidator(to_float),
+        Field(
+            default=None,
+        ),
+    ]
+    median_yearly_growth: Optional[float] = None
+    median_weekly_growth: Optional[float] = None
+    median_monthly_growth: Optional[float] = None
 
 
 class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis):  # type: ignore
-    price_per_earning_ratio: Optional[float] = None
 
     @classmethod
     def from_ticker(cls, bearish_db: BearishDbBase, ticker: Ticker) -> "Analysis":
@@ -443,7 +462,7 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
         financials = Financials.from_ticker(bearish_db, ticker)
         fundamental_analysis = FundamentalAnalysis.from_financials(financials, ticker)
         prices = Prices.from_ticker(bearish_db, ticker)
-        technical_analysis = TechnicalAnalysis.from_data(prices.to_dataframe())
+        technical_analysis = TechnicalAnalysis.from_data(prices.to_dataframe(), ticker)
         return cls.model_validate(
             equity.model_dump()
             | fundamental_analysis.model_dump()
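
Note: the new guard in from_data returns an empty TechnicalAnalysis as soon as the price frame lacks a 'close' column, rather than letting the indicator pipeline fail later with a less specific error. A minimal sketch of the same pattern in plain pandas (hypothetical stand-ins, not the package's models):

import logging

import pandas as pd

logger = logging.getLogger(__name__)


def from_data(prices: pd.DataFrame, symbol: str) -> dict:
    # Bail out early with an "empty" result when 'close' is missing.
    if "close" not in prices.columns:
        logger.warning(f"Ticker {symbol} does not have valid 'close' values.")
        return {}
    return {"last_price": prices.close.iloc[-1]}


print(from_data(pd.DataFrame({"open": [1.0, 2.0]}), "ACME"))   # {}
print(from_data(pd.DataFrame({"close": [1.0, 2.0]}), "ACME"))  # {'last_price': 2.0}
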
@@ -11,6 +11,8 @@ from bullish.analysis.analysis import (
     YearlyFundamentalAnalysis,
     QuarterlyFundamentalAnalysis,
     TechnicalAnalysisModels,
+    TechnicalAnalysis,
+    AnalysisView,
 )
 
 Industry = Literal[
@@ -455,6 +457,16 @@ def _get_fundamental_analysis_boolean_fields() -> List[str]:
     ]
 
 
+def _get_technical_analysis_float_fields() -> List[str]:
+    return [
+        name
+        for name, info in (
+            TechnicalAnalysis.model_fields | AnalysisView.model_fields
+        ).items()
+        if info.annotation == Optional[float]
+    ]
+
+
 def get_boolean_field_group(group: str) -> List[str]:
     groups = FUNDAMENTAL_ANALYSIS_GROUP.copy()
     groups.remove(group)
@@ -484,6 +496,8 @@ GROUP_MAPPING: Dict[str, List[str]] = {
     "industry_group": list(get_args(IndustryGroup)),
     "sector": list(get_args(Sector)),
     "symbol": [],
+    "order_by_asc": _get_technical_analysis_float_fields(),
+    "order_by_desc": _get_technical_analysis_float_fields(),
 }
 
 
@@ -552,11 +566,14 @@ FundamentalAnalysisFilters = _create_fundamental_analysis_models()
 
 class GeneralFilter(BaseModel):
     country: Optional[List[str]] = None
+    order_by_asc: Optional[str] = None
+    order_by_desc: Optional[str] = None
     industry: Optional[List[str]] = None
     industry_group: Optional[List[str]] = None
     sector: Optional[List[str]] = None
     symbol: Optional[List[str]] = None
     market_capitalization: Optional[List[float]] = Field(default=[5e8, 1e12])
+    price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])
 
 
 class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysisFilters):  # type: ignore
@@ -572,6 +589,8 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
     def to_query(self) -> str:
         parameters = self.model_dump(exclude_defaults=True, exclude_unset=True)
         query = []
+        order_by_desc = ""
+        order_by_asc = ""
         for parameter, value in parameters.items():
             if not value:
                 continue
@@ -582,6 +601,12 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
                 and parameter not in GeneralFilter.model_fields
             ):
                 query.append(" AND ".join([f"{v}=1" for v in value]))
+            elif (
+                isinstance(value, str) and bool(value) and parameter == "order_by_desc"
+            ):
+                order_by_desc = f"ORDER BY {value} DESC"
+            elif isinstance(value, str) and bool(value) and parameter == "order_by_asc":
+                order_by_asc = f"ORDER BY {value} ASC"
             elif (
                 isinstance(value, list)
                 and len(value) == SIZE_RANGE
@@ -604,7 +629,7 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
             else:
                 raise NotImplementedError
         query_ = " AND ".join(query)
-        return query_
+        return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
 
 
 class FilterQueryStored(FilterQuery): ...
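
Note: to_query collects the two new order_by_* strings during the parameter walk and appends them after the WHERE fragment. A simplified, self-contained sketch of that assembly (ranges reduced to two-element lists; the real method also handles boolean groups, dates, and symbol lists):

def to_query_sketch(parameters: dict) -> str:
    query: list[str] = []
    order_by_desc = ""
    order_by_asc = ""
    for parameter, value in parameters.items():
        if not value:
            continue
        if isinstance(value, str) and parameter == "order_by_desc":
            order_by_desc = f"ORDER BY {value} DESC"
        elif isinstance(value, str) and parameter == "order_by_asc":
            order_by_asc = f"ORDER BY {value} ASC"
        elif isinstance(value, list) and len(value) == 2:
            query.append(f"{parameter} BETWEEN {value[0]} AND {value[1]}")
    query_ = " AND ".join(query)
    return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()


print(to_query_sketch({"market_capitalization": [1e9, 1e12], "order_by_desc": "median_yearly_growth"}))
# market_capitalization BETWEEN 1000000000.0 AND 1000000000000.0 ORDER BY median_yearly_growth DESC

Since the two clauses are formatted independently, a filter that sets both order_by_desc and order_by_asc (as the Shooting stars filter below does) produces two consecutive ORDER BY clauses, which SQLite rejects as a syntax error.
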
@@ -1,7 +1,9 @@
+import datetime
 import logging
 from datetime import date
-from typing import Optional, Callable
+from typing import Optional, Callable, cast
 
+import numpy as np
 import pandas as pd
 import pandas_ta as ta  # type: ignore
 
@@ -19,7 +21,7 @@ def cross(
 ) -> Optional[date]:
     crossing = ta.cross(series_a=series_a, series_b=series_b, above=above)
     if not crossing[crossing == 1].index.empty:
-        return pd.Timestamp(crossing[crossing == 1].index[-1]).date()
+        return crossing[crossing == 1].last_valid_index().date()  # type: ignore
     return None
 
 
@@ -27,6 +29,15 @@ def cross_value(series: pd.Series, number: int, above: bool = True) -> Optional[
     return cross(series, pd.Series(number, index=series.index), above=above)
 
 
+def cross_value_series(
+    series_a: pd.Series, number: int, above: bool = True
+) -> pd.Series:
+    crossing = ta.cross(
+        series_a=series_a, series_b=pd.Series(number, index=series_a.index), above=above
+    )
+    return crossing  # type: ignore
+
+
 def compute_adx(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["ADX_14"] = talib.ADX(data.high, data.low, close=data.close)  # type: ignore
@@ -69,12 +80,14 @@ def compute_pandas_ta_macd(data: pd.DataFrame) -> pd.DataFrame:
 def compute_rsi(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["RSI"] = talib.RSI(data.close)  # type: ignore
+    results["CLOSE"] = data.close
     return results
 
 
 def compute_pandas_ta_rsi(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["RSI"] = ta.rsi(data.close, length=14)
+    results["CLOSE"] = data.close
     return results
 
 
@@ -244,6 +257,12 @@ def compute_patterns(data: pd.DataFrame) -> pd.DataFrame:
     return results
 
 
+def perc(data: pd.Series) -> float:
+    if len(data) < 2 or data.iloc[0] == 0:
+        return np.nan
+    return cast(float, ((data.iloc[-1] - data.iloc[0]) / data.iloc[0]) * 100)
+
+
 def compute_price(data: pd.DataFrame) -> pd.DataFrame:
     results = pd.DataFrame(index=data.index)
     results["200_DAY_HIGH"] = data.close.rolling(window=200).max()
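
Note: perc is a start-to-end percentage change over whatever slice it receives, guarded against single-row groups and a zero starting value. Its behavior in isolation:

import numpy as np
import pandas as pd
from typing import cast


def perc(data: pd.Series) -> float:
    if len(data) < 2 or data.iloc[0] == 0:
        return np.nan
    return cast(float, ((data.iloc[-1] - data.iloc[0]) / data.iloc[0]) * 100)


s = pd.Series([100.0, 104.0, 110.0])
print(perc(s))           # 10.0 -> +10% from first to last value
print(perc(s.iloc[:1]))  # nan  -> fewer than two points
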
@@ -251,9 +270,30 @@
     results["20_DAY_HIGH"] = data.close.rolling(window=20).max()
     results["20_DAY_LOW"] = data.close.rolling(window=20).min()
     results["LAST_PRICE"] = data.close
+    results["WEEKLY_GROWTH"] = data.close.resample("W").transform(perc)  # type: ignore
+    results["MONTHLY_GROWTH"] = data.close.resample("ME").transform(perc)  # type: ignore
+    results["YEARLY_GROWTH"] = data.close.resample("YE").transform(perc)  # type: ignore
     return results
 
 
+def compute_percentile_return_after_rsi_crossover(
+    data: pd.DataFrame, rsi_threshold: int = 45, period: int = 90
+) -> float:
+    data_ = cross_value_series(data.RSI, rsi_threshold)
+    values = []
+    for crossing_date in data_[data_ == 1].index:
+        data_crossed = data[
+            (data.index >= crossing_date)
+            & (data.index <= crossing_date + datetime.timedelta(days=period))
+        ]
+        v = (
+            data_crossed.CLOSE.pct_change(periods=len(data_crossed.CLOSE) - 1).iloc[-1]
+            * 100
+        )
+        values.append(v)
+    return float(np.percentile(values, 30))
+
+
 class IndicatorFunction(BaseModel):
     expected_columns: list[str]
     functions: list[Callable[[pd.DataFrame], pd.DataFrame]]
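
Two notes on this hunk. The growth columns use resample(...).transform(perc), which computes each calendar period's start-to-end growth and broadcasts it back onto every row of that period (the "ME"/"YE" aliases assume pandas >= 2.2). compute_percentile_return_after_rsi_crossover then measures, for every upward RSI crossing of the threshold, the close-to-close return over the following `period` days, and reports the 30th percentile of those returns. A self-contained sketch of that flow on synthetic data (a shift-based crossing test stands in for ta.cross):

import datetime

import numpy as np
import pandas as pd

# Synthetic daily closes and an RSI-like oscillator (illustration only).
idx = pd.date_range("2024-01-01", periods=200, freq="D")
rng = np.random.default_rng(0)
frame = pd.DataFrame(
    {
        "CLOSE": 100 + rng.normal(0, 1, 200).cumsum(),
        "RSI": 50 + 20 * np.sin(np.linspace(0, 12, 200)),
    },
    index=idx,
)

threshold, period = 45, 90
# Upward crossing: RSI was below the threshold and is now at or above it.
crossed_up = (frame.RSI >= threshold) & (frame.RSI.shift(1) < threshold)
values = []
for crossing_date in frame[crossed_up].index:
    window = frame[
        (frame.index >= crossing_date)
        & (frame.index <= crossing_date + datetime.timedelta(days=period))
    ]
    # Return from the crossing day's close to the last close in the window.
    values.append((window.CLOSE.iloc[-1] / window.CLOSE.iloc[0] - 1) * 100)
print(float(np.percentile(values, 30)))  # 30th-percentile follow-through return
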
@@ -265,7 +305,7 @@ class IndicatorFunction(BaseModel):
                 data_ = function(data)
                 break
             except Exception as e:
-                logger.warning(f"Fail to compute function {function.__name__}: {e}")
+                logger.error(f"Fail to compute function {function.__name__}: {e}")
         if data_ is None:
             raise ValueError("No data returned from indicator functions.")
         if not set(self.expected_columns).issubset(set(data_.columns)):
@@ -347,6 +387,9 @@ PRICE = IndicatorFunction(
         "20_DAY_HIGH",
         "20_DAY_LOW",
         "LAST_PRICE",
+        "WEEKLY_GROWTH",
+        "MONTHLY_GROWTH",
+        "YEARLY_GROWTH",
     ],
     functions=[compute_price],
 )
@@ -1,10 +1,10 @@
 import logging
 from datetime import date
-from typing import Optional, List, Callable, Any, Literal, Dict, Union, Self
+from typing import Optional, List, Callable, Any, Literal, Dict, Union
 
 import numpy as np
 import pandas as pd
-from pydantic import BaseModel, Field, PrivateAttr, create_model, model_validator
+from pydantic import BaseModel, Field, PrivateAttr, create_model
 
 from bullish.analysis.functions import (
     cross,
@@ -19,6 +19,7 @@ from bullish.analysis.functions import (
     SMA,
     ADOSC,
     PRICE,
+    compute_percentile_return_after_rsi_crossover,
 )
 
 logger = logging.getLogger(__name__)
@@ -49,14 +50,6 @@ class Signal(BaseModel):
         else:
             self.value = self.function(data)  # type: ignore
 
-    @model_validator(mode="after")
-    def _validate(self) -> Self:
-        if self.type == Optional[float] and self.range is None:
-            raise ValueError(
-                "Range must be defined for signals of type Optional[float]"
-            )
-        return self
-
 
 class Indicator(BaseModel):
     name: str
@@ -100,7 +93,7 @@ def indicators_factory() -> List[Indicator]:
                     type=Optional[date],
                     function=lambda d: d[
                         (d.ADX_14 > 20) & (d.PLUS_DI > d.MINUS_DI)
-                    ].index[-1],
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="ADX_14_SHORT",
@@ -109,7 +102,7 @@
                     type=Optional[date],
                     function=lambda d: d[
                         (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI)
-                    ].index[-1],
+                    ].last_valid_index(),
                 ),
             ],
         ),
@@ -156,16 +149,30 @@ def indicators_factory() -> List[Indicator]:
         Indicator(
             name="RSI",
             description="Relative Strength Index",
-            expected_columns=["RSI"],
+            expected_columns=RSI.expected_columns,
             function=RSI.call,
             signals=[
                 Signal(
-                    name="RSI_BULLISH_CROSSOVER",
+                    name="RSI_BULLISH_CROSSOVER_30",
                     description="RSI Bullish Crossover",
                     type_info="Long",
                     type=Optional[date],
                     function=lambda d: cross_value(d.RSI, 30),
                 ),
+                Signal(
+                    name="RSI_BULLISH_CROSSOVER_40",
+                    description="RSI Bullish Crossover 40",
+                    type_info="Long",
+                    type=Optional[date],
+                    function=lambda d: cross_value(d.RSI, 40),
+                ),
+                Signal(
+                    name="RSI_BULLISH_CROSSOVER_45",
+                    description="RSI Bullish Crossover 45",
+                    type_info="Long",
+                    type=Optional[date],
+                    function=lambda d: cross_value(d.RSI, 45),
+                ),
                 Signal(
                     name="RSI_BEARISH_CROSSOVER",
                     description="RSI Bearish Crossover",
@@ -178,21 +185,32 @@ def indicators_factory() -> List[Indicator]:
                     description="RSI Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].index[-1],
+                    function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].last_valid_index(),
                 ),
                 Signal(
                     name="RSI_OVERBOUGHT",
                     description="RSI Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[(d.RSI < 100) & (d.RSI > 70)].index[-1],
+                    function=lambda d: d[
+                        (d.RSI < 100) & (d.RSI > 70)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="RSI_NEUTRAL",
                     description="RSI Neutral Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[(d.RSI < 60) & (d.RSI > 40)].index[-1],
+                    function=lambda d: d[
+                        (d.RSI < 60) & (d.RSI > 40)
+                    ].last_valid_index(),
+                ),
+                Signal(
+                    name="RETURN_AFTER_RSI_CROSSOVER_45_PERIOD_90",
+                    description="Percentile 30 return after RSI crossover 45 in the next 90 days",
+                    type_info="Long",
+                    type=Optional[float],
+                    function=lambda d: compute_percentile_return_after_rsi_crossover(d),
                 ),
             ],
         ),
@@ -207,14 +225,18 @@ def indicators_factory() -> List[Indicator]:
                     description="Stoch Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[(d.SLOW_K < 20) & (d.SLOW_K > 0)].index[-1],
+                    function=lambda d: d[
+                        (d.SLOW_K < 20) & (d.SLOW_K > 0)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="STOCH_OVERBOUGHT",
                     description="Stoch Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[(d.SLOW_K < 100) & (d.SLOW_K > 80)].index[-1],
+                    function=lambda d: d[
+                        (d.SLOW_K < 100) & (d.SLOW_K > 80)
+                    ].last_valid_index(),
                 ),
             ],
         ),
@@ -229,14 +251,14 @@ def indicators_factory() -> List[Indicator]:
                     description="MFI Oversold Signal",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[(d.MFI < 20)].index[-1],
+                    function=lambda d: d[(d.MFI < 20)].last_valid_index(),
                 ),
                 Signal(
                     name="MFI_OVERBOUGHT",
                     description="MFI Overbought Signal",
                     type_info="Overbought",
                     type=Optional[date],
-                    function=lambda d: d[(d.MFI > 80)].index[-1],
+                    function=lambda d: d[(d.MFI > 80)].last_valid_index(),
                 ),
             ],
         ),
@@ -273,18 +295,39 @@ def indicators_factory() -> List[Indicator]:
                     description="Current price is lower than the 200-day high",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[0.6 * d["200_DAY_HIGH"] > d.LAST_PRICE].index[
-                        -1
-                    ],
+                    function=lambda d: d[
+                        0.6 * d["200_DAY_HIGH"] > d.LAST_PRICE
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="LOWER_THAN_20_DAY_HIGH",
                     description="Current price is lower than the 20-day high",
                     type_info="Oversold",
                     type=Optional[date],
-                    function=lambda d: d[0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE].index[
-                        -1
-                    ],
+                    function=lambda d: d[
+                        0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE
+                    ].last_valid_index(),
+                ),
+                Signal(
+                    name="MEDIAN_WEEKLY_GROWTH",
+                    description="Median weekly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.WEEKLY_GROWTH.unique()),
+                ),
+                Signal(
+                    name="MEDIAN_MONTHLY_GROWTH",
+                    description="Median monthly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.MONTHLY_GROWTH.unique()),
+                ),
+                Signal(
+                    name="MEDIAN_YEARLY_GROWTH",
+                    description="Median yearly growth",
+                    type_info="Oversold",
+                    type=Optional[float],
+                    function=lambda d: np.median(d.YEARLY_GROWTH.unique()),
                 ),
             ],
         ),
@@ -299,7 +342,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median daily Rate of Change of the last 30 days",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_1.tolist()[-30:]),
                 ),
                 Signal(
@@ -307,7 +349,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median weekly Rate of Change of the last 4 weeks",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_7.tolist()[-4:]),
                 ),
                 Signal(
@@ -315,7 +356,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median weekly Rate of Change of the last 12 weeks",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_7.tolist()[-12:]),
                 ),
                 Signal(
@@ -323,7 +363,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="Median monthly Rate of Change of the last 12 Months",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: np.median(d.ROC_30.tolist()[-12:]),
                 ),
                 Signal(
@@ -331,7 +370,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="30-day Rate of Change",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: d.ROC_30.tolist()[-1],
                 ),
                 Signal(
@@ -339,7 +377,6 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Value",
                     description="7-day Rate of Change",
                     type=Optional[float],
-                    range=[-100, 100],
                     function=lambda d: d.ROC_7.tolist()[-1],
                 ),
             ],
@@ -362,9 +399,9 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Oversold",
                     description="20-day breakout confirmed by positive ADOSC",
                     type=Optional[date],
-                    function=lambda d: d[(d.ADOSC_SIGNAL == True)].index[  # noqa: E712
-                        -1
-                    ],
+                    function=lambda d: d[
+                        (d.ADOSC_SIGNAL == True)  # noqa: E712
+                    ].last_valid_index(),
                 ),
             ],
         ),
@@ -387,49 +424,53 @@ def indicators_factory() -> List[Indicator]:
                     type_info="Long",
                     description="Morning Star Candlestick Pattern",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLMORNINGSTAR == 100)].index[-1],
+                    function=lambda d: d[(d.CDLMORNINGSTAR == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDL3LINESTRIKE",
                     description="3 Line Strike Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDL3LINESTRIKE == 100)].index[-1],
+                    function=lambda d: d[(d.CDL3LINESTRIKE == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDL3WHITESOLDIERS",
                     description="3 White Soldiers Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDL3WHITESOLDIERS == 100)].index[-1],
+                    function=lambda d: d[
+                        (d.CDL3WHITESOLDIERS == 100)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="CDLABANDONEDBABY",
                     description="Abandoned Baby Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLABANDONEDBABY == 100)].index[-1],
+                    function=lambda d: d[
+                        (d.CDLABANDONEDBABY == 100)
+                    ].last_valid_index(),
                 ),
                 Signal(
                     name="CDLTASUKIGAP",
                     description="Tasukigap Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLTASUKIGAP == 100)].index[-1],
+                    function=lambda d: d[(d.CDLTASUKIGAP == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDLPIERCING",
                     description="Piercing Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLPIERCING == 100)].index[-1],
+                    function=lambda d: d[(d.CDLPIERCING == 100)].last_valid_index(),
                 ),
                 Signal(
                     name="CDLENGULFING",
                     description="Engulfing Candlestick Pattern",
                     type_info="Long",
                     type=Optional[date],
-                    function=lambda d: d[(d.CDLENGULFING == 100)].index[-1],
+                    function=lambda d: d[(d.CDLENGULFING == 100)].last_valid_index(),
                 ),
             ],
         ),
@@ -441,7 +482,11 @@ class Indicators(BaseModel):
 
     def compute(self, data: pd.DataFrame) -> None:
         for indicator in self.indicators:
-            indicator.compute(data)
+            try:
+                indicator.compute(data)
+            except Exception as e:
+                logger.error(f"Failed to compute indicator {indicator.name}: {e}")
+                continue
             logger.info(
                 f"Computed {indicator.name} with {len(indicator.signals)} signals"
             )
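
Note on the pattern repeated across this file: swapping .index[-1] for .last_valid_index() makes each date signal degrade gracefully, because an empty selection raises IndexError under the old form but simply yields None (a valid Optional[date]) under the new one. Combined with the try/except now wrapped around indicator.compute, one failing indicator no longer aborts the whole run. A quick illustration:

import pandas as pd

d = pd.DataFrame({"RSI": [55.0, 62.0]})
print(d[d.RSI > 70].last_valid_index())  # None - no row matched
# d[d.RSI > 70].index[-1] would raise IndexError on the same selection
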
@@ -4,6 +4,11 @@ from typing import Dict, Any, Optional
 from bullish.analysis.filter import FilterQuery
 from pydantic import BaseModel, Field
 
+DATE_THRESHOLD = [
+    datetime.date.today() - datetime.timedelta(days=10),
+    datetime.date.today(),
+]
+
 
 class NamedFilterQuery(FilterQuery):
     name: str
@@ -35,6 +40,7 @@ STRONG_FUNDAMENTALS = NamedFilterQuery(
         "positive_debt_to_equity",
     ],
     market_capitalization=[1e10, 1e12],  # 1 billion to 1 trillion
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
 )
 
 GOOD_FUNDAMENTALS = NamedFilterQuery(
@@ -51,20 +57,33 @@ GOOD_FUNDAMENTALS = NamedFilterQuery(
         "positive_debt_to_equity",
     ],
     market_capitalization=[1e10, 1e12],  # 1 billion to 1 trillion
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+)
+
+
+SHOOTING_STARS = NamedFilterQuery(
+    name="Shooting stars",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    market_capitalization=[1e9, 1e12],  # 1 billion to 1 trillion
+    order_by_desc="median_yearly_growth",
+    order_by_asc="last_price",
 )
 
+RSI_CROSSOVER = NamedFilterQuery(
+    name="RSI cross-over",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    return_after_rsi_crossover_45_period_90=[0.0, 100],
+    rsi_bullish_crossover_45=DATE_THRESHOLD,
+    market_capitalization=[1e9, 1e12],  # 1 billion to 1 trillion
+    order_by_desc="market_capitalization",
+)
 MICRO_CAP_EVENT_SPECULATION = NamedFilterQuery(
     name="Micro-Cap Event Speculation",
     description="seeks tiny names where unusual volume and price gaps hint at "
     "pending corporate events (patent win, FDA news, buy-out rumors).",
-    positive_adosc_20_day_breakout=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
-    cdltasukigap=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    positive_adosc_20_day_breakout=DATE_THRESHOLD,
     rate_of_change_30=[20, 100],  # 10% to 50% in the last 30 days
     market_capitalization=[0, 5e8],
 )
@@ -78,19 +97,10 @@ MOMENTUM_BREAKOUT_HUNTER = NamedFilterQuery(
         "positive_net_income",
     ],
     cash_flow=["positive_free_cash_flow"],
-    golden_cross=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
-    adx_14_long=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    golden_cross=DATE_THRESHOLD,
+    adx_14_long=DATE_THRESHOLD,
     rate_of_change_30=[0, 100],
-    rsi_neutral=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    rsi_neutral=DATE_THRESHOLD,
 )
 
 DEEP_VALUE_PLUS_CATALYST = NamedFilterQuery(
@@ -101,29 +111,79 @@ DEEP_VALUE_PLUS_CATALYST = NamedFilterQuery(
         "positive_operating_income",
         "positive_net_income",
     ],
-    lower_than_200_day_high=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    lower_than_200_day_high=DATE_THRESHOLD,
     rate_of_change_30=[3, 100],
-    rsi_bullish_crossover=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
 )
 END_OF_TREND_REVERSAL = NamedFilterQuery(
     name="End of trend reversal",
     description="Layers long-term MA breach with momentum exhaustion and a "
     "bullish candle—classic setup for mean-reversion traders.",
-    death_cross=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
+    death_cross=DATE_THRESHOLD,
+    rsi_oversold=DATE_THRESHOLD,
+    candlesticks=["cdlmorningstart", "cdlabandonedbaby", "cdl3whitesoldiers"],
+)
+
+HIGH_QUALITY_CASH_GENERATOR = NamedFilterQuery(
+    name="High Quality Cash Generator",
+    description="This quartet isolates companies that are profitable, cash-rich, and disciplined with leverage. "
+    "Ideal first pass for “quality” or “compounder” "
+    "portfolios where downside protection matters as much as upside.",
+    income=[
+        "positive_net_income",
     ],
-    rsi_oversold=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
+    cash_flow=["positive_free_cash_flow"],
+    properties=[
+        "operating_cash_flow_is_higher_than_net_income",
+        "positive_return_on_equity",
+        "positive_return_on_assets",
+        "positive_debt_to_equity",
     ],
-    candlesticks=["cdlmorningstart", "cdlabandonedbaby", "cdl3whitesoldiers"],
+)
+
+EARNINGS_ACCELERATION_TREND_CONFIRMATION = NamedFilterQuery(
+    name="Earnings Acceleration Trend Confirmation",
+    description="Pairs fundamental acceleration with momentum confirmation. Research shows this “double positive” "
+    "outperforms simple momentum because it filters out purely sentiment-driven rallies.",
+    income=[
+        "growing_operating_income",
+        "positive_net_income",
+    ],
+    eps=["growing_basic_eps"],
+    golden_cross=DATE_THRESHOLD,
+    macd_12_26_9_bullish_crossover=DATE_THRESHOLD,
+    adx_14_long=DATE_THRESHOLD,
+)
+DIVIDEND_GROWTH_COMPOUNDER = NamedFilterQuery(
+    name="Dividend-Growth Compounders",
+    description="Separates true dividend growers from high-yield traps. "
+    "Critical for income portfolios that need both yield and growth to beat inflation.",
+    mean_dividend_payout_ratio=[0, 0.6],  # 0% to 60% payout ratio
+    cash_flow=[
+        "positive_free_cash_flow",
+        "quarterly_positive_free_cash_flow",
+        "growing_operating_cash_flow",
+    ],
+    properties=["quarterly_positive_return_on_equity"],
+)
+
+BREAK_OUT_MOMENTUM = NamedFilterQuery(
+    name="Break-out Momentum",
+    description="Combines price, volume, and pattern confirmation. Great for tactical traders seeking "
+    "quick continuation moves with statistically higher follow-through.",
+    adosc_crosses_above_0=DATE_THRESHOLD,
+    positive_adosc_20_day_breakout=DATE_THRESHOLD,
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+)
+
+OVERSOLD_MEAN_REVERSION = NamedFilterQuery(
+    name="Oversold Mean Reversion",
+    description="Gives contrarian traders a high-probability bounce setup by "
+    "stacking three different oversold measures plus a reversal pattern.",
+    rsi_oversold=DATE_THRESHOLD,
+    stoch_oversold=DATE_THRESHOLD,
+    mfi_oversold=DATE_THRESHOLD,
+    lower_than_200_day_high=DATE_THRESHOLD,
 )
 
 
@@ -135,6 +195,13 @@ def predefined_filters() -> list[NamedFilterQuery]:
         MOMENTUM_BREAKOUT_HUNTER,
         DEEP_VALUE_PLUS_CATALYST,
         END_OF_TREND_REVERSAL,
+        HIGH_QUALITY_CASH_GENERATOR,
+        EARNINGS_ACCELERATION_TREND_CONFIRMATION,
+        DIVIDEND_GROWTH_COMPOUNDER,
+        BREAK_OUT_MOMENTUM,
+        OVERSOLD_MEAN_REVERSION,
+        SHOOTING_STARS,
+        RSI_CROSSOVER,
     ]
 
 
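Note: DATE_THRESHOLD consolidates the previously repeated inline 5-day windows into a shared 10-day lookback. Like the inline lists it replaces, it is evaluated once at import time, so the window stays anchored to the moment the module was loaded rather than the moment a filter runs:

import datetime

DATE_THRESHOLD = [
    datetime.date.today() - datetime.timedelta(days=10),
    datetime.date.today(),
]
# If imported on 2025-07-10 this is [date(2025, 6, 30), date(2025, 7, 10)];
# a dated signal passes the filter when it falls inside this closed interval.
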
bullish/app/app.py CHANGED
@@ -24,7 +24,6 @@ from bullish.analysis.filter import (
     GeneralFilter,
     TechnicalAnalysisFilters,
 )
-from bullish.jobs.models import JobTracker
 from bullish.jobs.tasks import update, news, analysis
 from pydantic import BaseModel
 
@@ -85,8 +84,9 @@ def on_table_select() -> None:
         query = AssetQuery(symbols=Symbols(equities=[Ticker(symbol=symbol)]))
         prices = db.read_series(query, months=24)
         data = Prices(prices=prices).to_dataframe()
+        dates = db.read_dates(symbol)
 
-        fig = plot(data, symbol)
+        fig = plot(data, symbol, dates=dates)
 
         st.session_state.ticker_figure = fig
 
@@ -145,6 +145,13 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
                 default=default,
                 key=hash((model.__name__, field)),
             )
+        elif info.annotation == Optional[str]:  # type: ignore
+            data[field] = st.selectbox(
+                name,
+                ["", *groups_mapping()[field]],
+                index=0 if not default else groups_mapping()[field].index(default),
+                key=hash((model.__name__, field)),
+            )
 
         else:
             ge = next(
@@ -166,7 +173,6 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
 @st.dialog("⏳ Jobs", width="large")
 def jobs() -> None:
     with st.expander("Update data"):
-        bearish_db_ = bearish_db(st.session_state.database_path)
         update_query = sp.pydantic_form(key="update", model=FilterUpdate)
         if (
             update_query
@@ -174,19 +180,20 @@
             and not st.session_state.data.empty
         ):
             symbols = st.session_state.data["symbol"].unique().tolist()
-            res = update(
+            update(
                 database_path=st.session_state.database_path,
+                job_type="Update data",
                 symbols=symbols,
                 update_query=update_query,
             )  # enqueue & get result-handle
-            bearish_db_.write_job_tracker(
-                JobTracker(job_id=str(res.id), type="Update data")
-            )
+
             st.success("Data update job has been enqueued.")
             st.rerun()
     with st.expander("Update analysis"):
         if st.button("Update analysis"):
-            analysis(st.session_state.database_path)
+            analysis(st.session_state.database_path, job_type="Update analysis")
+            st.success("Data update job has been enqueued.")
+            st.rerun()
 
 
 @st.dialog("📥 Load", width="large")
@@ -284,14 +291,12 @@ def save_filtered_results(bearish_db_: BullishDb) -> None:
         )
 
         bearish_db_.write_filtered_results(filtered_results)
-        res = news(
+        news(
             database_path=st.session_state.database_path,
+            job_type="Fetching news",
             symbols=symbols,
             headless=headless,
         )
-        bearish_db_.write_job_tracker(
-            JobTracker(job_id=str(res.id), type="Fetching news")
-        )
         st.session_state.filter_query = None
         st.session_state.query = None
         st.rerun()
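
Note: order_by_asc and order_by_desc are the only Optional[str] fields on GeneralFilter, so the new Optional[str] branch in build_filter is what surfaces them in the UI, as selectboxes whose choices come from the GROUP_MAPPING entries added in filter.py. The option-list lookup in isolation (no streamlit; illustrative mapping contents):

from typing import Optional

GROUP_MAPPING = {"order_by_desc": ["last_price", "median_yearly_growth"]}  # illustrative


def options_for(field: str, annotation: object) -> list[str]:
    # Mirrors the selectbox branch: empty string (no ordering) first, then the choices.
    if annotation == Optional[str]:
        return ["", *GROUP_MAPPING.get(field, [])]
    return []


print(options_for("order_by_desc", Optional[str]))  # ['', 'last_price', 'median_yearly_growth']
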
bullish/cli.py CHANGED
@@ -1,4 +1,6 @@
 from __future__ import annotations
+
+import os
 import subprocess
 import signal
 import sys
@@ -57,6 +59,7 @@ def serve(
             host,
             "--server.port",
             str(port),
+            os.devnull,
         ]
     )
 )
@@ -0,0 +1,85 @@
+"""
+
+Revision ID: 17e51420e7ad
+Revises: d663166c531d
+Create Date: 2025-07-10 17:35:02.376675
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "17e51420e7ad"
+down_revision: Union[str, None] = "d663166c531d"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_30", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_40", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_45", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column(
+                "return_after_rsi_crossover_45_period_90", sa.Float(), nullable=True
+            )
+        )
+        batch_op.drop_index(batch_op.f("ix_analysis_rsi_bullish_crossover"))
+        batch_op.create_index(
+            "ix_analysis_return_after_rsi_crossover_45_period_90",
+            ["return_after_rsi_crossover_45_period_90"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_30",
+            ["rsi_bullish_crossover_30"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_40",
+            ["rsi_bullish_crossover_40"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_45",
+            ["rsi_bullish_crossover_45"],
+            unique=False,
+        )
+        batch_op.drop_column("rsi_bullish_crossover")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover", sa.DATE(), nullable=True)
+        )
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_45")
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_40")
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_30")
+        batch_op.drop_index("ix_analysis_return_after_rsi_crossover_45_period_90")
+        batch_op.create_index(
+            batch_op.f("ix_analysis_rsi_bullish_crossover"),
+            ["rsi_bullish_crossover"],
+            unique=False,
+        )
+        batch_op.drop_column("return_after_rsi_crossover_45_period_90")
+        batch_op.drop_column("rsi_bullish_crossover_45")
+        batch_op.drop_column("rsi_bullish_crossover_40")
+        batch_op.drop_column("rsi_bullish_crossover_30")
+
+    # ### end Alembic commands ###
@@ -0,0 +1,56 @@
+"""
+
+Revision ID: d663166c531d
+Revises: fc191121f522
+Create Date: 2025-07-09 17:44:25.728075
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "d663166c531d"
+down_revision: Union[str, None] = "fc191121f522"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("median_weekly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_monthly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_yearly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.create_index(
+            "ix_analysis_median_monthly_growth", ["median_monthly_growth"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_median_weekly_growth", ["median_weekly_growth"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_median_yearly_growth", ["median_yearly_growth"], unique=False
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.drop_index("ix_analysis_median_yearly_growth")
+        batch_op.drop_index("ix_analysis_median_weekly_growth")
+        batch_op.drop_index("ix_analysis_median_monthly_growth")
+        batch_op.drop_column("median_yearly_growth")
+        batch_op.drop_column("median_monthly_growth")
+        batch_op.drop_column("median_weekly_growth")
+    # ### end Alembic commands ###
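
Note: the two new migration files chain onto the existing head, fc191121f522 → d663166c531d (adds and indexes the three median-growth columns) → 17e51420e7ad (replaces the single rsi_bullish_crossover column and index with the 30/40/45 variants plus the percentile-return column), so upgrading a 0.8.0 database applies both revisions in order.
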
bullish/database/crud.py CHANGED
@@ -1,12 +1,15 @@
 import json
 import logging
+from datetime import date
 from functools import cached_property
 from pathlib import Path
+from sqlite3 import OperationalError
 from typing import TYPE_CHECKING, Any, List, Optional
 
 import pandas as pd
 from bearish.database.crud import BearishDb  # type: ignore
 from bearish.models.base import Ticker  # type: ignore
+from bearish.database.schemas import EarningsDateORM  # type: ignore
 from pydantic import ConfigDict
 from sqlalchemy import Engine, create_engine, insert, delete, update
 from sqlmodel import Session, select
@@ -40,7 +43,14 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
         if not self.valid():
             raise DatabaseFileNotFoundError("Database file not found.")
         database_url = f"sqlite:///{Path(self.database_path)}"
-        upgrade(self.database_path)
+        try:
+            upgrade(self.database_path)
+        except OperationalError as e:
+            logger.warning(
+                f"Failed to upgrade the database at {self.database_path}. "
+                f"Reason: {e}"
+                "Skipping upgrade. "
+            )
         engine = create_engine(database_url)
         return engine
 
@@ -101,6 +111,14 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
             session.exec(stmt)  # type: ignore
             session.commit()
 
+    def read_job_tracker(self, task_id: str) -> Optional[JobTracker]:
+        stmt = select(JobTrackerORM).where(JobTrackerORM.job_id == task_id)
+        with Session(self._engine) as session:
+            result = session.execute(stmt).scalar_one_or_none()
+            if result:
+                return JobTracker.model_validate(result.model_dump())
+            return None
+
     def delete_job_trackers(self, job_ids: List[str]) -> None:
         with Session(self._engine) as session:
             stmt = delete(JobTrackerORM).where(JobTrackerORM.job_id.in_(job_ids))  # type: ignore
@@ -156,3 +174,12 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
             )
             session.exec(stmt)  # type: ignore
             session.commit()
+
+    def read_dates(self, symbol: str) -> List[date]:
+        with Session(self._engine) as session:
+            return [
+                r.date()
+                for r in session.exec(
+                    select(EarningsDateORM.date).where(EarningsDateORM.symbol == symbol)
+                )
+            ]
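
One thing worth flagging in the upgrade guard: the except clause catches the DBAPI-level sqlite3.OperationalError, while SQLAlchemy normally surfaces driver failures wrapped in sqlalchemy.exc.OperationalError, which is not a subclass of the DBAPI class. Whether this guard fires therefore depends on what the package's upgrade() helper lets propagate:

import sqlite3

import sqlalchemy.exc

# SQLAlchemy wraps the DBAPI error rather than subclassing it:
print(issubclass(sqlalchemy.exc.OperationalError, sqlite3.OperationalError))  # False
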
@@ -1,17 +1,19 @@
-from typing import Optional
+import datetime
+from typing import Optional, List
 
 import pandas as pd
 import plotly.graph_objects as go
 from plotly.subplots import make_subplots
 
 from bullish.analysis.functions import add_indicators
+from datetime import date
 
 
 def plot(
     data: pd.DataFrame,
     symbol: str,
     name: Optional[str] = None,
-    dates: Optional[pd.Series] = None,
+    dates: Optional[List[date]] = None,
 ) -> go.Figure:
     data = add_indicators(data)
     fig = make_subplots(
@@ -125,10 +127,21 @@ def plot(
         row=7,
         col=1,
     )
-    if dates is not None and not dates.empty:
+    if dates is not None and dates:
         for date in dates:
+            if (
+                data.first_valid_index().date() > date  # type: ignore
+                or data.last_valid_index().date() + datetime.timedelta(days=31 * 3)  # type: ignore
+                < date
+            ):
+                continue
             fig.add_vline(
-                x=date, line_dash="dashdot", line_color="MediumPurple", line_width=3
+                x=date,
+                line_dash="dashdot",
+                line_color="MediumPurple",
+                line_width=1,
+                row=1,
+                col=1,
             )
 
     # Layout tweaks
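
Note on the vline filter: earnings dates are now drawn only when they fall between the first plotted day and roughly a quarter (31 * 3 days) past the last one, and only on the price panel (row 1) instead of spanning every subplot. The window test on bare dates:

import datetime

import pandas as pd

index = pd.date_range("2024-01-01", "2024-12-31", freq="D")
first, last = index[0].date(), index[-1].date()


def in_window(d: datetime.date) -> bool:
    # Keep dates from the first plotted day up to ~3 months past the last one.
    return first <= d <= last + datetime.timedelta(days=31 * 3)


print(in_window(datetime.date(2023, 6, 1)))  # False - before the data starts
print(in_window(datetime.date(2025, 2, 1)))  # True  - inside the 93-day headroom
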
@@ -1,5 +1,6 @@
 import abc
 import logging
+from datetime import date
 from typing import List, Optional
 
 import pandas as pd
@@ -83,3 +84,9 @@ class BullishDbBase(BearishDbBase):  # type: ignore
 
     @abc.abstractmethod
     def read_symbols(self) -> List[str]: ...
+
+    @abc.abstractmethod
+    def read_job_tracker(self, task_id: str) -> Optional[JobTracker]: ...
+
+    @abc.abstractmethod
+    def read_dates(self, symbol: str) -> List[date]: ...
bullish/jobs/tasks.py CHANGED
@@ -10,7 +10,7 @@ from .app import huey
 from pathlib import Path
 from huey.api import Task  # type: ignore
 
-from .models import JobTrackerStatus
+from .models import JobTrackerStatus, JobTracker, JobType
 from ..analysis.analysis import run_analysis
 from ..database.crud import BullishDb
 from bullish.analysis.filter import FilterUpdate
@@ -21,16 +21,22 @@ logger = logging.getLogger(__name__)
 def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
     @functools.wraps(func)
     def wrapper(
-        database_path: Path, *args: Any, task: Optional[Task] = None, **kwargs: Any
+        database_path: Path,
+        job_type: JobType,
+        *args: Any,
+        task: Optional[Task] = None,
+        **kwargs: Any,
     ) -> None:
         bullish_db = BullishDb(database_path=database_path)
         if task is None:
             raise ValueError("Task must be provided for job tracking.")
+        if bullish_db.read_job_tracker(task.id) is None:
+            bullish_db.write_job_tracker(JobTracker(job_id=str(task.id), type=job_type))
         bullish_db.update_job_tracker_status(
             JobTrackerStatus(job_id=task.id, status="Running")
         )
         try:
-            func(database_path, *args, task=task, **kwargs)
+            func(database_path, job_type, *args, task=task, **kwargs)
             bullish_db.update_job_tracker_status(
                 JobTrackerStatus(job_id=task.id, status="Completed")
             )
@@ -47,6 +53,7 @@ def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
 @job_tracker
 def update(
     database_path: Path,
+    job_type: JobType,
     symbols: List[str],
     update_query: FilterUpdate,
     task: Optional[Task] = None,
@@ -69,6 +76,7 @@ def update(
 @job_tracker
 def analysis(
     database_path: Path,
+    job_type: JobType,
     task: Optional[Task] = None,
 ) -> None:
     bullish_db = BullishDb(database_path=database_path)
@@ -79,6 +87,7 @@
 @job_tracker
 def news(
     database_path: Path,
+    job_type: JobType,
     symbols: List[str],
     headless: bool = True,
     task: Optional[Task] = None,
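
Note on the reworked job_tracker: the decorator now owns the tracker lifecycle end to end. It takes the human-readable job_type, inserts the JobTracker row itself (only if no row exists for the task id yet), then flips the status, which is why the app-side write_job_tracker calls were removed above. A stripped-down sketch of the same shape, with a dict standing in for BullishDb and the failure branch (outside the visible hunk) abbreviated:

import functools
from typing import Any, Callable, Optional

TRACKERS: dict[str, dict[str, str]] = {}  # job_id -> row; stands in for the database


def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
    @functools.wraps(func)
    def wrapper(database_path: str, job_type: str, *args: Any,
                task_id: Optional[str] = None, **kwargs: Any) -> None:
        if task_id is None:
            raise ValueError("Task must be provided for job tracking.")
        if task_id not in TRACKERS:  # write_job_tracker, once per task id
            TRACKERS[task_id] = {"type": job_type, "status": "Pending"}
        TRACKERS[task_id]["status"] = "Running"
        try:
            func(database_path, job_type, *args, task_id=task_id, **kwargs)
            TRACKERS[task_id]["status"] = "Completed"
        except Exception:
            TRACKERS[task_id]["status"] = "Failed"

    return wrapper


@job_tracker
def analysis(database_path: str, job_type: str, task_id: Optional[str] = None) -> None:
    pass  # run_analysis(...) in the real task


analysis("bullish.db", "Update analysis", task_id="42")
print(TRACKERS)  # {'42': {'type': 'Update analysis', 'status': 'Completed'}}
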
bullish/utils/checks.py CHANGED
@@ -40,6 +40,8 @@ def get_table_names_from_path(database_path: Path) -> List[str]:
 
 
 def empty_analysis_table(database_path: Path) -> bool:
+    if "analysis" not in get_table_names_from_path(database_path):
+        return True
     with get_sqlite_connection(database_path) as conn:
         cursor = conn.cursor()
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: bullishpy
-Version: 0.8.0
+Version: 0.10.0
 Summary:
 Author: aan
 Author-email: andoludovic.andriamamonjy@gmail.com
@@ -8,6 +8,7 @@ Requires-Python: >=3.12,<3.13
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: bearishpy (>=0.20.0,<0.21.0)
+Requires-Dist: click (>=7.0,<=8.1)
 Requires-Dist: huey (>=2.5.3,<3.0.0)
 Requires-Dist: pandas-ta (>=0.3.14b0,<0.4.0)
 Requires-Dist: plotly (>=6.1.2,<7.0.0)
@@ -1,13 +1,13 @@
 bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/analysis/analysis.py,sha256=ZqIMdAq1NJYgoQZ52PsV724VsXK8Jtno4alFFd_7_XU,17842
-bullish/analysis/filter.py,sha256=DzhA-cuKzZhIL1kwi61PTz77BBo8B6G_7EI4OVkPrww,17123
-bullish/analysis/functions.py,sha256=yM4d7C_2gwEnV_t1-TWvYOzpeKx36WhupjPZxBSsOYg,12317
-bullish/analysis/indicators.py,sha256=kOvtnrz-9-_fCUUFE842ijwjDbF9X2O-W4YoPkSDisk,18117
-bullish/analysis/predefined_filters.py,sha256=g4vScDC4qiEzKb6gVhvxSFriw1fbi71k1xV3nNS7VS4,4715
+bullish/analysis/analysis.py,sha256=9ugJp2fGuA6xFUlGIJVIHWZ8d8E6TdbQHWjg5zWPIwY,18445
+bullish/analysis/filter.py,sha256=S8TuxoTAUY0U8ARPjNHE0tSSE_ToWkfZazAgnfgswk4,18136
+bullish/analysis/functions.py,sha256=KKz_0C7maQmcGu2tGwZvioxzmh-JcB-YNpPQGjyyheA,13825
+bullish/analysis/indicators.py,sha256=hZgzTq-80XPP6x7dXGhxd-Zzgra-6D-g3pVxUBYOW44,20167
+bullish/analysis/predefined_filters.py,sha256=oEmlMW0jvBtHeOjbc7tmTEN1M8ymhRchLC6jqmULwCw,7524
 bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/app/app.py,sha256=Es244AnLce32d-MUYbKXZeEPE8GlfCUO4SQ7aiqooGc,12539
-bullish/cli.py,sha256=C31Pj7XGzdLz2Y3nIPQ7CF1DvyGVU5EyLvzj423QbwQ,1915
+bullish/app/app.py,sha256=LA-NpNkAz1jsaKVJaQnbf7yQ4IjhA575ZagTGJsDuZY,12733
+bullish/cli.py,sha256=uYLZmGDAolZKWzduZ58bP-xul1adg0oKfeUQtZMXTvA,1958
 bullish/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/database/alembic/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
 bullish/database/alembic/alembic.ini,sha256=VuwqBJV5ObTyyRNrqv8Xr-TDIRfqPjP9R1mqewYM_xE,3695
@@ -16,12 +16,14 @@ bullish/database/alembic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NF
 bullish/database/alembic/versions/037dbd721317_.py,sha256=U7EA4odH3t9w0-J4FmvBUt8HOuGDMn0rEAu_0vPUYaI,8595
 bullish/database/alembic/versions/08ac1116e055_.py,sha256=zMEiCbraMEAZItT4ibc3evAH7-7mkXpdgnZy4tPVYeg,27263
 bullish/database/alembic/versions/11d35a452b40_.py,sha256=j2PaU1RssLQ20OevGmBC7S9E9ocWiXpBue9SOS4AQoY,11521
+bullish/database/alembic/versions/17e51420e7ad_.py,sha256=xeiVIm1YUZb08opE9rocHZP1__9WQWXsKsXgeFV9cvs,2960
 bullish/database/alembic/versions/49c83f9eb5ac_.py,sha256=kCBItp7KmqpJ03roy5ikQjhefZia1oKgfZwournQDq8,3890
 bullish/database/alembic/versions/4b0a2f40b7d3_.py,sha256=G0K7w7pOPYjPZkXTB8LWhxoxuWBPcPwOfnubTBtdeEY,1827
 bullish/database/alembic/versions/73564b60fe24_.py,sha256=MTlDRDNHj3E9gK7IMeAzv2UxxxYtWiu3gI_9xTLE-wg,1008
+bullish/database/alembic/versions/d663166c531d_.py,sha256=U92l6QXqPniAYrPeu2Bt77ReDbXveLj4aGXtgd806JY,1915
 bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
 bullish/database/alembic/versions/fc191121f522_.py,sha256=0sstF6TpAJ09-Mt-Vek9SdSWksvi4C58a5D92rBtuY8,1894
-bullish/database/crud.py,sha256=2D0uDxJlDv1eJFRmgipiCQxCvlC5ILB2OR4OORlIJf4,5967
+bullish/database/crud.py,sha256=6-Fb1AjGZqsrmwwl2Qay_leqQ9_-RAIjZ8D0efe8nKA,7022
 bullish/database/schemas.py,sha256=bU-DW49NqpBp--1VN486LUdDmLeScrI8TF69afzjoTc,1507
 bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
 bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
@@ -29,16 +31,16 @@ bullish/database/scripts/upgrade.py,sha256=-Gz7aFNPEt9y9e1kltqXE76-j_8QeNtet_Vlw
 bullish/database/settings.py,sha256=nMudufmF7iC_62_PHrGSMjlqDLN2I0qTbtz9JKZHSko,164
 bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
 bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/figures/figures.py,sha256=W4XJIs5wFtpX75OgoocVcuuA8Hdb_SNN58VW0LYI5oI,3808
+bullish/figures/figures.py,sha256=SWTTiEoVyWMZeIIxg0ERi23v7s4tySB5BLKyPu12jC4,4193
 bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/interface/interface.py,sha256=T3MdJbX2ZF7hiMhnZ00ppXSXbDwyxaqD69a3M4bPswU,2806
+bullish/interface/interface.py,sha256=-3V4M1J2VVn0ugwbaCKzMuBYRHH7eFmz9gij0hUTNUw,3013
 bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
 bullish/jobs/models.py,sha256=ndrGTMP08S57yGLGEG9TQt8Uw2slc4HvbG-TZtEEuN0,744
-bullish/jobs/tasks.py,sha256=gJEB342nCTw2KD3YgDXpQhGGITcXd14GAMiqRRVbZ-A,2757
+bullish/jobs/tasks.py,sha256=V_b0c8_GQC0-KIxaHDlLFhtkclQJOsck0gXaW6OlC_w,3055
 bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/utils/checks.py,sha256=rUyFNkx9F5DuOSxjldXymNHwRxIddN7PYbqPICnz1uM,2101
-bullishpy-0.8.0.dist-info/METADATA,sha256=h5onZ3YSy7ji9BQQjjuOy7ks90Ftt5obE3cf3LeJu0Q,709
-bullishpy-0.8.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-bullishpy-0.8.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
-bullishpy-0.8.0.dist-info/RECORD,,
+bullish/utils/checks.py,sha256=Va10_xDVVnxYkOD2hafvyQ-TFV8FQpOkr4huJ7XgpDM,2188
+bullishpy-0.10.0.dist-info/METADATA,sha256=pAoW7ZWqHdzTZygnklF37z9CW9dxHVfqDNCKIube1fU,745
+bullishpy-0.10.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+bullishpy-0.10.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
+bullishpy-0.10.0.dist-info/RECORD,,