bullishpy 0.28.0__py3-none-any.whl → 0.29.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic.

@@ -1,5 +1,6 @@
  import logging
  import time
+ from datetime import date
  from itertools import batched, chain
  from pathlib import Path
  from typing import (
@@ -15,7 +16,6 @@ from typing import (
  )

  import pandas as pd
- from bearish.interface.interface import BearishDbBase # type: ignore
  from bearish.models.assets.equity import BaseEquity # type: ignore
  from bearish.models.base import ( # type: ignore
  DataSourceBase,
@@ -87,10 +87,10 @@ def _compute_growth(series: pd.Series) -> bool:
  return all(series.pct_change(fill_method=None).dropna() > 0)


- def _all_positive(series: pd.Series) -> bool:
+ def _all_positive(series: pd.Series, threshold: int = 0) -> bool:
  if series.empty:
  return False
- return all(series.dropna() > 0)
+ return all(series.dropna() > threshold)


  def _get_last(data: pd.Series) -> Optional[float]:
@@ -274,7 +274,7 @@ class BaseFundamentalAnalysis(BaseModel):
  debt_to_equity = (
  balance_sheet.total_liabilities / balance_sheet.total_shareholder_equity
  ).dropna()
- positive_debt_to_equity = _all_positive(debt_to_equity)
+ positive_debt_to_equity = _all_positive(debt_to_equity, threshold=1)

  # Add relevant balance sheet data to financials
  financial["total_shareholder_equity"] = balance_sheet[
@@ -433,6 +433,10 @@ class FundamentalAnalysis(YearlyFundamentalAnalysis, QuarterlyFundamentalAnalysi
  ]


+ class AnalysisEarningsDate(BaseModel):
+ next_earnings_date: Optional[date] = None
+
+
  class AnalysisView(BaseModel):
  sector: Annotated[
  Optional[str],
@@ -484,15 +488,15 @@ class AnalysisView(BaseModel):
  default=None,
  ),
  ]
- median_yearly_growth: Optional[float] = None
- median_weekly_growth: Optional[float] = None
- median_monthly_growth: Optional[float] = None
+ yearly_growth: Optional[float] = None
+ weekly_growth: Optional[float] = None
+ monthly_growth: Optional[float] = None


- class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis): # type: ignore
+ class Analysis(AnalysisEarningsDate, AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis): # type: ignore

  @classmethod
- def from_ticker(cls, bearish_db: BearishDbBase, ticker: Ticker) -> "Analysis":
+ def from_ticker(cls, bearish_db: "BullishDb", ticker: Ticker) -> "Analysis":
  asset = bearish_db.read_assets(
  AssetQuery(
  symbols=Symbols(equities=[ticker]),
@@ -504,11 +508,13 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
  fundamental_analysis = FundamentalAnalysis.from_financials(financials, ticker)
  prices = Prices.from_ticker(bearish_db, ticker)
  technical_analysis = TechnicalAnalysis.from_data(prices.to_dataframe(), ticker)
+ next_earnings_date = bearish_db.read_next_earnings_date(ticker.symbol)
  return cls.model_validate(
  equity.model_dump()
  | fundamental_analysis.model_dump()
  | technical_analysis.model_dump()
  | {
+ "next_earnings_date": next_earnings_date,
  "price_per_earning_ratio": (
  (
  technical_analysis.last_price
@@ -518,7 +524,7 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
  and fundamental_analysis.earning_per_share != 0
  and fundamental_analysis.earning_per_share is not None
  else None
- )
+ ),
  }
  )

@@ -308,23 +308,20 @@ Sector = Literal[
  "Financial Services",
  "Conglomerates",
  ]
-
- SubCountry = Literal["United kingdom", "United states", "Germany", "Belgium", "France"]
+ Europe = Literal["Germany", "Belgium", "France"]
+ Us = Literal["United states"]
+ WesternCountries = Literal["United kingdom", Europe, Us]
  Country = Literal[
+ WesternCountries,
  "Australia",
  "China",
  "Japan",
- "United kingdom",
- "United states",
  "Poland",
  "Switzerland",
  "Canada",
  "Greece",
  "Spain",
- "Germany",
  "Indonesia",
- "Belgium",
- "France",
  "Netherlands",
  "British virgin islands",
  "Italy",
@@ -172,6 +172,10 @@ class GeneralFilter(BaseModel):
  industry_group: Optional[List[str]] = None
  sector: Optional[List[str]] = None
  symbol: Optional[List[str]] = None
+ limit: Optional[str] = None
+ next_earnings_date: List[date] = Field(
+ default=[date.today(), date.today() + datetime.timedelta(days=30 * 12)],
+ )
  market_capitalization: Optional[List[float]] = Field(default=[5e8, 1e12])
  price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])

@@ -186,11 +190,12 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
  ).items()
  )

- def to_query(self) -> str:
+ def to_query(self) -> str: # noqa: C901
  parameters = self.model_dump(exclude_defaults=True, exclude_unset=True)
  query = []
  order_by_desc = ""
  order_by_asc = ""
+ limit = None
  for parameter, value in parameters.items():
  if not value:
  continue
@@ -207,10 +212,20 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
  order_by_desc = f"ORDER BY {value} DESC"
  elif isinstance(value, str) and bool(value) and parameter == "order_by_asc":
  order_by_asc = f"ORDER BY {value} ASC"
+ elif isinstance(value, str) and bool(value) and parameter == "limit":
+ limit = f" LIMIT {int(value)}"
  elif (
- isinstance(value, list)
- and len(value) == SIZE_RANGE
- and all(isinstance(item, (int, float)) for item in value)
+ (
+ isinstance(value, list)
+ and len(value) == SIZE_RANGE
+ and all(isinstance(item, date) for item in value)
+ )
+ and parameter == "next_earnings_date"
+ or (
+ isinstance(value, list)
+ and len(value) == SIZE_RANGE
+ and all(isinstance(item, (int, float)) for item in value)
+ )
  ):
  query.append(f"{parameter} BETWEEN {value[0]} AND {value[1]}")
  elif (
@@ -229,7 +244,12 @@ class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysis
  else:
  raise NotImplementedError
  query_ = " AND ".join(query)
- return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
+ query__ = f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
+ if limit is not None:
+ query__ += limit
+ else:
+ query__ += " LIMIT 1000"
+ return query__


  class FilterQueryStored(FilterQuery): ...
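Note: a simplified, standalone sketch of the query building that to_query() now performs (not the actual implementation; the ORDER BY handling and the boolean-group branches are omitted). Two-element numeric or date lists become BETWEEN clauses, the new limit field caps the result set, and 1000 is the fallback LIMIT:

from datetime import date

SIZE_RANGE = 2

def build_query(parameters: dict) -> str:
    query, limit = [], None
    for parameter, value in parameters.items():
        if parameter == "limit" and value:
            limit = f" LIMIT {int(value)}"
        elif (
            isinstance(value, list)
            and len(value) == SIZE_RANGE
            and all(isinstance(item, (int, float, date)) for item in value)
        ):
            query.append(f"{parameter} BETWEEN {value[0]} AND {value[1]}")
    return " AND ".join(query) + (limit if limit is not None else " LIMIT 1000")

print(build_query({"market_capitalization": [5e8, 1e12], "limit": "50"}))
# market_capitalization BETWEEN 500000000.0 AND 1000000000000.0 LIMIT 50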
@@ -288,6 +288,14 @@ def compute_price(data: pd.DataFrame) -> pd.DataFrame:
  return results


+ def compute_volume(data: pd.DataFrame) -> pd.DataFrame:
+ results = pd.DataFrame(index=data.index)
+ results["AVERAGE_VOLUME_10"] = data.volume.rolling(window=10).mean()
+ results["AVERAGE_VOLUME_30"] = data.volume.rolling(window=30).mean()
+ results["VOLUME"] = data.volume
+ return results
+
+
  def find_last_true_run_start(series: pd.Series) -> Optional[date]:
  if not series.iloc[-1]:
  return None
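Note: a quick usage sketch of the new compute_volume helper on hypothetical data (the volume numbers are made up; the point is the 10- and 30-day rolling means it adds):

import numpy as np
import pandas as pd

from bullish.analysis.functions import compute_volume

index = pd.date_range("2025-01-01", periods=40, freq="D")
data = pd.DataFrame(
    {"volume": np.random.default_rng(0).integers(100_000, 1_000_000, size=40)},
    index=index,
)

results = compute_volume(data)
# The final row has both rolling windows fully populated.
print(results[["VOLUME", "AVERAGE_VOLUME_10", "AVERAGE_VOLUME_30"]].tail(1))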
@@ -397,6 +405,10 @@ TRANGE = IndicatorFunction(
  expected_columns=["TRANGE"],
  functions=[compute_trange, compute_pandas_ta_trange],
  )
+ VOLUME = IndicatorFunction(
+ expected_columns=["AVERAGE_VOLUME_10", "AVERAGE_VOLUME_30", "VOLUME"],
+ functions=[compute_volume],
+ )
  PRICE = IndicatorFunction(
  expected_columns=[
  "200_DAY_HIGH",
@@ -20,6 +20,7 @@ from bullish.analysis.functions import (
  cross_simple,
  cross_value_series,
  find_last_true_run_start,
+ VOLUME,
  )

  logger = logging.getLogger(__name__)
@@ -389,6 +390,27 @@ def indicators_factory() -> List[Indicator]:
  type=Optional[date],
  function=lambda d: 0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE,
  ),
+ Signal(
+ name="WEEKLY_GROWTH",
+ description="weekly growth",
+ type_info="Oversold",
+ type=Optional[float],
+ function=lambda d: d.WEEKLY_GROWTH,
+ ),
+ Signal(
+ name="MONTHLY_GROWTH",
+ description="Median monthly growth",
+ type_info="Oversold",
+ type=Optional[float],
+ function=lambda d: d.MONTHLY_GROWTH,
+ ),
+ Signal(
+ name="YEARLY_GROWTH",
+ description="Median yearly growth",
+ type_info="Oversold",
+ type=Optional[float],
+ function=lambda d: d.YEARLY_GROWTH,
+ ),
  Signal(
  name="MEDIAN_WEEKLY_GROWTH",
  description="Median weekly growth",
@@ -419,6 +441,42 @@ def indicators_factory() -> List[Indicator]:
  number=lambda v: np.median(v.unique())
  ),
  ),
+ Signal(
+ name="LOWER_THAN_20_DAY_HIGH",
+ description="Current price is lower than the 20-day high",
+ type_info="Oversold",
+ type=Optional[date],
+ function=lambda d: 0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE,
+ ),
+ ],
+ ),
+ Indicator(
+ name="VOLUME",
+ description="Volume based indicators",
+ expected_columns=VOLUME.expected_columns,
+ function=VOLUME.call,
+ signals=[
+ Signal(
+ name="AVERAGE_VOLUME_10",
+ type_info="Value",
+ description="Average volume over the last 10 days",
+ type=Optional[float],
+ function=lambda d: d.AVERAGE_VOLUME_10,
+ ),
+ Signal(
+ name="AVERAGE_VOLUME_30",
+ type_info="Value",
+ description="Average volume over the last 30 days",
+ type=Optional[float],
+ function=lambda d: d.AVERAGE_VOLUME_30,
+ ),
+ Signal(
+ name="VOLUME_ABOVE_AVERAGE",
+ type_info="Value",
+ description="Volume above average volume over the last 30 days",
+ type=Optional[date],
+ function=lambda d: d.AVERAGE_VOLUME_30 < d.VOLUME,
+ ),
  ],
  ),
  Indicator(
@@ -23,7 +23,7 @@ from bullish.analysis.constants import (
  IndustryGroup,
  Sector,
  Country,
- SubCountry,
+ WesternCountries,
  )

  if TYPE_CHECKING:
@@ -61,7 +61,7 @@ def get_industry_comparison_data(
  normalized_symbol = compute_normalized_close(symbol_data.close).rename("symbol")
  normalized_industry = industry_data.normalized_close.rename(industry)
  data = [normalized_symbol, normalized_industry]
- for country in get_args(SubCountry):
+ for country in get_args(WesternCountries):
  views = bullish_db.read_returns(type, industry, country)
  if views:
  industry_data = IndustryViews.from_views(views).to_dataframe()
@@ -1,6 +1,6 @@
  import datetime
  from datetime import timedelta
- from typing import Dict, Any, Optional, List, Union
+ from typing import Dict, Any, Optional, List, Union, get_args

  from bullish.analysis.analysis import AnalysisView
  from bullish.analysis.backtest import (
@@ -9,6 +9,7 @@ from bullish.analysis.backtest import (
  BacktestQueryRange,
  BacktestQuerySelection,
  )
+ from bullish.analysis.constants import Europe, Us
  from bullish.analysis.filter import FilterQuery, BOOLEAN_GROUP_MAPPING
  from pydantic import BaseModel, Field

@@ -101,280 +102,84 @@ class NamedFilterQuery(FilterQuery):

  return bullish_db.read_query(queries.to_query())["symbol"].tolist() # type: ignore

+ def country_variant(self, suffix: str, countries: List[str]) -> "NamedFilterQuery":
+ return NamedFilterQuery.model_validate(
+ self.model_dump()
+ | {"name": f"{self.name} ({suffix})", "country": countries}
+ )

- STRONG_FUNDAMENTALS = NamedFilterQuery(
- name="Strong Fundamentals",
- income=[
- "positive_operating_income",
- "growing_operating_income",
- "positive_net_income",
- "growing_net_income",
- ],
- cash_flow=["positive_free_cash_flow", "growing_operating_cash_flow"],
- eps=["positive_diluted_eps", "growing_diluted_eps"],
- properties=[
- "operating_cash_flow_is_higher_than_net_income",
- "positive_return_on_equity",
- "positive_return_on_assets",
- "positive_debt_to_equity",
- ],
- market_capitalization=[1e10, 1e12], # 1 billion to 1 trillion
- rsi_bullish_crossover_30=DATE_THRESHOLD,
- )
+ def variants(self) -> List["NamedFilterQuery"]:
+ return [
+ self.country_variant("Europe", list(get_args(Europe))),
+ self.country_variant("Us", list(get_args(Us))),
+ ]

- GOOD_FUNDAMENTALS = NamedFilterQuery(
- name="Good Fundamentals",
- income=[
- "positive_operating_income",
- "positive_net_income",
- ],
- cash_flow=["positive_free_cash_flow"],
- eps=["positive_diluted_eps"],
- properties=[
- "positive_return_on_equity",
- "positive_return_on_assets",
- "positive_debt_to_equity",
- ],
- market_capitalization=[1e10, 1e12], # 1 billion to 1 trillion
- rsi_bullish_crossover_30=DATE_THRESHOLD,
- )

- RSI_CROSSOVER_30_GROWTH_STOCK_STRONG_FUNDAMENTAL = NamedFilterQuery(
- name="RSI cross-over 30 growth stock strong fundamental",
- income=[
- "positive_operating_income",
- "growing_operating_income",
- "positive_net_income",
- "growing_net_income",
- ],
- cash_flow=["positive_free_cash_flow"],
- properties=["operating_cash_flow_is_higher_than_net_income"],
- price_per_earning_ratio=[10, 100],
- rsi_bullish_crossover_30=DATE_THRESHOLD,
- market_capitalization=[5e8, 1e12],
+ SMALL_CAP = NamedFilterQuery(
+ name="Small Cap",
+ last_price=[1, 20],
+ market_capitalization=[5e7, 5e8],
+ properties=["positive_debt_to_equity"],
+ average_volume_30=[50000, 5e9],
  order_by_desc="market_capitalization",
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
- )
- RSI_CROSSOVER_40_GROWTH_STOCK_STRONG_FUNDAMENTAL = NamedFilterQuery(
- name="RSI cross-over 40 growth stock strong fundamental",
- income=[
- "positive_operating_income",
- "growing_operating_income",
- "positive_net_income",
- "growing_net_income",
- ],
- cash_flow=["positive_free_cash_flow"],
- properties=["operating_cash_flow_is_higher_than_net_income"],
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_40=DATE_THRESHOLD,
- market_capitalization=[5e8, 1e12],
- order_by_desc="market_capitalization",
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
- )
+ ).variants()

- RSI_CROSSOVER_30_GROWTH_STOCK = NamedFilterQuery(
- name="RSI cross-over 30 growth stock",
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_30=DATE_THRESHOLD,
- market_capitalization=[1e10, 1e13],
- order_by_desc="market_capitalization",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
-
- MEDIAN_YEARLY_GROWTH = NamedFilterQuery(
- name="Median yearly growth",
- market_capitalization=[1e6, 1e13],
- median_yearly_growth=[40, 1000],
- last_price=[1, 100],
- order_by_asc="last_price",
- country=[
- "Germany",
- "United states",
- "France",
- "Belgium",
- ],
- )
- RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
- name="RSI cross-over 40 growth stock",
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_40=DATE_THRESHOLD,
- market_capitalization=[1e10, 1e13],
- order_by_desc="market_capitalization",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
-
-
- MOMENTUM_GROWTH_GOOD_FUNDAMENTALS = NamedFilterQuery(
- name="Momentum Growth Good Fundamentals (RSI 30)",
- cash_flow=["positive_free_cash_flow"],
- properties=["operating_cash_flow_is_higher_than_net_income"],
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_30=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- macd_12_26_9_bullish_crossover=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
+ TOP_PERFORMERS = NamedFilterQuery(
+ name="Top Performers",
  sma_50_above_sma_200=[
  datetime.date.today() - datetime.timedelta(days=5000),
  datetime.date.today() - datetime.timedelta(days=10),
  ],
- market_capitalization=[5e8, 1e12],
- order_by_desc="momentum",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
-
- MOMENTUM_GROWTH_STRONG_FUNDAMENTALS = NamedFilterQuery(
- name="Momentum Growth Strong Fundamentals (RSI 30)",
- income=[
- "positive_operating_income",
- "growing_operating_income",
- "positive_net_income",
- "growing_net_income",
- ],
- cash_flow=["positive_free_cash_flow"],
- properties=["operating_cash_flow_is_higher_than_net_income"],
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_30=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- macd_12_26_9_bullish_crossover=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- sma_50_above_sma_200=[
+ price_above_sma_50=[
  datetime.date.today() - datetime.timedelta(days=5000),
  datetime.date.today() - datetime.timedelta(days=10),
  ],
- market_capitalization=[5e8, 1e12],
- order_by_desc="momentum",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
- MOMENTUM_GROWTH_RSI_30 = NamedFilterQuery(
- name="Momentum Growth Screener (RSI 30)",
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_30=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- macd_12_26_9_bullish_crossover=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- sma_50_above_sma_200=[
- datetime.date.today() - datetime.timedelta(days=5000),
- datetime.date.today() - datetime.timedelta(days=10),
- ],
- market_capitalization=[5e8, 1e12],
- order_by_desc="momentum",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
- MOMENTUM_GROWTH_RSI_40 = NamedFilterQuery(
- name="Momentum Growth Screener (RSI 40)",
- price_per_earning_ratio=[10, 500],
- rsi_bullish_crossover_40=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
- macd_12_26_9_bullish_crossover=[
- datetime.date.today() - datetime.timedelta(days=7),
+ volume_above_average=DATE_THRESHOLD,
+ weekly_growth=[1, 100],
+ monthly_growth=[8, 100],
+ order_by_desc="market_capitalization",
+ ).variants()
+
+ LARGE_CAPS = NamedFilterQuery(
+ name="Large caps",
+ order_by_desc="market_capitalization",
+ limit="50",
+ ).variants()
+
+ NEXT_EARNINGS_DATE = NamedFilterQuery(
+ name="Next Earnings date",
+ order_by_desc="market_capitalization",
+ next_earnings_date=[
  datetime.date.today(),
+ datetime.date.today() + timedelta(days=10),
  ],
- sma_50_above_sma_200=[
- datetime.date.today() - datetime.timedelta(days=5000),
- datetime.date.today() - datetime.timedelta(days=10),
- ],
- market_capitalization=[5e8, 1e12],
- order_by_desc="momentum",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
+ ).variants()
+
+ RSI_CROSSOVER_40 = NamedFilterQuery(
+ name="RSI cross-over 40",
+ rsi_bullish_crossover_40=DATE_THRESHOLD,
+ market_capitalization=[5e8, 1e13],
+ order_by_desc="market_capitalization",
+ country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+ ).variants()

- GOLDEN_CROSS_LAST_SEVEN_DAYS = NamedFilterQuery(
- name="Golden cross in the last five days",
+ RSI_CROSSOVER_30 = NamedFilterQuery(
+ name="RSI cross-over 30",
  price_per_earning_ratio=[10, 500],
- last_price=[1, 10000],
- golden_cross=[
- datetime.date.today() - datetime.timedelta(days=7),
- datetime.date.today(),
- ],
+ rsi_bullish_crossover_30=DATE_THRESHOLD,
+ market_capitalization=[5e8, 1e13],
  order_by_desc="market_capitalization",
- country=[
- "Germany",
- "United states",
- "France",
- "United kingdom",
- "Canada",
- "Japan",
- "Belgium",
- ],
- )
+ ).variants()


  def predefined_filters() -> list[NamedFilterQuery]:
  return [
- STRONG_FUNDAMENTALS,
- GOOD_FUNDAMENTALS,
- RSI_CROSSOVER_30_GROWTH_STOCK_STRONG_FUNDAMENTAL,
- RSI_CROSSOVER_40_GROWTH_STOCK_STRONG_FUNDAMENTAL,
- RSI_CROSSOVER_30_GROWTH_STOCK,
- RSI_CROSSOVER_40_GROWTH_STOCK,
- MOMENTUM_GROWTH_GOOD_FUNDAMENTALS,
- MOMENTUM_GROWTH_STRONG_FUNDAMENTALS,
- MOMENTUM_GROWTH_RSI_30,
- MOMENTUM_GROWTH_RSI_40,
- GOLDEN_CROSS_LAST_SEVEN_DAYS,
- MEDIAN_YEARLY_GROWTH,
+ *SMALL_CAP,
+ *TOP_PERFORMERS,
+ *LARGE_CAPS,
+ *NEXT_EARNINGS_DATE,
+ *RSI_CROSSOVER_40,
+ *RSI_CROSSOVER_30,
  ]


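Note: each predefined filter is now a list of regional variants rather than a single query. A hedged sketch of what variants() produces (the names and country lists follow the code above; the printed output is illustrative):

from bullish.analysis.predefined_filters import SMALL_CAP

# SMALL_CAP is already expanded into one NamedFilterQuery per region.
for named_filter in SMALL_CAP:
    print(named_filter.name, named_filter.country)
# Small Cap (Europe) ['Germany', 'Belgium', 'France']
# Small Cap (Us) ['United states']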
bullish/app/app.py CHANGED
@@ -146,14 +146,20 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
  if data.get(field) and data[field] != info.default:
  default = data[field]
  if info.annotation == Optional[List[str]]: # type: ignore
+ mapping = groups_mapping().get(field)
+ if not mapping:
+ continue
  data[field] = st.multiselect(
  name,
- groups_mapping()[field],
+ mapping,
  default=default,
  key=hash((model.__name__, field)),
  )
  elif info.annotation == Optional[str]: # type: ignore
- options = ["", *groups_mapping()[field]]
+ mapping = groups_mapping().get(field)
+ if not mapping:
+ continue
+ options = ["", *mapping]
  data[field] = st.selectbox(
  name,
  options,
@@ -183,7 +189,7 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
  except Exception as e:
  logger.error(
  f"Error building filter for {model.__name__}.{field} "
- f"with the parameters {(info.annotation, name, ge, le, tuple(default))}: {e}"
+ f"with the parameters {(info.annotation, name, ge, le)}: {e}"
  )
  raise e
  return data
@@ -0,0 +1,39 @@
+ """
+
+ Revision ID: d0e58e050845
+ Revises: ff0cc4ba40ec
+ Create Date: 2025-08-05 14:02:54.407561
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import sqlite
+
+ # revision identifiers, used by Alembic.
+ revision: str = "d0e58e050845"
+ down_revision: Union[str, None] = "ff0cc4ba40ec"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("next_earnings_date", sa.Date(), nullable=True))
+ batch_op.create_index(
+ "ix_analysis_next_earnings_date", ["next_earnings_date"], unique=False
+ )
+
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
+ batch_op.drop_index("ix_analysis_next_earnings_date")
+ batch_op.drop_column("next_earnings_date")
+
+ # ### end Alembic commands ###
@@ -0,0 +1,69 @@
+ """
+
+ Revision ID: ff0cc4ba40ec
+ Revises: 79bc71ec6f9e
+ Create Date: 2025-08-05 12:09:12.108606
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import sqlite
+
+ # revision identifiers, used by Alembic.
+ revision: str = "ff0cc4ba40ec"
+ down_revision: Union[str, None] = "79bc71ec6f9e"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("average_volume_10", sa.Float(), nullable=True))
+ batch_op.add_column(sa.Column("average_volume_30", sa.Float(), nullable=True))
+ batch_op.add_column(sa.Column("volume_above_average", sa.Date(), nullable=True))
+ batch_op.add_column(sa.Column("weekly_growth", sa.Float(), nullable=True))
+ batch_op.add_column(sa.Column("monthly_growth", sa.Float(), nullable=True))
+ batch_op.add_column(sa.Column("yearly_growth", sa.Float(), nullable=True))
+ batch_op.create_index(
+ "ix_analysis_average_volume_10", ["average_volume_10"], unique=False
+ )
+ batch_op.create_index(
+ "ix_analysis_average_volume_30", ["average_volume_30"], unique=False
+ )
+ batch_op.create_index(
+ "ix_analysis_monthly_growth", ["monthly_growth"], unique=False
+ )
+ batch_op.create_index(
+ "ix_analysis_volume_above_average", ["volume_above_average"], unique=False
+ )
+ batch_op.create_index(
+ "ix_analysis_weekly_growth", ["weekly_growth"], unique=False
+ )
+ batch_op.create_index(
+ "ix_analysis_yearly_growth", ["yearly_growth"], unique=False
+ )
+
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
+ batch_op.drop_index("ix_analysis_yearly_growth")
+ batch_op.drop_index("ix_analysis_weekly_growth")
+ batch_op.drop_index("ix_analysis_volume_above_average")
+ batch_op.drop_index("ix_analysis_monthly_growth")
+ batch_op.drop_index("ix_analysis_average_volume_30")
+ batch_op.drop_index("ix_analysis_average_volume_10")
+ batch_op.drop_column("yearly_growth")
+ batch_op.drop_column("monthly_growth")
+ batch_op.drop_column("weekly_growth")
+ batch_op.drop_column("volume_above_average")
+ batch_op.drop_column("average_volume_30")
+ batch_op.drop_column("average_volume_10")
+
+ # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -331,3 +331,13 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
  return [BacktestResult.model_validate(r) for r in results]
  else:
  return []
+
+ def read_next_earnings_date(self, symbol: str) -> Optional[date]:
+ with Session(self._engine) as session:
+ stmt = select(EarningsDateORM.date).where(
+ EarningsDateORM.symbol == symbol, EarningsDateORM.date > date.today()
+ )
+ result = session.exec(stmt).first()
+ if result:
+ return result.date() # type: ignore
+ return None
@@ -35,7 +35,7 @@ class BullishDbBase(BearishDbBase): # type: ignore
  query_ = query.to_query()
  fields = ",".join(list(AnalysisView.model_fields))
  query_str: str = f"""
- SELECT {fields} FROM analysis WHERE {query_} LIMIT 1000
+ SELECT {fields} FROM analysis WHERE {query_}
  """ # noqa: S608
  return self._read_filter_query(query_str)

@@ -149,3 +149,6 @@ class BullishDbBase(BearishDbBase): # type: ignore
  def read_many_backtest_results(
  self, query: Optional[BacktestResultQuery] = None
  ) -> List[BacktestResult]: ...
+
+ @abc.abstractmethod
+ def read_next_earnings_date(self, symbol: str) -> Optional[date]: ...
bullish/jobs/tasks.py CHANGED
@@ -111,4 +111,4 @@ def news(
  task: Optional[Task] = None,
  ) -> None:
  database_config = DatabaseConfig(database_path=database_path, no_migration=True)
- get_news(symbols, database_config, headless=headless, model_name = "gpt-4o-mini")
+ get_news(symbols, database_config, headless=headless, model_name="gpt-4o-mini")
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: bullishpy
- Version: 0.28.0
+ Version: 0.29.0
  Summary:
  Author: aan
  Author-email: andoludovic.andriamamonjy@gmail.com
@@ -1,15 +1,15 @@
  bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- bullish/analysis/analysis.py,sha256=9a1j1-Fe0XW5_fiNXtgs7XM_0Rl0mQOWvXQv9nUirdg,22124
+ bullish/analysis/analysis.py,sha256=nCb___Tb2aOFIu1wWtPwS1VVZfXOSCp5zUWaIOCdvl4,22345
  bullish/analysis/backtest.py,sha256=x91ek5kOzJHvYq0TmJh1Q8wBDDduIaieE0zDaoZFXew,14325
- bullish/analysis/constants.py,sha256=OpjmrMWHKstoqi314GdJU0vg23IgWqa_rrFU_Lsms0Q,9940
- bullish/analysis/filter.py,sha256=wBVkHhQPfG8wJ0TR3KLo_Bb7cRZFPyHlF4UK2mpG6S0,8495
- bullish/analysis/functions.py,sha256=JSxYCuua_sMGLosN83j0GcY0Ls_gsE4OZLLGInxG9RA,14354
- bullish/analysis/indicators.py,sha256=Dpps-v4bfQ3KF-C8zjMlArw1DJgZo-_EedYwihIiFJ0,24462
- bullish/analysis/industry_views.py,sha256=3TwruMdRYtmBC5bRYFptLt18AN9PQSY9d-uWnTiJKgY,6787
- bullish/analysis/predefined_filters.py,sha256=FnEyWcVKlCSlDhE8LHGXGY3tFZmFTnExQmg78BW9ICs,12417
+ bullish/analysis/constants.py,sha256=X3oCyYNA6B-jsZSYJLeGQ94S453Z7jIVNPmv3lMPp8Q,9922
+ bullish/analysis/filter.py,sha256=0h4wfAuZ-ohhACPhOjZrNEV17gbZ7FopNv0k0HBtIWE,9262
+ bullish/analysis/functions.py,sha256=PgjyzbBFwxCBN_n1PWWI7HC1FBb2Q685PBF4nSOj95g,14795
+ bullish/analysis/indicators.py,sha256=XsMHc4-hEZwxFpI3JI-s4C2hcg0eCQLWcAQ8P46dtL8,26812
+ bullish/analysis/industry_views.py,sha256=-B4CCAYz2arGQtWTXLLMpox0loO_MGdVQd2ycCRMOQQ,6799
+ bullish/analysis/predefined_filters.py,sha256=RZe3lk1pLnQ7lZkTK7hC5Hh8GhaZ1-9fqy-D1A_3aR8,6657
  bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- bullish/app/app.py,sha256=rIvz4A0aJu7wGXBSCqO3EBA4QGhV8GPYVOYLOd9WjnY,14253
+ bullish/app/app.py,sha256=9zxskm5gHt2uDGn59vL4ltI2zgrIBGhavp7jogt1us0,14411
  bullish/cli.py,sha256=azhVLwOUrmwrtFAJSgva8-UFgNgkepXhjp7DxQNc-yw,2427
  bullish/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bullish/database/alembic/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
@@ -30,11 +30,13 @@ bullish/database/alembic/versions/73564b60fe24_.py,sha256=MTlDRDNHj3E9gK7IMeAzv2
  bullish/database/alembic/versions/79bc71ec6f9e_.py,sha256=4nShut2NEd1F3piSckIIBtke0GEsFAxYw5TZl5YYRzc,1140
  bullish/database/alembic/versions/b76079e9845f_.py,sha256=W8eeTABjI9tT1dp3hlK7g7tiKqDhmA8AoUX9Sw-ykLI,1165
  bullish/database/alembic/versions/bf6b86dd5463_.py,sha256=fKB8knCprGmiL6AEyFdhybVmB7QX_W4MPFF9sPzUrSM,1094
+ bullish/database/alembic/versions/d0e58e050845_.py,sha256=x_LS3J27FNyy_WD99uvZzNehly-jpgn9abOYN-VjjZc,1164
  bullish/database/alembic/versions/d663166c531d_.py,sha256=U92l6QXqPniAYrPeu2Bt77ReDbXveLj4aGXtgd806JY,1915
  bullish/database/alembic/versions/ec25c8fa449f_.py,sha256=8Yts74KEjK4jg20zIo90_0atw-sOBuE3hgCKl-rfS5E,2271
  bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
  bullish/database/alembic/versions/fc191121f522_.py,sha256=0sstF6TpAJ09-Mt-Vek9SdSWksvi4C58a5D92rBtuY8,1894
- bullish/database/crud.py,sha256=mu4Ddvg94gLNUENGTw1dbT6dgW5AiS7O3JpehB5vOWo,12701
+ bullish/database/alembic/versions/ff0cc4ba40ec_.py,sha256=74lxga54ig_LoNZYK9toJL9iRwGbNRezh1zvO1YI40U,2719
+ bullish/database/crud.py,sha256=TvA3TGjr6RNYdNP0u-7RmVU1i9mqNbsv8lOcRbaXbn8,13122
  bullish/database/schemas.py,sha256=3uRcNKuobqWC3mCfInzo-4KhrZp3DH6yx_0TEbLoHME,3428
  bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
  bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
@@ -44,14 +46,14 @@ bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
  bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bullish/figures/figures.py,sha256=EpJQOiSqSp7cHvZoGlZrF6UVpyv-fFyDApAfskqdUkU,4562
  bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- bullish/interface/interface.py,sha256=9bhXOKlYtoCbbbDBzmwlVK2HuAzfO-1kE8mk_MMG0MM,5046
+ bullish/interface/interface.py,sha256=dFQW0tMYbFL-gWrlWTWP1qKKSzqlrhz6-T_lLqhILyw,5134
  bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
  bullish/jobs/models.py,sha256=S2yvBf69lmt4U-5OU5CjXCMSw0s9Ubh9xkrB3k2qOZo,764
- bullish/jobs/tasks.py,sha256=W699Lt6FP1zo7QzGcvVo7x2aowAcEjQBq5PFVjnCKvY,3719
+ bullish/jobs/tasks.py,sha256=5dGWT7uZlirlQsqvI2BR9V3ywbt8yD0s-jaNiTFaIAg,3717
  bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  bullish/utils/checks.py,sha256=Va10_xDVVnxYkOD2hafvyQ-TFV8FQpOkr4huJ7XgpDM,2188
- bullishpy-0.28.0.dist-info/METADATA,sha256=Asyi1bAYTjIESyT8xdzPp4DzP2p2ASKoTmJKPA--9zc,830
- bullishpy-0.28.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- bullishpy-0.28.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
- bullishpy-0.28.0.dist-info/RECORD,,
+ bullishpy-0.29.0.dist-info/METADATA,sha256=ROYPl5D2qTigixZ1Xok4cfnwuVYUarBgrz22cKFUs7o,830
+ bullishpy-0.29.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ bullishpy-0.29.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
+ bullishpy-0.29.0.dist-info/RECORD,,