bullishpy 0.9.0__py3-none-any.whl → 0.11.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of bullishpy has been flagged as possibly problematic.

@@ -4,6 +4,11 @@ from typing import Dict, Any, Optional
 from bullish.analysis.filter import FilterQuery
 from pydantic import BaseModel, Field
 
+DATE_THRESHOLD = [
+    datetime.date.today() - datetime.timedelta(days=10),
+    datetime.date.today(),
+]
+
 
 class NamedFilterQuery(FilterQuery):
     name: str
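
Note: DATE_THRESHOLD is evaluated once at import time, so the ten-day look-back window is frozen at whatever datetime.date.today() returned when the module was loaded; a long-lived process such as the Streamlit app will keep filtering against a stale window. A minimal sketch of a call-time alternative; date_threshold() is a hypothetical helper, not part of this release:

    import datetime
    from typing import List


    def date_threshold(days: int = 10) -> List[datetime.date]:
        # Recompute the look-back window on every call instead of once at import.
        today = datetime.date.today()
        return [today - datetime.timedelta(days=days), today]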
@@ -35,6 +40,7 @@ STRONG_FUNDAMENTALS = NamedFilterQuery(
         "positive_debt_to_equity",
     ],
     market_capitalization=[1e10, 1e12],  # 10 billion to 1 trillion
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
 )
 
 GOOD_FUNDAMENTALS = NamedFilterQuery(
@@ -51,20 +57,79 @@ GOOD_FUNDAMENTALS = NamedFilterQuery(
         "positive_debt_to_equity",
     ],
     market_capitalization=[1e10, 1e12],  # 10 billion to 1 trillion
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+)
+
+
+SHOOTING_STARS = NamedFilterQuery(
+    name="Shooting stars",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    market_capitalization=[1e9, 1e12],  # 1 billion to 1 trillion
+    order_by_desc="median_yearly_growth",
+    order_by_asc="last_price",
+)
+
+RSI_CROSSOVER_TECH = NamedFilterQuery(
+    name="RSI cross-over",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    return_after_rsi_crossover_45_period_90=[0.0, 100],
+    rsi_bullish_crossover_45=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e11],  # 500 million to 100 billion
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+    industry=[
+        "Semiconductors",
+        "Software - Application",
+        "Software - Infrastructure",
+        "Biotechnology",
+        "Diagnostics & Research",
+        "Medical Devices",
+        "Health Information Services",
+        "Internet Retail",
+        "Electronic Gaming & Multimedia",
+        "Internet Content & Information",
+        "Solar",
+        "Information Technology Services",
+        "Scientific & Technical Instruments",
+        "Semiconductor Equipment & Materials",
+        "Diagnostics & Research",
+    ],
+)
+RSI_CROSSOVER_TECH_PE = NamedFilterQuery(
+    name="RSI cross-over P/E",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[5, 30],  # P/E ratio between 5 and 30
+    rsi_bullish_crossover_45=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],  # 500 million to 1 trillion
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+    industry=[
+        "Semiconductors",
+        "Software - Application",
+        "Software - Infrastructure",
+        "Biotechnology",
+        "Diagnostics & Research",
+        "Medical Devices",
+        "Health Information Services",
+        "Internet Retail",
+        "Electronic Gaming & Multimedia",
+        "Internet Content & Information",
+        "Solar",
+        "Information Technology Services",
+        "Scientific & Technical Instruments",
+        "Semiconductor Equipment & Materials",
+        "Diagnostics & Research",
+    ],
 )
 
 MICRO_CAP_EVENT_SPECULATION = NamedFilterQuery(
     name="Micro-Cap Event Speculation",
     description="seeks tiny names where unusual volume and price gaps hint at "
     "pending corporate events (patent win, FDA news, buy-out rumors).",
-    positive_adosc_20_day_breakout=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
-    cdltasukigap=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    positive_adosc_20_day_breakout=DATE_THRESHOLD,
     rate_of_change_30=[20, 100],  # 20% to 100% in the last 30 days
     market_capitalization=[0, 5e8],
 )
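
Note: the country and industry lists are repeated verbatim between RSI_CROSSOVER_TECH and RSI_CROSSOVER_TECH_PE, and "Diagnostics & Research" appears twice within each industry list. A hedged refactoring sketch; the constant names are illustrative and not part of this release:

    # Shared constants would keep the two filters in sync and make the
    # duplicated "Diagnostics & Research" entry easier to spot.
    TECH_COUNTRIES = ["Germany", "United states", "France", "United kingdom", "Canada", "Japan"]
    TECH_INDUSTRIES = [
        "Semiconductors",
        "Software - Application",
        "Software - Infrastructure",
        "Biotechnology",
        "Diagnostics & Research",
        "Medical Devices",
        "Health Information Services",
        "Internet Retail",
        "Electronic Gaming & Multimedia",
        "Internet Content & Information",
        "Solar",
        "Information Technology Services",
        "Scientific & Technical Instruments",
        "Semiconductor Equipment & Materials",
    ]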
@@ -78,19 +143,10 @@ MOMENTUM_BREAKOUT_HUNTER = NamedFilterQuery(
         "positive_net_income",
     ],
     cash_flow=["positive_free_cash_flow"],
-    golden_cross=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
-    adx_14_long=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    golden_cross=DATE_THRESHOLD,
+    adx_14_long=DATE_THRESHOLD,
     rate_of_change_30=[0, 100],
-    rsi_neutral=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    rsi_neutral=DATE_THRESHOLD,
 )
 
 DEEP_VALUE_PLUS_CATALYST = NamedFilterQuery(
@@ -101,40 +157,168 @@ DEEP_VALUE_PLUS_CATALYST = NamedFilterQuery(
         "positive_operating_income",
         "positive_net_income",
     ],
-    lower_than_200_day_high=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    lower_than_200_day_high=DATE_THRESHOLD,
     rate_of_change_30=[3, 100],
-    rsi_bullish_crossover=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
-    ],
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
 )
 END_OF_TREND_REVERSAL = NamedFilterQuery(
     name="End of trend reversal",
     description="Layers long-term MA breach with momentum exhaustion and a "
     "bullish candle—classic setup for mean-reversion traders.",
-    death_cross=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
+    death_cross=DATE_THRESHOLD,
+    rsi_oversold=DATE_THRESHOLD,
+    candlesticks=["cdlmorningstart", "cdlabandonedbaby", "cdl3whitesoldiers"],
+)
+
+HIGH_QUALITY_CASH_GENERATOR = NamedFilterQuery(
+    name="High Quality Cash Generator",
+    description="This quartet isolates companies that are profitable, cash-rich, and disciplined with leverage. "
+    "Ideal first pass for “quality” or “compounder” "
+    "portfolios where downside protection matters as much as upside.",
+    income=[
+        "positive_net_income",
+    ],
+    cash_flow=["positive_free_cash_flow"],
+    properties=[
+        "operating_cash_flow_is_higher_than_net_income",
+        "positive_return_on_equity",
+        "positive_return_on_assets",
+        "positive_debt_to_equity",
     ],
-    rsi_oversold=[
-        datetime.date.today() - datetime.timedelta(days=5),
-        datetime.date.today(),
+)
+
+EARNINGS_ACCELERATION_TREND_CONFIRMATION = NamedFilterQuery(
+    name="Earnings Acceleration Trend Confirmation",
+    description="Pairs fundamental acceleration with momentum confirmation. Research shows this “double positive” "
+    "outperforms simple momentum because it filters out purely sentiment-driven rallies.",
+    income=[
+        "growing_operating_income",
+        "positive_net_income",
     ],
-    candlesticks=["cdlmorningstart", "cdlabandonedbaby", "cdl3whitesoldiers"],
+    eps=["growing_basic_eps"],
+    golden_cross=DATE_THRESHOLD,
+    macd_12_26_9_bullish_crossover=DATE_THRESHOLD,
+    adx_14_long=DATE_THRESHOLD,
+)
+DIVIDEND_GROWTH_COMPOUNDER = NamedFilterQuery(
+    name="Dividend-Growth Compounders",
+    description="Separates true dividend growers from high-yield traps. "
+    "Critical for income portfolios that need both yield and growth to beat inflation.",
+    mean_dividend_payout_ratio=[0, 0.6],  # 0% to 60% payout ratio
+    cash_flow=[
+        "positive_free_cash_flow",
+        "quarterly_positive_free_cash_flow",
+        "growing_operating_cash_flow",
+    ],
+    properties=["quarterly_positive_return_on_equity"],
+)
+
+BREAK_OUT_MOMENTUM = NamedFilterQuery(
+    name="Break-out Momentum",
+    description="Combines price, volume, and pattern confirmation. Great for tactical traders seeking "
+    "quick continuation moves with statistically higher follow-through.",
+    adosc_crosses_above_0=DATE_THRESHOLD,
+    positive_adosc_20_day_breakout=DATE_THRESHOLD,
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+)
+
+OVERSOLD_MEAN_REVERSION = NamedFilterQuery(
+    name="Oversold Mean Reversion",
+    description="Gives contrarian traders a high-probability bounce setup by "
+    "stacking three different oversold measures plus a reversal pattern.",
+    rsi_oversold=DATE_THRESHOLD,
+    stoch_oversold=DATE_THRESHOLD,
+    mfi_oversold=DATE_THRESHOLD,
+    lower_than_200_day_high=DATE_THRESHOLD,
+)
+RSI_CROSSOVER_30_GROWTH_STOCK_STRONG_FUNDAMENTAL = NamedFilterQuery(
+    name="RSI cross-over 30 growth stock strong fundamental",
+    income=[
+        "positive_operating_income",
+        "growing_operating_income",
+        "positive_net_income",
+        "growing_net_income",
+    ],
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+)
+RSI_CROSSOVER_40_GROWTH_STOCK_STRONG_FUNDAMENTAL = NamedFilterQuery(
+    name="RSI cross-over 40 growth stock strong fundamental",
+    income=[
+        "positive_operating_income",
+        "growing_operating_income",
+        "positive_net_income",
+        "growing_net_income",
+    ],
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_40=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+)
+RSI_CROSSOVER_45_GROWTH_STOCK_STRONG_FUNDAMENTAL = NamedFilterQuery(
+    name="RSI cross-over 45 growth stock strong fundamental",
+    income=[
+        "positive_operating_income",
+        "growing_operating_income",
+        "positive_net_income",
+        "growing_net_income",
+    ],
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_45=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+)
+RSI_CROSSOVER_30_GROWTH_STOCK = NamedFilterQuery(
+    name="RSI cross-over 30 growth stock",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_30=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+)
+RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
+    name="RSI cross-over 40 growth stock",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_40=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
+)
+RSI_CROSSOVER_45_GROWTH_STOCK = NamedFilterQuery(
+    name="RSI cross-over 45 growth stock",
+    cash_flow=["positive_free_cash_flow"],
+    properties=["operating_cash_flow_is_higher_than_net_income"],
+    price_per_earning_ratio=[20, 40],
+    rsi_bullish_crossover_45=DATE_THRESHOLD,
+    market_capitalization=[5e8, 1e12],
+    order_by_desc="market_capitalization",
+    country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
 )
 
 
 def predefined_filters() -> list[NamedFilterQuery]:
     return [
-        STRONG_FUNDAMENTALS,
-        GOOD_FUNDAMENTALS,
-        MICRO_CAP_EVENT_SPECULATION,
-        MOMENTUM_BREAKOUT_HUNTER,
-        DEEP_VALUE_PLUS_CATALYST,
-        END_OF_TREND_REVERSAL,
+        RSI_CROSSOVER_30_GROWTH_STOCK_STRONG_FUNDAMENTAL,
+        RSI_CROSSOVER_40_GROWTH_STOCK_STRONG_FUNDAMENTAL,
+        RSI_CROSSOVER_45_GROWTH_STOCK_STRONG_FUNDAMENTAL,
+        RSI_CROSSOVER_30_GROWTH_STOCK,
+        RSI_CROSSOVER_40_GROWTH_STOCK,
+        RSI_CROSSOVER_45_GROWTH_STOCK,
     ]
 
 
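Since predefined_filters() now returns only the six RSI cross-over variants, the older filters (STRONG_FUNDAMENTALS, SHOOTING_STARS, and so on) remain defined but are no longer exposed. A short usage sketch, assuming NamedFilterQuery keeps FilterQuery's pydantic v2 behaviour; the helper and import path are illustrative:

    # Path assumed; the diff does not show this module's file name.
    from bullish.analysis.predefined_filters import NamedFilterQuery, predefined_filters


    def find_filter(name: str) -> NamedFilterQuery:
        # Hypothetical helper: look up a predefined filter by its display name.
        by_name = {f.name: f for f in predefined_filters()}
        return by_name[name]


    query = find_filter("RSI cross-over 30 growth stock")
    print(query.model_dump(exclude_none=True))  # inspect the active filter fields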
bullish/app/app.py CHANGED
@@ -1,3 +1,4 @@
+import logging
 import shelve
 import uuid
 from pathlib import Path
@@ -24,7 +25,6 @@ from bullish.analysis.filter import (
     GeneralFilter,
     TechnicalAnalysisFilters,
 )
-from bullish.jobs.models import JobTracker
 from bullish.jobs.tasks import update, news, analysis
 from pydantic import BaseModel
 
@@ -38,6 +38,7 @@ CACHE_SHELVE = "user_cache"
 DB_KEY = "db_path"
 
 st.set_page_config(layout="wide")
+logger = logging.getLogger(__name__)
 
 
 @st.cache_resource
@@ -85,8 +86,9 @@ def on_table_select() -> None:
     query = AssetQuery(symbols=Symbols(equities=[Ticker(symbol=symbol)]))
     prices = db.read_series(query, months=24)
     data = Prices(prices=prices).to_dataframe()
+    dates = db.read_dates(symbol)
 
-    fig = plot(data, symbol)
+    fig = plot(data, symbol, dates=dates)
 
     st.session_state.ticker_figure = fig
 
@@ -145,6 +147,14 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
                 default=default,
                 key=hash((model.__name__, field)),
             )
+        elif info.annotation == Optional[str]:  # type: ignore
+            options = ["", *groups_mapping()[field]]
+            data[field] = st.selectbox(
+                name,
+                options,
+                index=0 if not default else options.index(default),
+                key=hash((model.__name__, field)),
+            )
 
         else:
             ge = next(
@@ -155,18 +165,28 @@ def build_filter(model: Type[BaseModel], data: Dict[str, Any]) -> Dict[str, Any]
                 (item.le for item in info.metadata if hasattr(item, "le")),
                 info.default[1] if info.default and len(info.default) == 2 else None,
             )
-            data[field] = list(
-                st.slider(  # type: ignore
-                    name, ge, le, tuple(default), key=hash((model.__name__, field))
+            if info.annotation == Optional[List[float]]:  # type: ignore
+                ge = int(ge)  # type: ignore
+                le = int(le)  # type: ignore
+                default = [int(d) for d in default]
+            try:
+                data[field] = list(
+                    st.slider(  # type: ignore
+                        name, ge, le, tuple(default), key=hash((model.__name__, field))
+                    )
                 )
-            )
+            except Exception as e:
+                logger.error(
+                    f"Error building filter for {model.__name__}.{field} "
+                    f"with the parameters {(info.annotation, name, ge, le, tuple(default))}: {e}"
+                )
+                raise e
     return data
 
 
 @st.dialog("⏳ Jobs", width="large")
 def jobs() -> None:
     with st.expander("Update data"):
-        bearish_db_ = bearish_db(st.session_state.database_path)
         update_query = sp.pydantic_form(key="update", model=FilterUpdate)
         if (
             update_query
@@ -174,19 +194,20 @@ def jobs() -> None:
             and not st.session_state.data.empty
         ):
             symbols = st.session_state.data["symbol"].unique().tolist()
-            res = update(
+            update(
                 database_path=st.session_state.database_path,
+                job_type="Update data",
                 symbols=symbols,
                 update_query=update_query,
             )  # enqueue & get result-handle
-            bearish_db_.write_job_tracker(
-                JobTracker(job_id=str(res.id), type="Update data")
-            )
+
            st.success("Data update job has been enqueued.")
            st.rerun()
     with st.expander("Update analysis"):
         if st.button("Update analysis"):
-            analysis(st.session_state.database_path)
+            analysis(st.session_state.database_path, job_type="Update analysis")
+            st.success("Data update job has been enqueued.")
+            st.rerun()
 
 
 @st.dialog("📥 Load", width="large")
@@ -284,14 +305,12 @@ def save_filtered_results(bearish_db_: BullishDb) -> None:
         )
 
         bearish_db_.write_filtered_results(filtered_results)
-        res = news(
+        news(
             database_path=st.session_state.database_path,
+            job_type="Fetching news",
             symbols=symbols,
             headless=headless,
         )
-        bearish_db_.write_job_tracker(
-            JobTracker(job_id=str(res.id), type="Fetching news")
-        )
         st.session_state.filter_query = None
         st.session_state.query = None
         st.rerun()
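
The build_filter changes above work around a Streamlit constraint: st.slider expects min_value, max_value, and value to share a single numeric type, so mixed int/float inputs can fail at render time. The new Optional[List[float]] branch coerces everything to int first and logs the offending parameters before re-raising if the widget still rejects them. A standalone sketch of the same coercion pattern (the label and bounds are illustrative):

    import streamlit as st

    ge, le = 0.0, 100.0     # bounds as floats, e.g. from pydantic field metadata
    default = [20.0, 40.0]  # default range for the slider

    # Coerce bounds and defaults to one numeric type, mirroring the new branch.
    low, high = st.slider("rate_of_change_30", int(ge), int(le), tuple(int(d) for d in default))
    st.write(f"Selected range: {low} to {high}")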
bullish/cli.py CHANGED
@@ -1,4 +1,6 @@
 from __future__ import annotations
+
+import os
 import subprocess
 import signal
 import sys
@@ -57,6 +59,7 @@ def serve(
                 host,
                 "--server.port",
                 str(port),
+                os.devnull,
             ]
         )
     )
New file ADDED: Alembic migration, revision 17e51420e7ad (path not shown in this diff)
@@ -0,0 +1,85 @@
+"""
+
+Revision ID: 17e51420e7ad
+Revises: d663166c531d
+Create Date: 2025-07-10 17:35:02.376675
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "17e51420e7ad"
+down_revision: Union[str, None] = "d663166c531d"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_30", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_40", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover_45", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column(
+                "return_after_rsi_crossover_45_period_90", sa.Float(), nullable=True
+            )
+        )
+        batch_op.drop_index(batch_op.f("ix_analysis_rsi_bullish_crossover"))
+        batch_op.create_index(
+            "ix_analysis_return_after_rsi_crossover_45_period_90",
+            ["return_after_rsi_crossover_45_period_90"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_30",
+            ["rsi_bullish_crossover_30"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_40",
+            ["rsi_bullish_crossover_40"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rsi_bullish_crossover_45",
+            ["rsi_bullish_crossover_45"],
+            unique=False,
+        )
+        batch_op.drop_column("rsi_bullish_crossover")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("rsi_bullish_crossover", sa.DATE(), nullable=True)
+        )
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_45")
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_40")
+        batch_op.drop_index("ix_analysis_rsi_bullish_crossover_30")
+        batch_op.drop_index("ix_analysis_return_after_rsi_crossover_45_period_90")
+        batch_op.create_index(
+            batch_op.f("ix_analysis_rsi_bullish_crossover"),
+            ["rsi_bullish_crossover"],
+            unique=False,
+        )
+        batch_op.drop_column("return_after_rsi_crossover_45_period_90")
+        batch_op.drop_column("rsi_bullish_crossover_45")
+        batch_op.drop_column("rsi_bullish_crossover_40")
+        batch_op.drop_column("rsi_bullish_crossover_30")
+
+    # ### end Alembic commands ###
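
The crud.py changes below call upgrade(self.database_path) while creating the engine, so these migrations run automatically at startup. A hedged sketch of what such an upgrade() helper might look like with the standard Alembic API (the script_location path is an assumption; the diff does not show the real helper):

    from alembic import command
    from alembic.config import Config


    def upgrade(database_path: str) -> None:
        # Point Alembic at the migration scripts and the SQLite file,
        # then migrate to the newest revision.
        cfg = Config()
        cfg.set_main_option("script_location", "bullish/database/alembic")  # assumed path
        cfg.set_main_option("sqlalchemy.url", f"sqlite:///{database_path}")
        command.upgrade(cfg, "head")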
New file ADDED: Alembic migration, revision d663166c531d (path not shown in this diff)
@@ -0,0 +1,56 @@
+"""
+
+Revision ID: d663166c531d
+Revises: fc191121f522
+Create Date: 2025-07-09 17:44:25.728075
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "d663166c531d"
+down_revision: Union[str, None] = "fc191121f522"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("median_weekly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_monthly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_yearly_growth", sa.Float(), nullable=True)
+        )
+        batch_op.create_index(
+            "ix_analysis_median_monthly_growth", ["median_monthly_growth"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_median_weekly_growth", ["median_weekly_growth"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_median_yearly_growth", ["median_yearly_growth"], unique=False
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.drop_index("ix_analysis_median_yearly_growth")
+        batch_op.drop_index("ix_analysis_median_weekly_growth")
+        batch_op.drop_index("ix_analysis_median_monthly_growth")
+        batch_op.drop_column("median_yearly_growth")
+        batch_op.drop_column("median_monthly_growth")
+        batch_op.drop_column("median_weekly_growth")
+    # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -1,12 +1,15 @@
 import json
 import logging
+from datetime import date
 from functools import cached_property
 from pathlib import Path
+from sqlite3 import OperationalError
 from typing import TYPE_CHECKING, Any, List, Optional
 
 import pandas as pd
 from bearish.database.crud import BearishDb  # type: ignore
 from bearish.models.base import Ticker  # type: ignore
+from bearish.database.schemas import EarningsDateORM  # type: ignore
 from pydantic import ConfigDict
 from sqlalchemy import Engine, create_engine, insert, delete, update
 from sqlmodel import Session, select
@@ -40,7 +43,14 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
         if not self.valid():
             raise DatabaseFileNotFoundError("Database file not found.")
         database_url = f"sqlite:///{Path(self.database_path)}"
-        upgrade(self.database_path)
+        try:
+            upgrade(self.database_path)
+        except OperationalError as e:
+            logger.warning(
+                f"Failed to upgrade the database at {self.database_path}. "
+                f"Reason: {e}. "
+                "Skipping upgrade."
+            )
         engine = create_engine(database_url)
         return engine
 
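Note: the except clause above catches sqlite3.OperationalError, but errors surfaced through SQLAlchemy arrive as sqlalchemy.exc.OperationalError, which wraps the DBAPI exception rather than subclassing it, so it would not be caught here. A hedged sketch that catches both, written as a free function for illustration:

    import logging
    from sqlite3 import OperationalError as SqliteOperationalError

    from sqlalchemy.exc import OperationalError as SQLAlchemyOperationalError

    logger = logging.getLogger(__name__)


    def try_upgrade(database_path: str, upgrade) -> None:
        # Catch both the raw DBAPI error and SQLAlchemy's wrapper; the release's
        # `except OperationalError` as written matches only the former.
        try:
            upgrade(database_path)
        except (SqliteOperationalError, SQLAlchemyOperationalError) as e:
            logger.warning(
                "Failed to upgrade the database at %s: %s. Skipping upgrade.",
                database_path,
                e,
            )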
@@ -101,6 +111,14 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
             session.exec(stmt)  # type: ignore
             session.commit()
 
+    def read_job_tracker(self, task_id: str) -> Optional[JobTracker]:
+        stmt = select(JobTrackerORM).where(JobTrackerORM.job_id == task_id)
+        with Session(self._engine) as session:
+            result = session.execute(stmt).scalar_one_or_none()
+            if result:
+                return JobTracker.model_validate(result.model_dump())
+            return None
+
     def delete_job_trackers(self, job_ids: List[str]) -> None:
         with Session(self._engine) as session:
             stmt = delete(JobTrackerORM).where(JobTrackerORM.job_id.in_(job_ids))  # type: ignore
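
A short usage sketch for the new read_job_tracker helper (the constructor keyword and the task id are illustrative, not taken from this diff):

    from bullish.database.crud import BullishDb

    db = BullishDb(database_path="bullish.db")   # assumed constructor keyword
    tracker = db.read_job_tracker("my-task-id")  # id assigned when the job was enqueued
    print(tracker.type if tracker else "job not found")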
@@ -156,3 +174,12 @@ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
             )
             session.exec(stmt)  # type: ignore
             session.commit()
+
+    def read_dates(self, symbol: str) -> List[date]:
+        with Session(self._engine) as session:
+            return [
+                r.date()
+                for r in session.exec(
+                    select(EarningsDateORM.date).where(EarningsDateORM.symbol == symbol)
+                )
+            ]
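
read_dates supplies the earnings dates that on_table_select now forwards to plot() via the dates keyword. A hedged sketch of one way a plot function might overlay those dates, assuming a Plotly figure (the actual plot() implementation is not shown in this diff):

    import datetime
    from typing import List

    import plotly.graph_objects as go


    def mark_earnings_dates(fig: go.Figure, dates: List[datetime.date]) -> go.Figure:
        # Draw one dotted vertical line per earnings date; ISO strings keep
        # the x-coordinate unambiguous on a date axis.
        for d in dates:
            fig.add_vline(x=d.isoformat(), line_dash="dot", opacity=0.4)
        return fig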