bullishpy 0.49.0__tar.gz → 0.51.0__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- {bullishpy-0.49.0 → bullishpy-0.51.0}/PKG-INFO +1 -1
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/analysis.py +4 -2
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/indicators.py +18 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/predefined_filters.py +108 -32
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/app/app.py +26 -29
- bullishpy-0.51.0/bullish/database/alembic/versions/260fcff7212e_.py +45 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/jobs/models.py +3 -1
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/jobs/tasks.py +28 -1
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/utils/checks.py +2 -17
- {bullishpy-0.49.0 → bullishpy-0.51.0}/pyproject.toml +1 -1
- {bullishpy-0.49.0 → bullishpy-0.51.0}/README.md +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/backtest.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/constants.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/filter.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/functions.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/analysis/industry_views.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/app/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/cli.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/README +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/alembic.ini +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/env.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/script.py.mako +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/037dbd721317_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/040b15fba458_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/08ac1116e055_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/11d35a452b40_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/12889a2cbd7d_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/17e51420e7ad_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/49c83f9eb5ac_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/4b0a2f40b7d3_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/4ee82b171449_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/5b10ee7604c1_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/6d252e23f543_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/73564b60fe24_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/79bc71ec6f9e_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/ae444f338124_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/b76079e9845f_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/bf6b86dd5463_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/d0e58e050845_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/d663166c531d_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/ec25c8fa449f_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/ee5baabb35f8_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/fc191121f522_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/alembic/versions/ff0cc4ba40ec_.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/crud.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/schemas.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/scripts/create_revision.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/scripts/stamp.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/scripts/upgrade.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/database/settings.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/exceptions.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/figures/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/figures/figures.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/interface/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/interface/interface.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/jobs/__init__.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/jobs/app.py +0 -0
- {bullishpy-0.49.0 → bullishpy-0.51.0}/bullish/utils/__init__.py +0 -0
bullish/analysis/analysis.py

```diff
@@ -530,8 +530,10 @@ class SubjectAnalysis(BaseModel):
 
     def compute_upside(self, last_price: float) -> None:
         if self.high_price_target is not None:
-            self.upside = (
-                last_price
+            self.upside = (
+                (float(self.high_price_target) - float(last_price))
+                * 100
+                / float(last_price)
             )
 
     def to_news(self) -> Optional[str]:
```
bullish/analysis/indicators.py

```diff
@@ -359,6 +359,15 @@ def indicators_factory() -> List[Indicator]:
                 in_use_backtest=True,
                 processing=ProcessingFunction(date=find_last_true_run_start),
             ),
+            Signal(
+                name="SMA_50_BELOW_SMA_200",
+                description="SMA 50 is below SMA 200",
+                type_info="Overbought",
+                type=Optional[date],
+                function=lambda d: d.SMA_50 < d.SMA_200,
+                in_use_backtest=True,
+                processing=ProcessingFunction(date=find_last_true_run_start),
+            ),
             Signal(
                 name="PRICE_ABOVE_SMA_50",
                 description="Price is above SMA 50",
@@ -368,6 +377,15 @@ def indicators_factory() -> List[Indicator]:
                 in_use_backtest=True,
                 processing=ProcessingFunction(date=find_last_true_run_start),
             ),
+            Signal(
+                name="PRICE_BELOW_SMA_50",
+                description="Price is below SMA 50",
+                type_info="Overbought",
+                type=Optional[date],
+                function=lambda d: d.SMA_50 > d.CLOSE,
+                in_use_backtest=True,
+                processing=ProcessingFunction(date=find_last_true_run_start),
+            ),
         ],
     ),
     Indicator(
```
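Each new Signal pairs a row-wise boolean condition (for example SMA_50 below SMA_200) with a processing step the factory names `find_last_true_run_start`. That function's implementation is not part of this diff; the sketch below only illustrates the general idea on a pandas frame, using a stand-in helper of its own:

```python
import pandas as pd

def last_true_run_start(mask: pd.Series):
    # Illustrative stand-in: return the index label where the most recent
    # unbroken run of True values begins, or None if the series ends on False.
    if mask.empty or not bool(mask.iloc[-1]):
        return None
    start = mask.index[-1]
    for label, value in list(mask.items())[::-1]:
        if not value:
            break
        start = label
    return start

index = pd.to_datetime(["2025-01-01", "2025-01-02", "2025-01-03", "2025-01-04"])
df = pd.DataFrame({"SMA_50": [10, 11, 9, 8], "SMA_200": [10, 10, 10, 10]}, index=index)
mask = df.SMA_50 < df.SMA_200  # same shape of condition as the new signal lambdas
print(last_true_run_start(mask))  # 2025-01-03: start of the latest "below" stretch
```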
bullish/analysis/predefined_filters.py

```diff
@@ -119,10 +119,99 @@ class NamedFilterQuery(FilterQuery):
             | {"name": f"{self.name} ({suffix})", rsi_parameter_name: DATE_THRESHOLD}
         )
 
+    def _custom_variant(
+        self, suffix: str, properties: Dict[str, Any]
+    ) -> "NamedFilterQuery":
+        return NamedFilterQuery.model_validate(
+            self.model_dump() | {"name": f"{self.name} ({suffix})", **properties}
+        )
+
+    def top_performers(self) -> "NamedFilterQuery":
+        properties = {
+            "volume_above_average": DATE_THRESHOLD,
+            "sma_50_above_sma_200": [
+                datetime.date.today() - datetime.timedelta(days=5000),
+                datetime.date.today(),
+            ],
+            "weekly_growth": [1, 100],
+            "monthly_growth": [8, 100],
+        }
+        return self._custom_variant("Top Performers", properties)
+
+    def poor_performers(self) -> "NamedFilterQuery":
+        properties = {
+            "sma_50_below_sma_200": [
+                datetime.date.today() - datetime.timedelta(days=5000),
+                datetime.date.today(),
+            ],
+            "price_below_sma_50": [
+                datetime.date.today() - datetime.timedelta(days=5000),
+                datetime.date.today(),
+            ],
+            "monthly_growth": [-100, 0],
+        }
+        return self._custom_variant("Poor Performers", properties)
+
+    def short_term_profitability(self) -> "NamedFilterQuery":
+        properties = {
+            "income": [
+                "positive_operating_income",
+                "positive_net_income",
+                "quarterly_positive_operating_income",
+                "quarterly_positive_net_income",
+            ],
+            "cash_flow": [
+                "positive_free_cash_flow",
+                "quarterly_positive_free_cash_flow",
+            ],
+            "eps": [
+                "positive_basic_eps",
+                "positive_diluted_eps",
+                "quarterly_positive_basic_eps",
+                "quarterly_positive_diluted_eps",
+            ],
+            "properties": [
+                "positive_return_on_assets",
+                "positive_return_on_equity",
+                "positive_debt_to_equity",
+                "operating_cash_flow_is_higher_than_net_income",
+                "quarterly_positive_return_on_assets",
+                "quarterly_positive_return_on_equity",
+                "quarterly_positive_debt_to_equity",
+                "quarterly_operating_cash_flow_is_higher_than_net_income",
+            ],
+        }
+        return self._custom_variant("Short-term profitability", properties)
+
+    def long_term_profitability(self) -> "NamedFilterQuery":
+        properties = {
+            "income": [
+                "growing_net_income",
+                "growing_operating_income",
+                "quarterly_growing_net_income",
+                "quarterly_growing_operating_income",
+            ],
+            "cash_flow": [
+                "growing_operating_cash_flow",
+                "quarterly_growing_operating_cash_flow",
+            ],
+            "eps": [
+                "growing_basic_eps",
+                "growing_diluted_eps",
+                "quarterly_growing_basic_eps",
+                "quarterly_growing_diluted_eps",
+            ],
+        }
+        return self._custom_variant("Long-term profitability", properties)
+
     def variants(self) -> List["NamedFilterQuery"]:
-        return [
+        variants_ = [
             self.country_variant("Europe", list(get_args(Europe))),
             self.country_variant("Us", list(get_args(Us))),
+            self.country_variant("Europe", list(get_args(Europe))).top_performers(),
+            self.country_variant("Us", list(get_args(Us))).top_performers(),
+            self.country_variant("Europe", list(get_args(Europe))).poor_performers(),
+            self.country_variant("Us", list(get_args(Us))).poor_performers(),
             self.country_variant("Europe", list(get_args(Europe)))
             .update_indicator_filter("RSI 30", "rsi_bullish_crossover_30")
             .update_indicator_filter("MACD", "macd_12_26_9_bullish_crossover"),
```
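The `_custom_variant` helper relies on pydantic v2's `model_dump`/`model_validate` round trip plus the dict union operator to clone a filter under a new name with selected fields overridden. A reduced sketch of the same pattern with a stand-in model (not the real NamedFilterQuery, which carries many more fields):

```python
from typing import List, Optional
from pydantic import BaseModel

class TinyFilter(BaseModel):
    # Stand-in for NamedFilterQuery: only a couple of fields for illustration.
    name: str
    monthly_growth: Optional[List[float]] = None

    def custom_variant(self, suffix: str, properties: dict) -> "TinyFilter":
        # Dump the current model, override selected fields, re-validate.
        return TinyFilter.model_validate(
            self.model_dump() | {"name": f"{self.name} ({suffix})", **properties}
        )

base = TinyFilter(name="Small caps")
variant = base.custom_variant("Top Performers", {"monthly_growth": [8, 100]})
print(variant.name)            # Small caps (Top Performers)
print(variant.monthly_growth)  # [8.0, 100.0]
```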
bullish/analysis/predefined_filters.py

```diff
@@ -142,6 +231,17 @@ class NamedFilterQuery(FilterQuery):
             .update_indicator_filter("RSI Neutral", "rsi_neutral")
             .update_indicator_filter("MACD", "macd_12_26_9_bullish_crossover"),
         ]
+        variants_short_term_profitability = [
+            v.short_term_profitability() for v in variants_
+        ]
+        variants_long_term_profitability = [
+            v.long_term_profitability() for v in variants_
+        ]
+        return [
+            *variants_,
+            *variants_short_term_profitability,
+            *variants_long_term_profitability,
+        ]
 
 
 def load_custom_filters() -> List[NamedFilterQuery]:
```
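With the new return block, every base variant is emitted three times: as-is, with the short-term profitability overlay, and with the long-term one, so the generated filter list triples in size. Roughly, with hypothetical stand-in names:

```python
# If variants_ holds N base filters, variants() now returns 3 * N entries.
base_variants = ["Europe", "Us", "Europe RSI 30 + MACD"]  # illustrative stand-ins
short_term = [f"{v} (Short-term profitability)" for v in base_variants]
long_term = [f"{v} (Long-term profitability)" for v in base_variants]
expanded = [*base_variants, *short_term, *long_term]
assert len(expanded) == 3 * len(base_variants)
```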
bullish/analysis/predefined_filters.py

```diff
@@ -174,39 +274,16 @@ SMALL_CAP = NamedFilterQuery(
     order_by_desc="market_capitalization",
 ).variants()
 
-TOP_PERFORMERS = NamedFilterQuery(
-    name="Top Performers",
-    volume_above_average=DATE_THRESHOLD,
-    sma_50_above_sma_200=[
-        datetime.date.today() - datetime.timedelta(days=5000),
-        datetime.date.today(),
-    ],
-    weekly_growth=[1, 100],
-    monthly_growth=[8, 100],
-    order_by_desc="market_capitalization",
-).variants()
-
-TOP_PERFORMERS_YEARLY = NamedFilterQuery(
-    name="Top Performers Yearly",
-    sma_50_above_sma_200=[
-        datetime.date.today() - datetime.timedelta(days=5000),
-        datetime.date.today(),
-    ],
-    price_above_sma_50=[
-        datetime.date.today() - datetime.timedelta(days=5000),
-        datetime.date.today(),
-    ],
-    volume_above_average=DATE_THRESHOLD,
-    weekly_growth=[1, 100],
-    monthly_growth=[8, 100],
-    yearly_growth=[30, 100],
+LARGE_CAPS = NamedFilterQuery(
+    name="Large caps",
     order_by_desc="market_capitalization",
+    market_capitalization=[1e10, 1e14],
 ).variants()
 
-LARGE_CAPS = NamedFilterQuery(
-    name="Large caps",
+MID_CAPS = NamedFilterQuery(
+    name="Mid-caps",
     order_by_desc="market_capitalization",
-
+    market_capitalization=[5e8, 1e10],
 ).variants()
 
 NEXT_EARNINGS_DATE = NamedFilterQuery(
```
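The rewritten constants carve the universe into capitalization bands: LARGE_CAPS covers roughly 1e10 to 1e14 and MID_CAPS 5e8 to 1e10. A small sketch, outside the package, of how such bounds classify a company:

```python
LARGE_CAP_RANGE = (1e10, 1e14)  # bounds used by the new LARGE_CAPS filter
MID_CAP_RANGE = (5e8, 1e10)     # bounds used by the new MID_CAPS filter

def cap_band(market_capitalization: float) -> str:
    # Classify a market capitalization into the bands used by the new filters.
    if LARGE_CAP_RANGE[0] <= market_capitalization <= LARGE_CAP_RANGE[1]:
        return "large"
    if MID_CAP_RANGE[0] <= market_capitalization <= MID_CAP_RANGE[1]:
        return "mid"
    return "other"

print(cap_band(2.5e11))  # large
print(cap_band(3e9))     # mid
```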
bullish/analysis/predefined_filters.py

```diff
@@ -222,10 +299,9 @@ NEXT_EARNINGS_DATE = NamedFilterQuery(
 def predefined_filters() -> list[NamedFilterQuery]:
     return [
         *SMALL_CAP,
-        *TOP_PERFORMERS,
-        *TOP_PERFORMERS_YEARLY,
         *LARGE_CAPS,
         *NEXT_EARNINGS_DATE,
+        *MID_CAPS,
         *load_custom_filters(),
     ]
 
```
bullish/app/app.py

```diff
@@ -27,14 +27,13 @@ from bullish.analysis.filter import (
     GeneralFilter,
     TechnicalAnalysisFilters,
 )
-from bullish.jobs.tasks import update, news, analysis
+from bullish.jobs.tasks import update, news, analysis, initialize
 from pydantic import BaseModel
 
 from bullish.utils.checks import (
     compatible_bearish_database,
     compatible_bullish_database,
     empty_analysis_table,
-    DataBaseSingleTon,
 )
 
 CACHE_SHELVE = "user_cache"
```
bullish/app/app.py

```diff
@@ -235,33 +234,28 @@ def load() -> None:
 
 @st.dialog("🔍 Filter", width="large")
 def filter() -> None:
+    with st.container(), st.expander("Predefined filters"):
+        predefined_filter_names = PredefinedFilters().get_predefined_filter_names()
+        option = st.selectbox(
+            "Select a predefined filter",
+            ["", *predefined_filter_names],
+        )
+        if option:
+            data_ = PredefinedFilters().get_predefined_filter(option)
+            st.session_state.filter_query.update(data_)
     with st.container():
-
-        with
-
-
-
-
-
-
-
-
-
-            data_ = PredefinedFilters().get_predefined_filter(option)
-            st.session_state.filter_query.update(data_)
-        with st.expander("Technical Analysis"):
-            for filter in TechnicalAnalysisFilters:
-                with st.expander(filter._description): # type: ignore
-                    build_filter(filter, st.session_state.filter_query)
-
-        with column_2:
-            with st.expander("Fundamental Analysis"):
-                for filter in FundamentalAnalysisFilters:
-                    with st.expander(filter._description): # type: ignore
-                        build_filter(filter, st.session_state.filter_query)
-            with st.expander("General filter"):
-                build_filter(GeneralFilter, st.session_state.filter_query)
+
+        with st.expander("Technical Analysis"):
+            for filter in TechnicalAnalysisFilters:
+                with st.expander(filter._description): # type: ignore
+                    build_filter(filter, st.session_state.filter_query)
+
+        with st.expander("Fundamental Analysis"):
+            for filter in FundamentalAnalysisFilters:
+                with st.expander(filter._description): # type: ignore
+                    build_filter(filter, st.session_state.filter_query)
+        with st.expander("General filter"):
+            build_filter(GeneralFilter, st.session_state.filter_query)
 
     if st.button("🔍 Apply"):
         query = FilterQuery.model_validate(st.session_state.filter_query)
```
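The reworked dialog drops the two-column layout and stacks a predefined-filter selectbox above the Technical, Fundamental, and General expanders. A minimal standalone Streamlit sketch of the same pattern (simplified names and data, not the app's real filter classes):

```python
import streamlit as st

PREDEFINED = {"Top Performers": {"monthly_growth": [8, 100]}}  # illustrative data

if "filter_query" not in st.session_state:
    st.session_state.filter_query = {}

with st.container(), st.expander("Predefined filters"):
    option = st.selectbox("Select a predefined filter", ["", *PREDEFINED])
    if option:
        # Pre-populate the working query from the chosen preset.
        st.session_state.filter_query.update(PREDEFINED[option])

with st.container():
    with st.expander("Technical Analysis"):
        st.write("technical-analysis widgets go here")
    with st.expander("Fundamental Analysis"):
        st.write("fundamental-analysis widgets go here")

if st.button("🔍 Apply"):
    st.write(st.session_state.filter_query)
```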
bullish/app/app.py

```diff
@@ -421,7 +415,10 @@ def main() -> None:
     if st.session_state.database_path is None:
         dialog_pick_database()
     bearish_db_ = bearish_db(st.session_state.database_path)
-
+    initialize(
+        database_path=st.session_state.database_path,
+        job_type="Initialize",
+    )
     charts_tab, jobs_tab = st.tabs(["Charts", "Jobs"])
     if "data" not in st.session_state:
         st.session_state.data = load_analysis_data(bearish_db_)
```
bullishpy-0.51.0/bullish/database/alembic/versions/260fcff7212e_.py (new file)

```diff
@@ -0,0 +1,45 @@
+"""
+
+Revision ID: 260fcff7212e
+Revises: 4ee82b171449
+Create Date: 2025-08-11 10:08:17.582390
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "260fcff7212e"
+down_revision: Union[str, None] = "4ee82b171449"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("sma_50_below_sma_200", sa.Date(), nullable=True))
+        batch_op.add_column(sa.Column("price_below_sma_50", sa.Date(), nullable=True))
+        batch_op.create_index(
+            "ix_analysis_price_below_sma_50", ["price_below_sma_50"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_sma_50_below_sma_200", ["sma_50_below_sma_200"], unique=False
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.drop_index("ix_analysis_sma_50_below_sma_200")
+        batch_op.drop_index("ix_analysis_price_below_sma_50")
+        batch_op.drop_column("price_below_sma_50")
+        batch_op.drop_column("sma_50_below_sma_200")
+
+    # ### end Alembic commands ###
```
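The new revision adds two nullable Date columns and their indexes through `batch_alter_table`, which Alembic uses to emulate ALTER TABLE on SQLite by rebuilding the table. Applying it programmatically might look like the sketch below; the ini path and database URL are placeholders, and the packaged script in bullish/database/scripts/upgrade.py is the project's actual entry point.

```python
from alembic import command
from alembic.config import Config

# Hypothetical paths: point Alembic at an ini file and migrate the analysis
# database up to the new head revision (260fcff7212e).
cfg = Config("alembic.ini")
cfg.set_main_option("sqlalchemy.url", "sqlite:///bullish.db")
command.upgrade(cfg, "head")
```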
bullish/jobs/models.py

```diff
@@ -4,7 +4,9 @@ from typing import Literal, get_args
 import pandas as pd
 from pydantic import BaseModel, Field
 
-JobType = Literal[
+JobType = Literal[
+    "Update data", "Update analysis", "Fetching news", "backtest signals", "Initialize"
+]
 JobStatus = Literal["Completed", "Failed", "Running", "Started"]
 StatusIcon = ["✅ Completed", "❌ Failed", "🔄 Running", "🚀 Started"]
 
```
bullish/jobs/tasks.py

```diff
@@ -18,11 +18,26 @@ from ..analysis.industry_views import compute_industry_view
 from ..analysis.predefined_filters import predefined_filters, load_custom_filters
 from ..database.crud import BullishDb
 from bullish.analysis.filter import FilterUpdate
-from ..utils.checks import DataBaseSingleTon
 
 logger = logging.getLogger(__name__)
 
 
+class DataBaseSingleTon:
+    _instance = None
+
+    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self, path: Optional[Path] = None) -> None:
+        if not hasattr(self, "path"):  # Only set once
+            self.path = path
+
+    def valid(self) -> bool:
+        return hasattr(self, "path") and self.path is not None
+
+
 def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
     @functools.wraps(func)
     def wrapper(
```
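DataBaseSingleTon, moved here from bullish/utils/checks.py, keeps one shared database path per process: the first construction wins, later calls return the same instance, and `valid()` now also rejects a None path. A quick sketch of that behaviour using a simplified copy of the pattern rather than the package class itself:

```python
from pathlib import Path
from typing import Any, Optional

class PathSingleton:
    # Simplified copy of the pattern used by DataBaseSingleTon.
    _instance = None

    def __new__(cls, *args: Any, **kwargs: Any) -> "PathSingleton":
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self, path: Optional[Path] = None) -> None:
        if not hasattr(self, "path"):  # only the first path is kept
            self.path = path

    def valid(self) -> bool:
        return hasattr(self, "path") and self.path is not None

first = PathSingleton(Path("/tmp/bullish.db"))
second = PathSingleton()  # no path given, but the shared instance is returned
assert first is second
assert second.valid() and second.path == Path("/tmp/bullish.db")
```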
bullish/jobs/tasks.py

```diff
@@ -75,6 +90,18 @@ def _base_update(
     compute_industry_view(bullish_db)
 
 
+@huey.task(context=True)  # type: ignore
+@job_tracker
+def initialize(
+    database_path: Path,
+    job_type: JobType,
+    task: Optional[Task] = None,
+) -> None:
+    database = DataBaseSingleTon(path=database_path)
+    if not database.valid():
+        raise ValueError("Database path is not valid.")
+
+
 @huey.task(context=True)  # type: ignore
 @job_tracker
 def update(
```
bullish/utils/checks.py

```diff
@@ -2,9 +2,10 @@ import sqlite3
 from contextlib import contextmanager
 from pathlib import Path
 from sqlite3 import Connection
-from typing import
+from typing import Generator, List
 
 from bearish.database.schemas import *  # type: ignore # noqa: F403
+
 from bullish.database.schemas import *  # noqa: F403
 
 
```
bullish/utils/checks.py

```diff
@@ -64,19 +65,3 @@ def compatible_bearish_database(database_path: Path) -> bool:
 
 def compatible_bullish_database(database_path: Path) -> bool:
     return _compatible_table(database_path, "bullish.database.schemas")
-
-
-class DataBaseSingleTon:
-    _instance = None
-
-    def __new__(cls, *args: Any, **kwargs: Any) -> Any:
-        if cls._instance is None:
-            cls._instance = super().__new__(cls)
-        return cls._instance
-
-    def __init__(self, path: Optional[Path] = None) -> None:
-        if not hasattr(self, "path"):  # Only set once
-            self.path = path
-
-    def valid(self) -> bool:
-        return hasattr(self, "path")
```