bullishpy 0.14.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic; see the registry's release notes for more details.

@@ -1,9 +1,19 @@
1
1
  import datetime
2
- from typing import Dict, Any, Optional
2
+ from datetime import timedelta
3
+ from typing import Dict, Any, Optional, List, Union
3
4
 
4
- from bullish.analysis.filter import FilterQuery
5
+ from bullish.analysis.analysis import AnalysisView
6
+ from bullish.analysis.backtest import (
7
+ BacktestQueryDate,
8
+ BacktestQueries,
9
+ BacktestQueryRange,
10
+ BacktestQuerySelection,
11
+ )
12
+ from bullish.analysis.filter import FilterQuery, BOOLEAN_GROUP_MAPPING
5
13
  from pydantic import BaseModel, Field
6
14
 
15
+ from bullish.analysis.indicators import Indicators
16
+ from bullish.database.crud import BullishDb
7
17
 
8
18
  DATE_THRESHOLD = [
9
19
  datetime.date.today() - datetime.timedelta(days=7),
@@ -23,6 +33,74 @@ class NamedFilterQuery(FilterQuery):
23
33
  exclude={"name"},
24
34
  )
25
35
 
36
+ def to_backtesting_query(
37
+ self, backtest_start_date: datetime.date
38
+ ) -> BacktestQueries:
39
+ queries: List[
40
+ Union[BacktestQueryRange, BacktestQueryDate, BacktestQuerySelection]
41
+ ] = []
42
+ in_use_backtests = Indicators().in_use_backtest()
43
+ for in_use in in_use_backtests:
44
+ value = self.to_dict().get(in_use)
45
+ if value and self.model_fields[in_use].annotation == List[datetime.date]:
46
+ delta = value[1] - value[0]
47
+ queries.append(
48
+ BacktestQueryDate(
49
+ name=in_use.upper(),
50
+ start=backtest_start_date - delta,
51
+ end=backtest_start_date,
52
+ table="signalseries",
53
+ )
54
+ )
55
+ for field in self.to_dict():
56
+ if field in BOOLEAN_GROUP_MAPPING:
57
+ value = self.to_dict().get(field)
58
+ if value and self.model_fields[field].annotation == Optional[List[str]]: # type: ignore
59
+ queries.extend(
60
+ [
61
+ BacktestQueryDate(
62
+ name=v.upper(),
63
+ start=backtest_start_date - timedelta(days=252),
64
+ end=backtest_start_date,
65
+ table="signalseries",
66
+ )
67
+ for v in value
68
+ ]
69
+ )
70
+
71
+ if field in AnalysisView.model_fields:
72
+ value = self.to_dict().get(field)
73
+ if (
74
+ value
75
+ and self.model_fields[field].annotation == Optional[List[float]] # type: ignore
76
+ and len(value) == 2
77
+ ):
78
+ queries.append(
79
+ BacktestQueryRange(
80
+ name=field.lower(),
81
+ min=value[0],
82
+ max=value[1],
83
+ table="analysis",
84
+ )
85
+ )
86
+ if value and self.model_fields[field].annotation == Optional[List[str]]: # type: ignore
87
+ queries.append(
88
+ BacktestQuerySelection(
89
+ name=field.lower(),
90
+ selections=value,
91
+ table="analysis",
92
+ )
93
+ )
94
+
95
+ return BacktestQueries(queries=queries)
96
+
97
+ def get_backtesting_symbols(
98
+ self, bullish_db: BullishDb, backtest_start_date: datetime.date
99
+ ) -> List[str]:
100
+ queries = self.to_backtesting_query(backtest_start_date)
101
+
102
+ return bullish_db.read_query(queries.to_query())["symbol"].tolist() # type: ignore
103
+
26
104
 
27
105
  STRONG_FUNDAMENTALS = NamedFilterQuery(
28
106
  name="Strong Fundamentals",
@@ -100,7 +178,15 @@ RSI_CROSSOVER_30_GROWTH_STOCK = NamedFilterQuery(
100
178
  rsi_bullish_crossover_30=DATE_THRESHOLD,
101
179
  market_capitalization=[5e8, 1e12],
102
180
  order_by_desc="market_capitalization",
103
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
181
+ country=[
182
+ "Germany",
183
+ "United states",
184
+ "France",
185
+ "United kingdom",
186
+ "Canada",
187
+ "Japan",
188
+ "Belgium",
189
+ ],
104
190
  )
105
191
  RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
106
192
  name="RSI cross-over 40 growth stock",
@@ -108,7 +194,15 @@ RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
108
194
  rsi_bullish_crossover_40=DATE_THRESHOLD,
109
195
  market_capitalization=[5e8, 1e12],
110
196
  order_by_desc="market_capitalization",
111
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
197
+ country=[
198
+ "Germany",
199
+ "United states",
200
+ "France",
201
+ "United kingdom",
202
+ "Canada",
203
+ "Japan",
204
+ "Belgium",
205
+ ],
112
206
  )
113
207
 
114
208
 
@@ -131,7 +225,15 @@ MOMENTUM_GROWTH_GOOD_FUNDAMENTALS = NamedFilterQuery(
131
225
  ],
132
226
  market_capitalization=[5e8, 1e12],
133
227
  order_by_desc="momentum",
134
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
228
+ country=[
229
+ "Germany",
230
+ "United states",
231
+ "France",
232
+ "United kingdom",
233
+ "Canada",
234
+ "Japan",
235
+ "Belgium",
236
+ ],
135
237
  )
136
238
 
137
239
  MOMENTUM_GROWTH_STRONG_FUNDAMENTALS = NamedFilterQuery(
@@ -159,7 +261,15 @@ MOMENTUM_GROWTH_STRONG_FUNDAMENTALS = NamedFilterQuery(
159
261
  ],
160
262
  market_capitalization=[5e8, 1e12],
161
263
  order_by_desc="momentum",
162
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
264
+ country=[
265
+ "Germany",
266
+ "United states",
267
+ "France",
268
+ "United kingdom",
269
+ "Canada",
270
+ "Japan",
271
+ "Belgium",
272
+ ],
163
273
  )
164
274
  MOMENTUM_GROWTH_RSI_30 = NamedFilterQuery(
165
275
  name="Momentum Growth Screener (RSI 30)",
@@ -178,7 +288,15 @@ MOMENTUM_GROWTH_RSI_30 = NamedFilterQuery(
178
288
  ],
179
289
  market_capitalization=[5e8, 1e12],
180
290
  order_by_desc="momentum",
181
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
291
+ country=[
292
+ "Germany",
293
+ "United states",
294
+ "France",
295
+ "United kingdom",
296
+ "Canada",
297
+ "Japan",
298
+ "Belgium",
299
+ ],
182
300
  )
183
301
  MOMENTUM_GROWTH_RSI_40 = NamedFilterQuery(
184
302
  name="Momentum Growth Screener (RSI 40)",
@@ -197,7 +315,15 @@ MOMENTUM_GROWTH_RSI_40 = NamedFilterQuery(
197
315
  ],
198
316
  market_capitalization=[5e8, 1e12],
199
317
  order_by_desc="momentum",
200
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
318
+ country=[
319
+ "Germany",
320
+ "United states",
321
+ "France",
322
+ "United kingdom",
323
+ "Canada",
324
+ "Japan",
325
+ "Belgium",
326
+ ],
201
327
  )
202
328
 
203
329
  GOLDEN_CROSS_LAST_SEVEN_DAYS = NamedFilterQuery(
@@ -209,7 +335,15 @@ GOLDEN_CROSS_LAST_SEVEN_DAYS = NamedFilterQuery(
209
335
  datetime.date.today(),
210
336
  ],
211
337
  order_by_desc="market_capitalization",
212
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
338
+ country=[
339
+ "Germany",
340
+ "United states",
341
+ "France",
342
+ "United kingdom",
343
+ "Canada",
344
+ "Japan",
345
+ "Belgium",
346
+ ],
213
347
  )
214
348
 
215
349
 
bullish/app/app.py CHANGED
@@ -12,6 +12,7 @@ from bearish.models.price.prices import Prices # type: ignore
12
12
  from bearish.models.query.query import AssetQuery, Symbols # type: ignore
13
13
  from streamlit_file_browser import st_file_browser # type: ignore
14
14
 
15
+ from bullish.analysis.backtest import BacktestResults
15
16
  from bullish.analysis.industry_views import get_industry_comparison_data
16
17
  from bullish.analysis.predefined_filters import PredefinedFilters
17
18
  from bullish.database.crud import BullishDb
@@ -26,7 +27,7 @@ from bullish.analysis.filter import (
26
27
  GeneralFilter,
27
28
  TechnicalAnalysisFilters,
28
29
  )
29
- from bullish.jobs.tasks import update, news, analysis
30
+ from bullish.jobs.tasks import update, news, analysis, backtest_signals
30
31
  from pydantic import BaseModel
31
32
 
32
33
  from bullish.utils.checks import (
@@ -212,6 +213,12 @@ def jobs() -> None:
212
213
  analysis(st.session_state.database_path, job_type="Update analysis")
213
214
  st.success("Data update job has been enqueued.")
214
215
  st.rerun()
216
+ with st.expander("Compute backtest signals"):
217
+ if st.button("Compute backtest signals"):
218
+ backtest_signals(
219
+ st.session_state.database_path, job_type="backtest signals"
220
+ )
221
+ st.rerun()
215
222
 
216
223
 
217
224
  @st.dialog("📥 Load", width="large")
@@ -336,7 +343,7 @@ def main() -> None:
336
343
  if st.session_state.database_path is None:
337
344
  dialog_pick_database()
338
345
  bearish_db_ = bearish_db(st.session_state.database_path)
339
- charts_tab, jobs_tab = st.tabs(["Charts", "Jobs"])
346
+ charts_tab, jobs_tab, backtests = st.tabs(["Charts", "Jobs", "Backtests"])
340
347
  if "data" not in st.session_state:
341
348
  st.session_state.data = load_analysis_data(bearish_db_)
342
349
 
@@ -387,6 +394,12 @@ def main() -> None:
387
394
  use_container_width=True,
388
395
  hide_index=True,
389
396
  )
397
+ with backtests:
398
+ results = bearish_db_.read_many_backtest_results()
399
+ backtest_results = BacktestResults(results=results)
400
+ with st.container():
401
+ figure = backtest_results.figure()
402
+ st.plotly_chart(figure)
390
403
 
391
404
 
392
405
  if __name__ == "__main__":
@@ -0,0 +1,52 @@
1
+ """
2
+
3
+ Revision ID: 12889a2cbd7d
4
+ Revises: 040b15fba458
5
+ Create Date: 2025-07-17 17:50:35.004785
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "12889a2cbd7d"
18
+ down_revision: Union[str, None] = "040b15fba458"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "signalseries",
27
+ sa.Column("date", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
28
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
29
+ sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
+ sa.Column("value", sa.Float(), nullable=True),
31
+ sa.PrimaryKeyConstraint("date", "name", "symbol"),
32
+ )
33
+
34
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
35
+ batch_op.drop_index(batch_op.f("ix_analysis_momentum_time_span"))
36
+ batch_op.drop_column("momentum_time_span")
37
+
38
+ # ### end Alembic commands ###
39
+
40
+
41
+ def downgrade() -> None:
42
+ # ### commands auto generated by Alembic - please adjust! ###
43
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
44
+ batch_op.add_column(sa.Column("momentum_time_span", sa.DATE(), nullable=True))
45
+ batch_op.create_index(
46
+ batch_op.f("ix_analysis_momentum_time_span"),
47
+ ["momentum_time_span"],
48
+ unique=False,
49
+ )
50
+
51
+ op.drop_table("signalseries")
52
+ # ### end Alembic commands ###
@@ -0,0 +1,48 @@
1
+ """
2
+
3
+ Revision ID: 6d252e23f543
4
+ Revises: 12889a2cbd7d
5
+ Create Date: 2025-07-27 16:46:41.885125
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "6d252e23f543"
18
+ down_revision: Union[str, None] = "12889a2cbd7d"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "backtestresult",
27
+ sa.Column("end", sa.Date(), nullable=False),
28
+ sa.Column("investment", sa.Float(), nullable=False),
29
+ sa.Column("start", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
+ sa.Column("holding_period", sa.Integer(), nullable=False),
31
+ sa.Column("extend_days", sa.Integer(), nullable=False),
32
+ sa.Column("percentage", sa.Integer(), nullable=False),
33
+ sa.Column("iterations", sa.Integer(), nullable=False),
34
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
35
+ sa.Column("data", sa.JSON(), nullable=True),
36
+ sa.PrimaryKeyConstraint(
37
+ "start", "holding_period", "extend_days", "percentage", "iterations", "name"
38
+ ),
39
+ )
40
+
41
+ # ### end Alembic commands ###
42
+
43
+
44
+ def downgrade() -> None:
45
+ # ### commands auto generated by Alembic - please adjust! ###
46
+
47
+ op.drop_table("backtestresult")
48
+ # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -8,14 +8,18 @@ from typing import TYPE_CHECKING, Any, List, Optional
8
8
  import pandas as pd
9
9
  from bearish.database.crud import BearishDb # type: ignore
10
10
  from bearish.models.base import Ticker # type: ignore
11
- from bearish.database.schemas import EarningsDateORM, EquityORM # type: ignore
11
+ from bearish.database.schemas import EarningsDateORM, EquityORM, PriceORM # type: ignore
12
12
  from bearish.types import Sources # type: ignore
13
+ from bearish.models.price.price import Price # type: ignore
14
+ from bearish.models.price.prices import Prices # type: ignore
13
15
  from pydantic import ConfigDict
14
16
  from sqlalchemy import Engine, create_engine, insert, delete, update
15
17
  from sqlmodel import Session, select
16
18
 
17
19
  from bullish.analysis.analysis import Analysis
20
+
18
21
  from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
22
+ from bullish.analysis.indicators import SignalSeries
19
23
  from bullish.analysis.industry_views import Type, IndustryView
20
24
 
21
25
  from bullish.database.schemas import (
@@ -23,6 +27,8 @@ from bullish.database.schemas import (
23
27
  JobTrackerORM,
24
28
  FilteredResultsORM,
25
29
  IndustryViewORM,
30
+ SignalSeriesORM,
31
+ BacktestResultORM,
26
32
  )
27
33
  from bullish.database.scripts.upgrade import upgrade
28
34
  from bullish.exceptions import DatabaseFileNotFoundError
@@ -31,7 +37,7 @@ from bullish.interface.interface import BullishDbBase
31
37
  from bullish.jobs.models import JobTracker, JobTrackerStatus
32
38
 
33
39
  if TYPE_CHECKING:
34
- pass
40
+ from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
35
41
 
36
42
  logger = logging.getLogger(__name__)
37
43
 
@@ -261,3 +267,63 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
261
267
  )
262
268
  result = session.exec(stmt).all()
263
269
  return [IndustryView.model_validate(r) for r in result]
270
+
271
+ def write_signal_series(self, signal_series: List[SignalSeries]) -> None:
272
+ with Session(self._engine) as session:
273
+ stmt = (
274
+ insert(SignalSeriesORM)
275
+ .prefix_with("OR REPLACE")
276
+ .values([a.model_dump() for a in signal_series])
277
+ )
278
+ session.exec(stmt) # type: ignore
279
+ session.commit()
280
+
281
+ def read_signal_series(
282
+ self, name: str, start_date: date, end_date: date
283
+ ) -> List[str]:
284
+ with Session(self._engine) as session:
285
+ stmt = select(SignalSeriesORM.symbol).where(
286
+ SignalSeriesORM.name == name,
287
+ SignalSeriesORM.date >= start_date, # type: ignore
288
+ SignalSeriesORM.date <= end_date, # type: ignore
289
+ )
290
+ return list(set(session.exec(stmt).all()))
291
+
292
+ def read_symbol_series(
293
+ self, symbol: str, start_date: date, end_date: Optional[date] = None
294
+ ) -> pd.DataFrame:
295
+
296
+ with Session(self._engine) as session:
297
+ query_ = select(PriceORM)
298
+ query_ = query_.where(PriceORM.symbol == symbol)
299
+ if end_date:
300
+ query_ = query_.where(
301
+ PriceORM.date >= start_date, PriceORM.date <= end_date
302
+ )
303
+ else:
304
+ query_ = query_.where(PriceORM.date >= start_date)
305
+ series = session.exec(query_).all()
306
+ prices = [Price.model_validate(serie) for serie in series]
307
+ return Prices(prices=prices).to_dataframe() # type: ignore
308
+
309
+ def write_many_backtest_results(
310
+ self, backtest_results: List["BacktestResult"]
311
+ ) -> None:
312
+ with Session(self._engine) as session:
313
+ stmt = (
314
+ insert(BacktestResultORM)
315
+ .prefix_with("OR REPLACE")
316
+ .values([a.model_dump() for a in backtest_results])
317
+ )
318
+ session.exec(stmt) # type: ignore
319
+ session.commit()
320
+
321
+ def read_many_backtest_results(
322
+ self, query: Optional["BacktestResultQuery"] = None
323
+ ) -> List["BacktestResult"]:
324
+ from bullish.analysis.backtest import BacktestResult
325
+
326
+ with Session(self._engine) as session:
327
+ stmt = select(BacktestResultORM)
328
+ results = session.exec(stmt).all()
329
+ return [BacktestResult.model_validate(r) for r in results]
@@ -3,7 +3,9 @@ from typing import Dict, Any
3
3
  from sqlmodel import Field, SQLModel
4
4
  from sqlalchemy import Column, JSON
5
5
  from bullish.analysis.analysis import Analysis
6
+ from bullish.analysis.backtest import BacktestResult
6
7
  from bullish.analysis.filter import FilteredResults
8
+ from bullish.analysis.indicators import SignalSeries
7
9
  from bullish.analysis.industry_views import IndustryView
8
10
 
9
11
  from bullish.jobs.models import JobTracker
@@ -48,6 +50,15 @@ class FilteredResultsORM(SQLModel, FilteredResults, table=True):
48
50
  filter_query: Dict[str, Any] = Field(sa_column=Column(JSON)) # type: ignore
49
51
 
50
52
 
53
+ class SignalSeriesORM(SQLModel, SignalSeries, table=True):
54
+ __tablename__ = "signalseries"
55
+ __table_args__ = {"extend_existing": True} # noqa:RUF012
56
+ date: str = Field(primary_key=True) # type: ignore
57
+ name: str = Field(primary_key=True)
58
+ symbol: str = Field(primary_key=True)
59
+ value: float | None = Field(default=None, nullable=True) # type: ignore
60
+
61
+
51
62
  class IndustryViewORM(SQLModel, IndustryView, table=True):
52
63
  __tablename__ = "industryview"
53
64
  __table_args__ = {"extend_existing": True} # noqa:RUF012
@@ -61,3 +72,15 @@ class IndustryViewORM(SQLModel, IndustryView, table=True):
61
72
  industry_group: str | None = Field(default=None, nullable=True) # type: ignore
62
73
  sector: str | None = Field(default=None, nullable=True) # type: ignore
63
74
  type: str = Field(primary_key=True) # type: ignore
75
+
76
+
77
+ class BacktestResultORM(SQLModel, BacktestResult, table=True):
78
+ __tablename__ = "backtestresult"
79
+ __table_args__ = {"extend_existing": True} # noqa:RUF012
80
+ name: str = Field(primary_key=True)
81
+ start: str = Field(primary_key=True) # type: ignore
82
+ holding_period: int = Field(primary_key=True)
83
+ extend_days: int = Field(primary_key=True)
84
+ percentage: int = Field(primary_key=True)
85
+ iterations: int = Field(primary_key=True)
86
+ data: Dict[str, Any] = Field(sa_column=Column(JSON))
@@ -10,8 +10,10 @@ from bearish.types import Sources # type: ignore
10
10
 
11
11
 
12
12
  from bullish.analysis.analysis import Analysis, AnalysisView
13
+ from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
13
14
  from bullish.analysis.constants import Industry, Sector, IndustryGroup, Country
14
15
  from bullish.analysis.filter import FilterQuery, FilteredResults
16
+ from bullish.analysis.indicators import SignalSeries
15
17
  from bullish.analysis.industry_views import Type, IndustryView
16
18
  from bullish.jobs.models import JobTracker, JobTrackerStatus, add_icons
17
19
 
@@ -125,3 +127,25 @@ class BullishDbBase(BearishDbBase): # type: ignore
125
127
  def read_returns(
126
128
  self, type: Type, industry: Industry, country: Country
127
129
  ) -> List[IndustryView]: ...
130
+
131
+ @abc.abstractmethod
132
+ def write_signal_series(self, signal_series: List[SignalSeries]) -> None: ...
133
+
134
+ @abc.abstractmethod
135
+ def read_signal_series(
136
+ self, name: str, start_date: date, end_date: date
137
+ ) -> List[str]: ...
138
+
139
+ @abc.abstractmethod
140
+ def read_symbol_series(
141
+ self, symbol: str, start_date: date, end_date: Optional[date] = None
142
+ ) -> pd.DataFrame: ...
143
+ @abc.abstractmethod
144
+ def write_many_backtest_results(
145
+ self, backtest_results: List[BacktestResult]
146
+ ) -> None: ...
147
+
148
+ @abc.abstractmethod
149
+ def read_many_backtest_results(
150
+ self, query: Optional[BacktestResultQuery] = None
151
+ ) -> List[BacktestResult]: ...
bullish/jobs/models.py CHANGED
@@ -4,7 +4,7 @@ from typing import Literal, get_args
4
4
  import pandas as pd
5
5
  from pydantic import BaseModel, Field
6
6
 
7
- JobType = Literal["Update data", "Update analysis", "Fetching news"]
7
+ JobType = Literal["Update data", "Update analysis", "Fetching news", "backtest signals"]
8
8
  JobStatus = Literal["Completed", "Failed", "Running", "Started"]
9
9
  StatusIcon = ["✅ Completed", "❌ Failed", "🔄 Running", "🚀 Started"]
10
10
 
bullish/jobs/tasks.py CHANGED
@@ -11,7 +11,9 @@ from pathlib import Path
11
11
  from huey.api import Task # type: ignore
12
12
 
13
13
  from .models import JobTrackerStatus, JobTracker, JobType
14
- from ..analysis.analysis import run_analysis
14
+ from ..analysis.analysis import run_analysis, run_signal_series_analysis
15
+ from ..analysis.backtest import run_many_tests, BackTestConfig
16
+ from ..analysis.predefined_filters import predefined_filters
15
17
  from ..database.crud import BullishDb
16
18
  from bullish.analysis.filter import FilterUpdate
17
19
 
@@ -54,11 +56,13 @@ def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
54
56
  def update(
55
57
  database_path: Path,
56
58
  job_type: JobType,
57
- symbols: List[str],
59
+ symbols: Optional[List[str]],
58
60
  update_query: FilterUpdate,
59
61
  task: Optional[Task] = None,
60
62
  ) -> None:
61
- logger.debug(f"Running update task for {len(symbols)} tickers.")
63
+ logger.debug(
64
+ f"Running update task for {len(symbols) if symbols else 'ALL'} tickers."
65
+ )
62
66
  if not update_query.update_analysis_only:
63
67
  bearish = Bearish(path=database_path, auto_migration=False)
64
68
  bearish.update_prices(
@@ -83,6 +87,18 @@ def analysis(
83
87
  run_analysis(bullish_db)
84
88
 
85
89
 
90
+ @huey.task(context=True) # type: ignore
91
+ @job_tracker
92
+ def backtest_signals(
93
+ database_path: Path,
94
+ job_type: JobType,
95
+ task: Optional[Task] = None,
96
+ ) -> None:
97
+ bullish_db = BullishDb(database_path=database_path)
98
+ run_signal_series_analysis(bullish_db)
99
+ run_many_tests(bullish_db, predefined_filters(), BackTestConfig())
100
+
101
+
86
102
  @huey.task(context=True) # type: ignore
87
103
  @job_tracker
88
104
  def news(
@@ -1,23 +1,24 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: bullishpy
3
- Version: 0.14.0
3
+ Version: 0.16.0
4
4
  Summary:
5
5
  Author: aan
6
6
  Author-email: andoludovic.andriamamonjy@gmail.com
7
7
  Requires-Python: >=3.12,<3.13
8
8
  Classifier: Programming Language :: Python :: 3
9
9
  Classifier: Programming Language :: Python :: 3.12
10
- Requires-Dist: bearishpy (>=0.22.0,<0.23.0)
10
+ Requires-Dist: bearishpy (>=0.26.0,<0.27.0)
11
11
  Requires-Dist: click (>=7.0,<=8.1)
12
12
  Requires-Dist: huey (>=2.5.3,<3.0.0)
13
13
  Requires-Dist: joblib (>=1.5.1,<2.0.0)
14
14
  Requires-Dist: pandas-ta (>=0.3.14b0,<0.4.0)
15
- Requires-Dist: plotly (>=6.1.2,<7.0.0)
15
+ Requires-Dist: plotly (>=4.12.0,<6.0.0)
16
16
  Requires-Dist: streamlit (>=1.45.1,<2.0.0)
17
17
  Requires-Dist: streamlit-file-browser (>=3.2.22,<4.0.0)
18
18
  Requires-Dist: streamlit-pydantic (>=v0.6.1-rc.3,<0.7.0)
19
19
  Requires-Dist: ta-lib (>=0.6.4,<0.7.0)
20
20
  Requires-Dist: tickermood (>=0.4.0,<0.5.0)
21
+ Requires-Dist: vectorbt (>=0.28.0,<0.29.0)
21
22
  Description-Content-Type: text/markdown
22
23
 
23
24
  ## Bullish