bullishpy 0.15.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic. Click here for more details.

@@ -28,7 +28,7 @@ from bearish.models.financials.balance_sheet import ( # type: ignore
28
28
  BalanceSheet,
29
29
  QuarterlyBalanceSheet,
30
30
  )
31
- from bearish.models.financials.base import Financials # type: ignore
31
+ from bearish.models.financials.base import Financials, FinancialsWithDate # type: ignore
32
32
  from bearish.models.financials.cash_flow import ( # type: ignore
33
33
  CashFlow,
34
34
  QuarterlyCashFlow,
@@ -404,6 +404,24 @@ class FundamentalAnalysis(YearlyFundamentalAnalysis, QuarterlyFundamentalAnalysi
404
404
  yearly_analysis.model_dump() | quarterly_analysis.model_dump()
405
405
  )
406
406
 
407
+ @classmethod
408
+ def compute_series(
409
+ cls, financials: FinancialsWithDate, ticker: Ticker
410
+ ) -> List[SignalSeries]:
411
+ fundamendal_analysis = FundamentalAnalysis.from_financials(financials, ticker)
412
+ fundamental_analysis_ = fundamendal_analysis.model_dump(
413
+ exclude_none=True, exclude_unset=True, exclude_defaults=True
414
+ )
415
+ fundamental_analysis_ = {
416
+ k: v for k, v in fundamental_analysis_.items() if v is True
417
+ }
418
+ return [
419
+ SignalSeries(
420
+ name=k.upper(), symbol=ticker.symbol, value=v, date=financials.date
421
+ )
422
+ for k, v in fundamental_analysis_.items()
423
+ ]
424
+
407
425
 
408
426
  class AnalysisView(BaseModel):
409
427
  sector: Annotated[
@@ -488,6 +506,16 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
488
506
  )
489
507
 
490
508
 
509
def compute_financials_series(
    financials_: Financials, ticker: Ticker
) -> List[SignalSeries]:
    """Expand a Financials bundle into dated snapshots and collect the
    fundamental-analysis signal series for each snapshot."""
    dated_snapshots = FinancialsWithDate.from_financials(financials_)
    collected: List[SignalSeries] = []
    for snapshot in dated_snapshots:
        collected.extend(FundamentalAnalysis.compute_series(snapshot, ticker))
    return collected
517
+
518
+
491
519
  def compute_analysis(database_path: Path, ticker: Ticker) -> Analysis:
492
520
  from bullish.database.crud import BullishDb
493
521
 
@@ -502,7 +530,9 @@ def compute_signal_series(database_path: Path, ticker: Ticker) -> List[SignalSer
502
530
  indicators = Indicators()
503
531
  prices = Prices.from_ticker(bullish_db, ticker)
504
532
  signal_series = indicators.compute_series(prices.to_dataframe(), ticker.symbol)
505
- return signal_series
533
+ financials = Financials.from_ticker(bullish_db, ticker)
534
+ financial_series = compute_financials_series(financials, ticker)
535
+ return signal_series + financial_series
506
536
 
507
537
 
508
538
  def run_signal_series_analysis(bullish_db: "BullishDb") -> None:
@@ -511,17 +541,16 @@ def run_signal_series_analysis(bullish_db: "BullishDb") -> None:
511
541
  tickers = list(price_trackers.intersection(finance_trackers))
512
542
  parallel = Parallel(n_jobs=-1)
513
543
 
514
- for batch_ticker in batched(tickers, 100):
515
- start = time.perf_counter()
544
+ for batch_ticker in batched(tickers, 1):
516
545
  many_signal_series = parallel(
517
546
  delayed(compute_signal_series)(bullish_db.database_path, ticker)
518
547
  for ticker in batch_ticker
519
548
  )
520
- bullish_db.write_signal_series(list(chain.from_iterable(many_signal_series)))
521
- elapsed_time = time.perf_counter() - start
522
- print(
523
- f"Computed signal series for {len(batch_ticker)} tickers in {elapsed_time:.2f} seconds."
524
- )
549
+ series = list(chain.from_iterable(many_signal_series))
550
+ try:
551
+ bullish_db.write_signal_series(series)
552
+ except Exception as e:
553
+ logger.error(f"Failed to compute signal series for {batch_ticker}: {e}")
525
554
 
526
555
 
527
556
  def run_analysis(bullish_db: "BullishDb") -> None:
@@ -0,0 +1,422 @@
1
+ import json
2
+ import logging
3
+ import random
4
+ from datetime import date, timedelta
5
+ from typing import TYPE_CHECKING, Optional, Union, List, Dict, Any
6
+
7
+ import numpy as np
8
+ import pandas as pd
9
+ from pydantic import BaseModel, Field, model_validator
10
+
11
+
12
+ import plotly.graph_objects as go
13
+
14
+ if TYPE_CHECKING:
15
+ from bullish.analysis.predefined_filters import NamedFilterQuery
16
+ from bullish.database.crud import BullishDb
17
+
18
+ logger = logging.getLogger(__name__)
19
+ COLOR = {
20
+ "mean": "#1f77b4", # A refined blue (Plotly default)
21
+ "upper": "#d62728", # Strong red
22
+ "lower": "#2ca02c", # Rich green
23
+ "median": "#ff7f0e", # Bright orange
24
+ }
25
+
26
+
27
class BacktestQueryBase(BaseModel):
    """Common fields shared by every backtest query variant."""

    name: str  # column/signal name the query filters on
    table: str  # database table the query is issued against
30
+
31
+
32
class BacktestQueryDate(BacktestQueryBase):
    """Query matching rows whose date falls within [start, end]."""

    start: date
    end: date
36
+
37
+
38
class BacktestQueryRange(BacktestQueryBase):
    """Query matching rows whose numeric column lies within [min, max]."""

    # Field names intentionally shadow the builtins; they are part of the
    # model's public schema.
    min: float
    max: float
42
+
43
+
44
class BacktestQuerySelection(BacktestQueryBase):
    """Query matching rows whose column value is one of *selections*."""

    selections: List[str]

    def to_selections(self) -> str:
        """Render the selections as a comma-separated list of SQL string
        literals, e.g. ``'a', 'b'``."""
        quoted = (f"'{item}'" for item in self.selections)
        return ", ".join(quoted)
50
+
51
+
52
class BacktestQueries(BaseModel):
    """A conjunction of backtest queries combined via SQL ``INTERSECT``."""

    queries: list[Union[BacktestQueryDate, BacktestQueryRange, BacktestQuerySelection]]

    def to_query(self) -> str:
        """Build one SQL statement selecting the symbols matching every query.

        Each query variant renders to its own SELECT; multiple selects are
        intersected so a symbol must satisfy all of them.
        """
        rendered: List[str] = []
        for item in self.queries:
            # The three variants are disjoint subclasses, so an elif chain is
            # equivalent to independent isinstance checks.
            if isinstance(item, BacktestQueryDate):
                rendered.append(
                    f"SELECT symbol FROM {item.table} WHERE name='{item.name}' "  # noqa: S608
                    f"AND date >='{item.start}' AND date <='{item.end}'"
                )
            elif isinstance(item, BacktestQueryRange):
                rendered.append(
                    f"SELECT symbol FROM {item.table} WHERE "  # noqa: S608
                    f"{item.name} >= {item.min} AND {item.name} <= {item.max}"
                )
            elif isinstance(item, BacktestQuerySelection):
                rendered.append(
                    f"SELECT symbol FROM {item.table} WHERE "  # noqa: S608
                    f"{item.name} IN ({item.to_selections()})"
                )
        if len(rendered) == 1:
            return rendered[0]
        return " INTERSECT ".join(rendered)
78
+
79
+
80
class ReturnPercentage(BaseModel):
    """Exit-strategy parameter: target gain (in percent) at which to sell."""

    return_percentage: float = Field(
        default=12, description="Return percentage of the backtest"
    )
84
+
85
+
86
class BaseBacktestResult(BaseModel):
    """Shared parameters describing a single backtest run.

    FIX: ``start``/``end`` previously used ``Field(default=date.today() ...)``,
    which evaluates ``date.today()`` once at import time and silently goes
    stale in a long-running process; ``default_factory`` re-evaluates the
    default on every instantiation.
    """

    # Default window: roughly one trading year back from "now".
    start: date = Field(default_factory=lambda: date.today() - timedelta(days=252))
    end: date = Field(default_factory=date.today)
    investment: float = Field(default=1000)
    holding_period: int = Field(default=30 * 3)  # ~one quarter, in days
    extend_days: int = Field(
        default=5,
        description="Extend the backtest by this many days if no symbols are found",
    )
    percentage: int = Field(default=12, description="Return percentage of the backtest")
    iterations: int = Field(default=200, description="Number of iterations to run")
97
+
98
+
99
class BacktestResultQuery(BaseBacktestResult):
    """Backtest parameters keyed by the named filter they were produced for."""

    name: str  # name of the NamedFilterQuery this result belongs to
101
+
102
+
103
class BacktestResult(BacktestResultQuery):
    """A persisted backtest outcome: run parameters plus the result curves."""

    # JSON-serialized DataFrame produced by BackTests.to_error()
    # (mean/upper/lower/median columns) — see to_backtest_result().
    data: Dict[str, Any]

    def to_dataframe(self) -> pd.DataFrame:
        """Rebuild the stored curves as a DataFrame sorted by its index.

        FIX: passing a raw JSON string to ``pd.read_json`` is deprecated in
        pandas 2.x; wrap it in a file-like ``StringIO`` object instead.
        """
        from io import StringIO

        return pd.read_json(StringIO(json.dumps(self.data))).sort_index()
108
+
109
+
110
class BacktestResults(BaseModel):
    """Collection of stored backtest results with plotting support."""

    results: List[BacktestResult]

    def figure(self, type: str = "mean") -> go.Figure:
        """Plot the chosen statistic column (*type*) of every result as a line."""
        fig = go.Figure()
        for result in self.results:
            curve = result.to_dataframe()[type]
            hover = (
                "Date: %{x}<br>"
                + "Price: %{y:.2f}<br>"
                + f"Percentage: {result.percentage}<br>"
                + f"Iterations: {result.iterations}<br>"
                + f"Investment: {result.investment}<extra></extra>"
            )
            trace = go.Scatter(
                x=curve.index,
                y=curve,
                mode="lines",
                name=f"{result.name} ({type})",
                line={"width": 1},
                hovertemplate=hover,
            )
            fig.add_trace(trace)
        fig.update_layout(
            height=800,
            showlegend=True,
            margin={"t": 60, "b": 40},
        )
        return fig
140
+
141
+
142
class BackTestConfig(BaseBacktestResult):
    """Backtest run parameters plus the exit strategy to apply.

    FIX: the original declared ``Field(default=ReturnPercentage)``, which
    stores the *class object* as the default instead of an instance;
    ``default_factory`` constructs a fresh ``ReturnPercentage()`` per config.
    """

    exit_strategy: ReturnPercentage = Field(default_factory=ReturnPercentage)

    def to_base_backtest_result(self) -> BaseBacktestResult:
        """Strip the exit strategy, keeping only the shared run parameters."""
        return BaseBacktestResult(
            start=self.start,
            end=self.end,
            investment=self.investment,
            holding_period=self.holding_period,
            extend_days=self.extend_days,
            percentage=self.percentage,
            iterations=self.iterations,
        )
155
+
156
+
157
class Equity(BaseModel):
    """A single buy/sell round trip executed during a backtest."""

    symbol: str
    start: date  # date the position was entered
    end: date  # date the position was exited
    buy: float  # entry price per share
    sell: float  # exit price per share
    investment_in: float  # capital committed at entry
    investment_out: Optional[float] = None  # capital after exit, set lazily

    def profit(self) -> float:
        """Profit of the trade: price move times the number of shares bought."""
        shares = self.investment_in / self.buy
        return shares * (self.sell - self.buy)

    def current_value(self) -> float:
        """Committed capital plus the realized profit."""
        return self.investment_in + self.profit()

    def set_investment_out(self) -> None:
        """Record the post-trade capital on the model."""
        self.investment_out = self.current_value()
174
+
175
+
176
class BackTest(BaseModel):
    """One simulated investment path: a chronological chain of equity trades."""

    equities: list[Equity] = Field(
        default_factory=list, description="List of equities bought during the backtest"
    )
    # FIX: default_factory so the date is evaluated per instance, not at import.
    end: date = Field(
        default_factory=date.today, description="End date of the backtest"
    )

    def valid(self) -> bool:
        """True when at least one trade was executed."""
        return bool(self.equities)

    def total_profit(self) -> float:
        """Sum of the per-trade profits."""
        return sum(equity.profit() for equity in self.equities)

    def symbols(self) -> list[str]:
        """Symbols traded, in execution order."""
        return [equity.symbol for equity in self.equities]

    def show(self) -> None:
        """Print a human-readable trade log.

        BUG FIX: the original interpolated ``eq.type``, an attribute that does
        not exist on :class:`Equity`, so calling ``show`` always raised
        ``AttributeError``.
        """
        for eq in self.equities:
            print(
                f"\n{eq.symbol}: {eq.start}:{eq.investment_in} ({eq.buy}) - "
                f"{eq.end}:{eq.investment_out} ({eq.sell})"
            )

    def to_dataframe(self) -> pd.DataFrame:
        """Tabulate the trade chain as rows of (prices, symbols, buy, sell).

        NOTE(review): ``np.array`` over mixed floats/strings coerces every
        column to str; downstream consumers re-cast with ``astype(float)``.
        """
        prices = [
            self.equities[0].investment_in,
            *[e.investment_out for e in self.equities],
        ]
        symbols = [self.equities[0].symbol, *[e.symbol for e in self.equities]]
        index = [self.equities[0].start, *[e.end for e in self.equities]]
        buy = [self.equities[0].buy, *[e.buy for e in self.equities]]
        sell = [self.equities[0].sell, *[e.sell for e in self.equities]]
        data = pd.DataFrame(
            np.array([prices, symbols, buy, sell]).T,
            index=index,
            columns=["prices", "symbols", "buy", "sell"],
        )
        # Keep only the first row for any duplicated date.
        data = data[~data.index.duplicated(keep="first")]
        return data

    def __hash__(self) -> int:
        # Two backtests are considered identical when they traded the same
        # (order-insensitive) set of symbols; used for de-duplication.
        return hash(tuple(sorted(equity.symbol for equity in self.equities)))
217
+
218
+
219
class BackTests(BaseModel):
    """A set of backtest runs for one named filter, with aggregation helpers."""

    tests: list[BackTest] = Field(default_factory=list, description="List of backtests")
    config: BackTestConfig
    name: str  # name of the NamedFilterQuery the runs belong to

    @model_validator(mode="after")
    def _validate(self) -> "BackTests":
        # Remove duplicate runs; BackTest hashes on its traded symbol set.
        self.tests = list(set(self.tests))
        return self

    def to_dataframe(self) -> pd.DataFrame:
        """Concatenate all valid runs column-wise, forward-filling gaps.

        FIX: ``fillna(method="ffill")`` is deprecated (removed in pandas 3.0);
        use ``DataFrame.ffill`` instead — identical behavior.
        """
        data = (
            pd.concat([t.to_dataframe() for t in self.tests if t.valid()], axis=1)
            .sort_index()
            .ffill()
        )
        data = data[~data.index.duplicated(keep="first")]
        return data

    def to_error(self) -> pd.DataFrame:
        """Aggregate the portfolio-value curves into mean/upper/lower/median bands."""
        data_ = self.to_dataframe()
        # 'prices' is duplicated once per run; selecting it yields a frame of
        # all runs, aggregated row-wise below.
        prices = data_.prices.astype(float)
        mean = prices.mean(axis=1).rename("mean")
        std = prices.std(axis=1)
        median = prices.median(axis=1).rename("median")
        upper = (mean + std).rename("upper")
        lower = (mean - std).rename("lower")
        return pd.concat([mean, upper, lower, median], axis=1).sort_index()

    def to_backtest_result(self) -> BacktestResult:
        """Serialize the aggregated curves together with the run configuration."""
        return BacktestResult.model_validate(
            self.config.to_base_backtest_result().model_dump()
            | {"data": json.loads(self.to_error().to_json()), "name": self.name}
        )

    def to_figure(self) -> go.Figure:
        """Plot every individual run in grey plus the aggregated bands in color."""
        data_ = self.to_dataframe()
        # FIX: computed once; the original called self.to_error() twice
        # (once discarding the result), recomputing the full aggregation.
        errors = self.to_error()
        # Each run contributes 4 columns (prices/symbols/buy/sell).
        column_chunks = [data_.iloc[:, i : i + 4] for i in range(0, data_.shape[1], 4)]
        fig = go.Figure()
        for data in column_chunks:
            fig.add_trace(
                go.Scatter(
                    x=data.index,
                    y=data.prices.astype(float),
                    mode="lines",
                    showlegend=False,
                    customdata=data[
                        ["symbols", "sell", "buy"]
                    ],  # Include multiple overlay columns
                    line={"color": "grey", "width": 0.5},  # normal grey
                    opacity=0.5,
                    hovertemplate=(
                        "Date: %{x}<br>"
                        + "Price: %{y:.2f}<br>"
                        + "Symbols: %{customdata[0]}<br>"
                        + "Sell: %{customdata[1]}<br>"
                        + "Buy: %{customdata[2]}<extra></extra>"
                    ),
                )
            )
        for name, column in errors.items():
            fig.add_trace(
                go.Scatter(
                    x=column.index,
                    y=column,
                    mode="lines",
                    line={"color": COLOR[name], "width": 1},
                    showlegend=True,
                    name=name,
                )
            )
        fig.update_layout(
            title="Predefined filter performance",
            xaxis_title="Date",
            yaxis_title="Prices [Currency]",
        )
        fig.show()
        return fig
300
+
301
+
302
def run_backtest(  # noqa: C901, PLR0915
    bullish_db: "BullishDb", named_filter: "NamedFilterQuery", config: BackTestConfig
) -> BackTest:
    """Simulate one investment path for *named_filter*.

    Repeatedly: pick a random symbol matching the filter at the current date,
    buy it, and hold until the price reaches the (decaying) target return or
    the backtest window ends; the proceeds are reinvested into the next pick.

    Args:
        bullish_db: database handle used to query filter matches and prices.
        named_filter: screening filter providing candidate symbols per date.
        config: run parameters (window, investment, holding period, target %).

    Returns:
        A BackTest holding the chronological chain of executed trades
        (possibly empty if no symbols were ever found).
    """
    equities = []
    start_date = config.start
    presence_delta = timedelta(days=config.holding_period)
    investment = config.investment  # running capital, compounded across trades
    exclude_symbols = []  # symbols that turned out to have no price data
    while True:
        symbols = []
        # Advance start_date in extend_days steps until the filter matches
        # something, or the window is exhausted.
        while not symbols:
            symbols = named_filter.get_backtesting_symbols(bullish_db, start_date)
            symbols = [b for b in symbols if b not in exclude_symbols]
            if symbols:
                break
            start_date = start_date + timedelta(days=config.extend_days)
            if start_date > config.end:
                logger.debug("No symbols found for the given date range.")
                break
        if symbols:
            symbol = random.choice(symbols)  # noqa: S311
            logger.debug(f"Found symbol: {symbol}, for date: {start_date}")
            enter_position = start_date
            end_position = None
            counter = 0  # number of holding periods already waited
            buy_price = None
            while True:

                # Prices for the current holding-period window only.
                data = bullish_db.read_symbol_series(
                    symbol,
                    start_date=enter_position + counter * presence_delta,
                    end_date=enter_position + (counter + 1) * presence_delta,
                )
                if data.empty:
                    logger.debug(f"No data found for symbol: {symbol}")
                    exclude_symbols.append(symbol)
                    end_position = start_date
                    break
                data.index = data.index.tz_localize(None)
                if counter == 0:
                    # First window: enter at the first available close.
                    enter_position_timestamp = data.close.first_valid_index()
                    enter_position = enter_position_timestamp.date()
                    buy_price = data.close.loc[enter_position_timestamp]

                # Target return decays with each extra holding period waited:
                # percentage / (counter + 1).
                mask = data.close >= buy_price * (
                    1 + config.percentage / (100 * (counter + 1))
                )
                mask_ = mask[mask == True]  # noqa: E712

                if mask_.empty:
                    # Target not reached in this window.
                    if enter_position + (counter + 1) * presence_delta > config.end:
                        # Out of time: forced exit at the last available close.
                        end_position = data.close.index[-1].date()
                        sell_price = data.close.iloc[-1]
                        equity = Equity(
                            symbol=symbol,
                            start=enter_position,
                            end=end_position,
                            buy=buy_price,
                            sell=sell_price,
                            investment_in=investment,
                        )
                        equity.set_investment_out()
                        equities.append(equity)
                        investment = equity.current_value()
                        end_position = config.end
                        break
                    counter += 1
                    continue
                else:
                    # Target reached: exit at the first close hitting it.
                    end_position_timestamp = data[mask].first_valid_index()
                    end_position = end_position_timestamp.date()
                    equity = Equity(
                        symbol=symbol,
                        start=enter_position,
                        end=end_position,
                        buy=buy_price,
                        sell=data[mask].close.loc[end_position_timestamp],
                        investment_in=investment,
                    )
                    equity.set_investment_out()
                    equities.append(equity)
                    investment = equity.current_value()
                    break

        # NOTE(review): if no symbols were ever found, end_position is
        # unbound here (NameError) — confirm this path cannot occur or guard it.
        start_date = end_position
        if start_date >= config.end:
            break
    back_test = BackTest(equities=equities)
    return back_test
391
+
392
+
393
def run_tests(
    bullish_db: "BullishDb", named_filter: "NamedFilterQuery", config: BackTestConfig
) -> BackTests:
    """Run ``config.iterations`` independent backtests for one named filter."""
    runs = [
        run_backtest(bullish_db, named_filter, config)
        for _ in range(config.iterations)
    ]
    return BackTests(config=config, name=named_filter.name, tests=runs)
404
+
405
+
406
def run_many_tests(
    bullish_db: "BullishDb",
    named_filters: List["NamedFilterQuery"],
    config: BackTestConfig,
) -> None:
    """Backtest every filter and persist whichever results could be computed.

    Failures for individual filters are logged and skipped so one broken
    filter does not abort the whole batch.
    """
    collected = []
    for named_filter in named_filters:
        try:
            result = run_tests(bullish_db, named_filter, config).to_backtest_result()
        except Exception as e:  # noqa: PERF203
            logger.error(e)
            continue
        collected.append(result)

    if collected:
        bullish_db.write_many_backtest_results(collected)
@@ -82,12 +82,15 @@ PROPERTIES_GROUP = list(
82
82
  {*INCOME_GROUP, *CASH_FLOW_GROUP, *EPS_GROUP}
83
83
  )
84
84
  )
85
-
86
- GROUP_MAPPING: Dict[str, List[str]] = {
85
+ BOOLEAN_GROUP_MAPPING: Dict[str, List[str]] = {
87
86
  "income": INCOME_GROUP,
88
87
  "cash_flow": CASH_FLOW_GROUP,
89
88
  "eps": EPS_GROUP,
90
89
  "properties": PROPERTIES_GROUP,
90
+ }
91
+ GROUP_MAPPING: Dict[str, List[str]] = {
92
+ **BOOLEAN_GROUP_MAPPING,
93
+ "properties": PROPERTIES_GROUP,
91
94
  "country": list(get_args(Country)),
92
95
  "industry": list(get_args(Industry)),
93
96
  "industry_group": list(get_args(IndustryGroup)),
@@ -322,7 +322,9 @@ class IndicatorFunction(BaseModel):
322
322
  except Exception as e:
323
323
  logger.error(f"Fail to compute function {function.__name__}: {e}")
324
324
  if data_ is None:
325
- raise ValueError("No data returned from indicator functions.")
325
+ raise ValueError(
326
+ f"No data returned from indicator functions with expected columns {self.expected_columns}."
327
+ )
326
328
  if not set(self.expected_columns).issubset(set(data_.columns)):
327
329
  raise ValueError(
328
330
  f"Expected columns {self.expected_columns} not found in data columns {data_.columns.tolist()}"
@@ -58,6 +58,7 @@ class Signal(BaseModel):
58
58
  description: str
59
59
  date: Optional[date] = None
60
60
  value: Optional[float] = None
61
+ in_use_backtest: bool = False
61
62
 
62
63
  def is_date(self) -> bool:
63
64
  if self.type == Optional[date]:
@@ -113,28 +114,40 @@ class Indicator(BaseModel):
113
114
 
114
115
  def compute_series(self, data: pd.DataFrame, symbol: str) -> pd.DataFrame:
115
116
  series = []
116
- results = self.function(data)
117
+ try:
118
+ results = self.function(data)
119
+ except Exception as e:
120
+ logger.error(
121
+ f"Failed to compute indicator {self.name} for symbol {symbol}: {e}"
122
+ )
123
+ return pd.DataFrame()
117
124
  if not set(self.expected_columns).issubset(results.columns):
118
125
  raise ValueError(
119
126
  f"Expected columns {self.expected_columns}, but got {results.columns.tolist()}"
120
127
  )
121
128
  for signal in self.signals:
129
+ if not signal.in_use_backtest:
130
+ continue
122
131
  try:
123
132
  series_ = signal.compute_series(results)
124
133
  if signal.type == Optional[date]:
125
134
  series__ = pd.DataFrame(series_[series_ == 1].rename("value"))
126
135
  else:
127
- series__ = pd.DataFrame(series_.rename("value"))
136
+ series__ = pd.DataFrame(
137
+ series_[series_ != None].rename("value") # noqa: E711
138
+ )
128
139
 
129
140
  series__["name"] = signal.name
130
141
  series__["date"] = series__.index.date # type: ignore
131
142
  series__["symbol"] = symbol
132
143
  series__ = series__.reset_index(drop=True)
133
144
  series.append(series__)
134
- except Exception as e: # noqa: PERF203
145
+ except Exception as e:
135
146
  logger.error(
136
147
  f"Fail to compute signal {signal.name} for indicator {self.name}: {e}"
137
148
  )
149
+ if not series:
150
+ return pd.DataFrame()
138
151
  data = pd.concat(series).reset_index(drop=True)
139
152
  return data
140
153
 
@@ -181,6 +194,7 @@ def indicators_factory() -> List[Indicator]:
181
194
  function=lambda d: cross_simple(
182
195
  d.MACD_12_26_9, d.MACD_12_26_9_SIGNAL
183
196
  ),
197
+ in_use_backtest=True,
184
198
  ),
185
199
  Signal(
186
200
  name="MACD_12_26_9_BEARISH_CROSSOVER",
@@ -221,6 +235,7 @@ def indicators_factory() -> List[Indicator]:
221
235
  type_info="Long",
222
236
  type=Optional[date],
223
237
  function=lambda d: cross_value_series(d.RSI, 30),
238
+ in_use_backtest=True,
224
239
  ),
225
240
  Signal(
226
241
  name="RSI_BULLISH_CROSSOVER_40",
@@ -228,6 +243,7 @@ def indicators_factory() -> List[Indicator]:
228
243
  type_info="Long",
229
244
  type=Optional[date],
230
245
  function=lambda d: cross_value_series(d.RSI, 40),
246
+ in_use_backtest=True,
231
247
  ),
232
248
  Signal(
233
249
  name="RSI_BULLISH_CROSSOVER_45",
@@ -235,6 +251,7 @@ def indicators_factory() -> List[Indicator]:
235
251
  type_info="Long",
236
252
  type=Optional[date],
237
253
  function=lambda d: cross_value_series(d.RSI, 45),
254
+ in_use_backtest=True,
238
255
  ),
239
256
  Signal(
240
257
  name="RSI_BEARISH_CROSSOVER",
@@ -249,6 +266,7 @@ def indicators_factory() -> List[Indicator]:
249
266
  type_info="Oversold",
250
267
  type=Optional[date],
251
268
  function=lambda d: (d.RSI < 30) & (d.RSI > 0),
269
+ in_use_backtest=True,
252
270
  ),
253
271
  Signal(
254
272
  name="RSI_OVERBOUGHT",
@@ -322,6 +340,7 @@ def indicators_factory() -> List[Indicator]:
322
340
  type_info="Oversold",
323
341
  type=Optional[date],
324
342
  function=lambda d: cross_simple(d.SMA_50, d.SMA_200),
343
+ in_use_backtest=True,
325
344
  ),
326
345
  Signal(
327
346
  name="DEATH_CROSS",
@@ -336,6 +355,7 @@ def indicators_factory() -> List[Indicator]:
336
355
  type_info="Overbought",
337
356
  type=Optional[date],
338
357
  function=lambda d: d.SMA_50 > d.SMA_200,
358
+ in_use_backtest=True,
339
359
  processing=ProcessingFunction(date=find_last_true_run_start),
340
360
  ),
341
361
  Signal(
@@ -344,6 +364,7 @@ def indicators_factory() -> List[Indicator]:
344
364
  type_info="Overbought",
345
365
  type=Optional[date],
346
366
  function=lambda d: d.SMA_50 < d.CLOSE,
367
+ in_use_backtest=True,
347
368
  processing=ProcessingFunction(date=find_last_true_run_start),
348
369
  ),
349
370
  ],
@@ -562,6 +583,14 @@ def indicators_factory() -> List[Indicator]:
562
583
  class Indicators(BaseModel):
563
584
  indicators: List[Indicator] = Field(default_factory=indicators_factory)
564
585
 
586
+ def in_use_backtest(self) -> List[str]:
587
+ return [
588
+ signal.name.lower()
589
+ for indicator in self.indicators
590
+ for signal in indicator.signals
591
+ if signal.in_use_backtest
592
+ ]
593
+
565
594
  def _compute(self, data: pd.DataFrame) -> None:
566
595
  for indicator in self.indicators:
567
596
  try:
@@ -1,9 +1,19 @@
1
1
  import datetime
2
- from typing import Dict, Any, Optional
2
+ from datetime import timedelta
3
+ from typing import Dict, Any, Optional, List, Union
3
4
 
4
- from bullish.analysis.filter import FilterQuery
5
+ from bullish.analysis.analysis import AnalysisView
6
+ from bullish.analysis.backtest import (
7
+ BacktestQueryDate,
8
+ BacktestQueries,
9
+ BacktestQueryRange,
10
+ BacktestQuerySelection,
11
+ )
12
+ from bullish.analysis.filter import FilterQuery, BOOLEAN_GROUP_MAPPING
5
13
  from pydantic import BaseModel, Field
6
14
 
15
+ from bullish.analysis.indicators import Indicators
16
+ from bullish.database.crud import BullishDb
7
17
 
8
18
  DATE_THRESHOLD = [
9
19
  datetime.date.today() - datetime.timedelta(days=7),
@@ -23,6 +33,74 @@ class NamedFilterQuery(FilterQuery):
23
33
  exclude={"name"},
24
34
  )
25
35
 
36
    def to_backtesting_query(
        self, backtest_start_date: datetime.date
    ) -> BacktestQueries:
        """Translate this filter's active criteria into backtest SQL queries.

        Three sources of criteria are mapped:
        - signals flagged for backtesting whose filter value is a date window
          (shifted so it ends at *backtest_start_date*);
        - boolean-group fields (string lists), each entry becoming a
          signalseries date query over the previous 252 days;
        - AnalysisView fields, mapped to range queries (two floats) or
          selection queries (string lists) against the analysis table.
        """
        queries: List[
            Union[BacktestQueryRange, BacktestQueryDate, BacktestQuerySelection]
        ] = []
        in_use_backtests = Indicators().in_use_backtest()
        for in_use in in_use_backtests:
            value = self.to_dict().get(in_use)
            # Only date-window filters translate to a signalseries date query;
            # the window length is preserved but anchored at the backtest start.
            if value and self.model_fields[in_use].annotation == List[datetime.date]:
                delta = value[1] - value[0]
                queries.append(
                    BacktestQueryDate(
                        name=in_use.upper(),
                        start=backtest_start_date - delta,
                        end=backtest_start_date,
                        table="signalseries",
                    )
                )
        for field in self.to_dict():
            if field in BOOLEAN_GROUP_MAPPING:
                value = self.to_dict().get(field)
                if value and self.model_fields[field].annotation == Optional[List[str]]:  # type: ignore
                    # Each selected flag becomes its own one-trading-year
                    # (252-day) signalseries query.
                    queries.extend(
                        [
                            BacktestQueryDate(
                                name=v.upper(),
                                start=backtest_start_date - timedelta(days=252),
                                end=backtest_start_date,
                                table="signalseries",
                            )
                            for v in value
                        ]
                    )

            if field in AnalysisView.model_fields:
                value = self.to_dict().get(field)
                if (
                    value
                    and self.model_fields[field].annotation == Optional[List[float]]  # type: ignore
                    and len(value) == 2
                ):
                    # Two-element float list -> [min, max] range on analysis.
                    queries.append(
                        BacktestQueryRange(
                            name=field.lower(),
                            min=value[0],
                            max=value[1],
                            table="analysis",
                        )
                    )
                if value and self.model_fields[field].annotation == Optional[List[str]]:  # type: ignore
                    # String list -> IN (...) selection on analysis.
                    queries.append(
                        BacktestQuerySelection(
                            name=field.lower(),
                            selections=value,
                            table="analysis",
                        )
                    )

        return BacktestQueries(queries=queries)
96
+
97
+ def get_backtesting_symbols(
98
+ self, bullish_db: BullishDb, backtest_start_date: datetime.date
99
+ ) -> List[str]:
100
+ queries = self.to_backtesting_query(backtest_start_date)
101
+
102
+ return bullish_db.read_query(queries.to_query())["symbol"].tolist() # type: ignore
103
+
26
104
 
27
105
  STRONG_FUNDAMENTALS = NamedFilterQuery(
28
106
  name="Strong Fundamentals",
@@ -100,7 +178,15 @@ RSI_CROSSOVER_30_GROWTH_STOCK = NamedFilterQuery(
100
178
  rsi_bullish_crossover_30=DATE_THRESHOLD,
101
179
  market_capitalization=[5e8, 1e12],
102
180
  order_by_desc="market_capitalization",
103
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
181
+ country=[
182
+ "Germany",
183
+ "United states",
184
+ "France",
185
+ "United kingdom",
186
+ "Canada",
187
+ "Japan",
188
+ "Belgium",
189
+ ],
104
190
  )
105
191
  RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
106
192
  name="RSI cross-over 40 growth stock",
@@ -108,7 +194,15 @@ RSI_CROSSOVER_40_GROWTH_STOCK = NamedFilterQuery(
108
194
  rsi_bullish_crossover_40=DATE_THRESHOLD,
109
195
  market_capitalization=[5e8, 1e12],
110
196
  order_by_desc="market_capitalization",
111
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
197
+ country=[
198
+ "Germany",
199
+ "United states",
200
+ "France",
201
+ "United kingdom",
202
+ "Canada",
203
+ "Japan",
204
+ "Belgium",
205
+ ],
112
206
  )
113
207
 
114
208
 
@@ -131,7 +225,15 @@ MOMENTUM_GROWTH_GOOD_FUNDAMENTALS = NamedFilterQuery(
131
225
  ],
132
226
  market_capitalization=[5e8, 1e12],
133
227
  order_by_desc="momentum",
134
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
228
+ country=[
229
+ "Germany",
230
+ "United states",
231
+ "France",
232
+ "United kingdom",
233
+ "Canada",
234
+ "Japan",
235
+ "Belgium",
236
+ ],
135
237
  )
136
238
 
137
239
  MOMENTUM_GROWTH_STRONG_FUNDAMENTALS = NamedFilterQuery(
@@ -159,7 +261,15 @@ MOMENTUM_GROWTH_STRONG_FUNDAMENTALS = NamedFilterQuery(
159
261
  ],
160
262
  market_capitalization=[5e8, 1e12],
161
263
  order_by_desc="momentum",
162
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
264
+ country=[
265
+ "Germany",
266
+ "United states",
267
+ "France",
268
+ "United kingdom",
269
+ "Canada",
270
+ "Japan",
271
+ "Belgium",
272
+ ],
163
273
  )
164
274
  MOMENTUM_GROWTH_RSI_30 = NamedFilterQuery(
165
275
  name="Momentum Growth Screener (RSI 30)",
@@ -178,7 +288,15 @@ MOMENTUM_GROWTH_RSI_30 = NamedFilterQuery(
178
288
  ],
179
289
  market_capitalization=[5e8, 1e12],
180
290
  order_by_desc="momentum",
181
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
291
+ country=[
292
+ "Germany",
293
+ "United states",
294
+ "France",
295
+ "United kingdom",
296
+ "Canada",
297
+ "Japan",
298
+ "Belgium",
299
+ ],
182
300
  )
183
301
  MOMENTUM_GROWTH_RSI_40 = NamedFilterQuery(
184
302
  name="Momentum Growth Screener (RSI 40)",
@@ -197,7 +315,15 @@ MOMENTUM_GROWTH_RSI_40 = NamedFilterQuery(
197
315
  ],
198
316
  market_capitalization=[5e8, 1e12],
199
317
  order_by_desc="momentum",
200
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
318
+ country=[
319
+ "Germany",
320
+ "United states",
321
+ "France",
322
+ "United kingdom",
323
+ "Canada",
324
+ "Japan",
325
+ "Belgium",
326
+ ],
201
327
  )
202
328
 
203
329
  GOLDEN_CROSS_LAST_SEVEN_DAYS = NamedFilterQuery(
@@ -209,7 +335,15 @@ GOLDEN_CROSS_LAST_SEVEN_DAYS = NamedFilterQuery(
209
335
  datetime.date.today(),
210
336
  ],
211
337
  order_by_desc="market_capitalization",
212
- country=["Germany", "United states", "France", "United kingdom", "Canada", "Japan"],
338
+ country=[
339
+ "Germany",
340
+ "United states",
341
+ "France",
342
+ "United kingdom",
343
+ "Canada",
344
+ "Japan",
345
+ "Belgium",
346
+ ],
213
347
  )
214
348
 
215
349
 
bullish/app/app.py CHANGED
@@ -12,6 +12,7 @@ from bearish.models.price.prices import Prices # type: ignore
12
12
  from bearish.models.query.query import AssetQuery, Symbols # type: ignore
13
13
  from streamlit_file_browser import st_file_browser # type: ignore
14
14
 
15
+ from bullish.analysis.backtest import BacktestResults
15
16
  from bullish.analysis.industry_views import get_industry_comparison_data
16
17
  from bullish.analysis.predefined_filters import PredefinedFilters
17
18
  from bullish.database.crud import BullishDb
@@ -26,7 +27,7 @@ from bullish.analysis.filter import (
26
27
  GeneralFilter,
27
28
  TechnicalAnalysisFilters,
28
29
  )
29
- from bullish.jobs.tasks import update, news, analysis
30
+ from bullish.jobs.tasks import update, news, analysis, backtest_signals
30
31
  from pydantic import BaseModel
31
32
 
32
33
  from bullish.utils.checks import (
@@ -212,6 +213,12 @@ def jobs() -> None:
212
213
  analysis(st.session_state.database_path, job_type="Update analysis")
213
214
  st.success("Data update job has been enqueued.")
214
215
  st.rerun()
216
+ with st.expander("Compute backtest signals"):
217
+ if st.button("Compute backtest signals"):
218
+ backtest_signals(
219
+ st.session_state.database_path, job_type="backtest signals"
220
+ )
221
+ st.rerun()
215
222
 
216
223
 
217
224
  @st.dialog("📥 Load", width="large")
@@ -336,7 +343,7 @@ def main() -> None:
336
343
  if st.session_state.database_path is None:
337
344
  dialog_pick_database()
338
345
  bearish_db_ = bearish_db(st.session_state.database_path)
339
- charts_tab, jobs_tab = st.tabs(["Charts", "Jobs"])
346
+ charts_tab, jobs_tab, backtests = st.tabs(["Charts", "Jobs", "Backtests"])
340
347
  if "data" not in st.session_state:
341
348
  st.session_state.data = load_analysis_data(bearish_db_)
342
349
 
@@ -387,6 +394,12 @@ def main() -> None:
387
394
  use_container_width=True,
388
395
  hide_index=True,
389
396
  )
397
+ with backtests:
398
+ results = bearish_db_.read_many_backtest_results()
399
+ backtest_results = BacktestResults(results=results)
400
+ with st.container():
401
+ figure = backtest_results.figure()
402
+ st.plotly_chart(figure)
390
403
 
391
404
 
392
405
  if __name__ == "__main__":
@@ -1,8 +1,8 @@
1
1
  """
2
2
 
3
- Revision ID: 3e1a14c41916
3
+ Revision ID: 12889a2cbd7d
4
4
  Revises: 040b15fba458
5
- Create Date: 2025-07-17 15:07:44.125783
5
+ Create Date: 2025-07-17 17:50:35.004785
6
6
 
7
7
  """
8
8
 
@@ -14,7 +14,7 @@ from sqlalchemy.dialects import sqlite
14
14
  import sqlmodel
15
15
 
16
16
  # revision identifiers, used by Alembic.
17
- revision: str = "3e1a14c41916"
17
+ revision: str = "12889a2cbd7d"
18
18
  down_revision: Union[str, None] = "040b15fba458"
19
19
  branch_labels: Union[str, Sequence[str], None] = None
20
20
  depends_on: Union[str, Sequence[str], None] = None
@@ -25,11 +25,12 @@ def upgrade() -> None:
25
25
  op.create_table(
26
26
  "signalseries",
27
27
  sa.Column("date", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
28
- sa.Column("value", sa.Float(), nullable=False),
29
28
  sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
29
  sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
+ sa.Column("value", sa.Float(), nullable=True),
31
31
  sa.PrimaryKeyConstraint("date", "name", "symbol"),
32
32
  )
33
+
33
34
  with op.batch_alter_table("analysis", schema=None) as batch_op:
34
35
  batch_op.drop_index(batch_op.f("ix_analysis_momentum_time_span"))
35
36
  batch_op.drop_column("momentum_time_span")
@@ -0,0 +1,48 @@
1
+ """
2
+
3
+ Revision ID: 6d252e23f543
4
+ Revises: 12889a2cbd7d
5
+ Create Date: 2025-07-27 16:46:41.885125
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "6d252e23f543"
18
+ down_revision: Union[str, None] = "12889a2cbd7d"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "backtestresult",
27
+ sa.Column("end", sa.Date(), nullable=False),
28
+ sa.Column("investment", sa.Float(), nullable=False),
29
+ sa.Column("start", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
+ sa.Column("holding_period", sa.Integer(), nullable=False),
31
+ sa.Column("extend_days", sa.Integer(), nullable=False),
32
+ sa.Column("percentage", sa.Integer(), nullable=False),
33
+ sa.Column("iterations", sa.Integer(), nullable=False),
34
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
35
+ sa.Column("data", sa.JSON(), nullable=True),
36
+ sa.PrimaryKeyConstraint(
37
+ "start", "holding_period", "extend_days", "percentage", "iterations", "name"
38
+ ),
39
+ )
40
+
41
+ # ### end Alembic commands ###
42
+
43
+
44
+ def downgrade() -> None:
45
+ # ### commands auto generated by Alembic - please adjust! ###
46
+
47
+ op.drop_table("backtestresult")
48
+ # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -8,13 +8,16 @@ from typing import TYPE_CHECKING, Any, List, Optional
8
8
  import pandas as pd
9
9
  from bearish.database.crud import BearishDb # type: ignore
10
10
  from bearish.models.base import Ticker # type: ignore
11
- from bearish.database.schemas import EarningsDateORM, EquityORM # type: ignore
11
+ from bearish.database.schemas import EarningsDateORM, EquityORM, PriceORM # type: ignore
12
12
  from bearish.types import Sources # type: ignore
13
+ from bearish.models.price.price import Price # type: ignore
14
+ from bearish.models.price.prices import Prices # type: ignore
13
15
  from pydantic import ConfigDict
14
16
  from sqlalchemy import Engine, create_engine, insert, delete, update
15
17
  from sqlmodel import Session, select
16
18
 
17
19
  from bullish.analysis.analysis import Analysis
20
+
18
21
  from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
19
22
  from bullish.analysis.indicators import SignalSeries
20
23
  from bullish.analysis.industry_views import Type, IndustryView
@@ -25,6 +28,7 @@ from bullish.database.schemas import (
25
28
  FilteredResultsORM,
26
29
  IndustryViewORM,
27
30
  SignalSeriesORM,
31
+ BacktestResultORM,
28
32
  )
29
33
  from bullish.database.scripts.upgrade import upgrade
30
34
  from bullish.exceptions import DatabaseFileNotFoundError
@@ -33,7 +37,7 @@ from bullish.interface.interface import BullishDbBase
33
37
  from bullish.jobs.models import JobTracker, JobTrackerStatus
34
38
 
35
39
  if TYPE_CHECKING:
36
- pass
40
+ from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
37
41
 
38
42
  logger = logging.getLogger(__name__)
39
43
 
@@ -284,3 +288,42 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
284
288
  SignalSeriesORM.date <= end_date, # type: ignore
285
289
  )
286
290
  return list(set(session.exec(stmt).all()))
291
+
292
+ def read_symbol_series(
293
+ self, symbol: str, start_date: date, end_date: Optional[date] = None
294
+ ) -> pd.DataFrame:
295
+
296
+ with Session(self._engine) as session:
297
+ query_ = select(PriceORM)
298
+ query_ = query_.where(PriceORM.symbol == symbol)
299
+ if end_date:
300
+ query_ = query_.where(
301
+ PriceORM.date >= start_date, PriceORM.date <= end_date
302
+ )
303
+ else:
304
+ query_ = query_.where(PriceORM.date >= start_date)
305
+ series = session.exec(query_).all()
306
+ prices = [Price.model_validate(serie) for serie in series]
307
+ return Prices(prices=prices).to_dataframe() # type: ignore
308
+
309
+ def write_many_backtest_results(
310
+ self, backtest_results: List["BacktestResult"]
311
+ ) -> None:
312
+ with Session(self._engine) as session:
313
+ stmt = (
314
+ insert(BacktestResultORM)
315
+ .prefix_with("OR REPLACE")
316
+ .values([a.model_dump() for a in backtest_results])
317
+ )
318
+ session.exec(stmt) # type: ignore
319
+ session.commit()
320
+
321
+ def read_many_backtest_results(
322
+ self, query: Optional["BacktestResultQuery"] = None
323
+ ) -> List["BacktestResult"]:
324
+ from bullish.analysis.backtest import BacktestResult
325
+
326
+ with Session(self._engine) as session:
327
+ stmt = select(BacktestResultORM)
328
+ results = session.exec(stmt).all()
329
+ return [BacktestResult.model_validate(r) for r in results]
@@ -3,6 +3,7 @@ from typing import Dict, Any
3
3
  from sqlmodel import Field, SQLModel
4
4
  from sqlalchemy import Column, JSON
5
5
  from bullish.analysis.analysis import Analysis
6
+ from bullish.analysis.backtest import BacktestResult
6
7
  from bullish.analysis.filter import FilteredResults
7
8
  from bullish.analysis.indicators import SignalSeries
8
9
  from bullish.analysis.industry_views import IndustryView
@@ -55,6 +56,7 @@ class SignalSeriesORM(SQLModel, SignalSeries, table=True):
55
56
  date: str = Field(primary_key=True) # type: ignore
56
57
  name: str = Field(primary_key=True)
57
58
  symbol: str = Field(primary_key=True)
59
+ value: float | None = Field(default=None, nullable=True) # type: ignore
58
60
 
59
61
 
60
62
  class IndustryViewORM(SQLModel, IndustryView, table=True):
@@ -70,3 +72,15 @@ class IndustryViewORM(SQLModel, IndustryView, table=True):
70
72
  industry_group: str | None = Field(default=None, nullable=True) # type: ignore
71
73
  sector: str | None = Field(default=None, nullable=True) # type: ignore
72
74
  type: str = Field(primary_key=True) # type: ignore
75
+
76
+
77
+ class BacktestResultORM(SQLModel, BacktestResult, table=True):
78
+ __tablename__ = "backtestresult"
79
+ __table_args__ = {"extend_existing": True} # noqa:RUF012
80
+ name: str = Field(primary_key=True)
81
+ start: str = Field(primary_key=True) # type: ignore
82
+ holding_period: int = Field(primary_key=True)
83
+ extend_days: int = Field(primary_key=True)
84
+ percentage: int = Field(primary_key=True)
85
+ iterations: int = Field(primary_key=True)
86
+ data: Dict[str, Any] = Field(sa_column=Column(JSON))
@@ -10,6 +10,7 @@ from bearish.types import Sources # type: ignore
10
10
 
11
11
 
12
12
  from bullish.analysis.analysis import Analysis, AnalysisView
13
+ from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
13
14
  from bullish.analysis.constants import Industry, Sector, IndustryGroup, Country
14
15
  from bullish.analysis.filter import FilterQuery, FilteredResults
15
16
  from bullish.analysis.indicators import SignalSeries
@@ -134,3 +135,17 @@ class BullishDbBase(BearishDbBase): # type: ignore
134
135
  def read_signal_series(
135
136
  self, name: str, start_date: date, end_date: date
136
137
  ) -> List[str]: ...
138
+
139
+ @abc.abstractmethod
140
+ def read_symbol_series(
141
+ self, symbol: str, start_date: date, end_date: Optional[date] = None
142
+ ) -> pd.DataFrame: ...
143
+ @abc.abstractmethod
144
+ def write_many_backtest_results(
145
+ self, backtest_results: List[BacktestResult]
146
+ ) -> None: ...
147
+
148
+ @abc.abstractmethod
149
+ def read_many_backtest_results(
150
+ self, query: Optional[BacktestResultQuery] = None
151
+ ) -> List[BacktestResult]: ...
bullish/jobs/models.py CHANGED
@@ -4,7 +4,7 @@ from typing import Literal, get_args
4
4
  import pandas as pd
5
5
  from pydantic import BaseModel, Field
6
6
 
7
- JobType = Literal["Update data", "Update analysis", "Fetching news"]
7
+ JobType = Literal["Update data", "Update analysis", "Fetching news", "backtest signals"]
8
8
  JobStatus = Literal["Completed", "Failed", "Running", "Started"]
9
9
  StatusIcon = ["✅ Completed", "❌ Failed", "🔄 Running", "🚀 Started"]
10
10
 
bullish/jobs/tasks.py CHANGED
@@ -11,7 +11,9 @@ from pathlib import Path
11
11
  from huey.api import Task # type: ignore
12
12
 
13
13
  from .models import JobTrackerStatus, JobTracker, JobType
14
- from ..analysis.analysis import run_analysis
14
+ from ..analysis.analysis import run_analysis, run_signal_series_analysis
15
+ from ..analysis.backtest import run_many_tests, BackTestConfig
16
+ from ..analysis.predefined_filters import predefined_filters
15
17
  from ..database.crud import BullishDb
16
18
  from bullish.analysis.filter import FilterUpdate
17
19
 
@@ -54,11 +56,13 @@ def job_tracker(func: Callable[..., Any]) -> Callable[..., Any]:
54
56
  def update(
55
57
  database_path: Path,
56
58
  job_type: JobType,
57
- symbols: List[str],
59
+ symbols: Optional[List[str]],
58
60
  update_query: FilterUpdate,
59
61
  task: Optional[Task] = None,
60
62
  ) -> None:
61
- logger.debug(f"Running update task for {len(symbols)} tickers.")
63
+ logger.debug(
64
+ f"Running update task for {len(symbols) if symbols else 'ALL'} tickers."
65
+ )
62
66
  if not update_query.update_analysis_only:
63
67
  bearish = Bearish(path=database_path, auto_migration=False)
64
68
  bearish.update_prices(
@@ -83,6 +87,18 @@ def analysis(
83
87
  run_analysis(bullish_db)
84
88
 
85
89
 
90
+ @huey.task(context=True) # type: ignore
91
+ @job_tracker
92
+ def backtest_signals(
93
+ database_path: Path,
94
+ job_type: JobType,
95
+ task: Optional[Task] = None,
96
+ ) -> None:
97
+ bullish_db = BullishDb(database_path=database_path)
98
+ run_signal_series_analysis(bullish_db)
99
+ run_many_tests(bullish_db, predefined_filters(), BackTestConfig())
100
+
101
+
86
102
  @huey.task(context=True) # type: ignore
87
103
  @job_tracker
88
104
  def news(
@@ -1,13 +1,13 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: bullishpy
3
- Version: 0.15.0
3
+ Version: 0.16.0
4
4
  Summary:
5
5
  Author: aan
6
6
  Author-email: andoludovic.andriamamonjy@gmail.com
7
7
  Requires-Python: >=3.12,<3.13
8
8
  Classifier: Programming Language :: Python :: 3
9
9
  Classifier: Programming Language :: Python :: 3.12
10
- Requires-Dist: bearishpy (>=0.22.0,<0.23.0)
10
+ Requires-Dist: bearishpy (>=0.26.0,<0.27.0)
11
11
  Requires-Dist: click (>=7.0,<=8.1)
12
12
  Requires-Dist: huey (>=2.5.3,<3.0.0)
13
13
  Requires-Dist: joblib (>=1.5.1,<2.0.0)
@@ -1,14 +1,15 @@
1
1
  bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- bullish/analysis/analysis.py,sha256=ag1wAfNDKWmPxJz7sd1mNhuh4CZfEleTp2_a3DIkEL4,20524
3
+ bullish/analysis/analysis.py,sha256=SqlLrbhMzm2dvK89Ip5KCyTOoLS41vNf1WJolevLKfc,21638
4
+ bullish/analysis/backtest.py,sha256=u3zzdP0IzpcxSzoI_wWT-YkmBulwVCFfhusttquTjSQ,14291
4
5
  bullish/analysis/constants.py,sha256=tVDPQEufH8lytMj4DdUdvXt79b7cvWaDwSUOpeqMWts,9851
5
- bullish/analysis/filter.py,sha256=kSG6fXZrnwqE1HvKQW6O3yVNV49qhVleer9M_7BIDpg,8381
6
- bullish/analysis/functions.py,sha256=ebCXxYeKlWhvcRRFMbX8E63bL7OquxyoWsYIy0o0SCA,14277
7
- bullish/analysis/indicators.py,sha256=S3pUsWn4MC_BKPhY1iFQDNNuHcqulez8jOsfWJdht_8,23470
6
+ bullish/analysis/filter.py,sha256=wBVkHhQPfG8wJ0TR3KLo_Bb7cRZFPyHlF4UK2mpG6S0,8495
7
+ bullish/analysis/functions.py,sha256=JSxYCuua_sMGLosN83j0GcY0Ls_gsE4OZLLGInxG9RA,14354
8
+ bullish/analysis/indicators.py,sha256=Dpps-v4bfQ3KF-C8zjMlArw1DJgZo-_EedYwihIiFJ0,24462
8
9
  bullish/analysis/industry_views.py,sha256=1B5V39Fm9rNQEsun1xrwELfOiKlGdTie0ZolS2UBh2w,6247
9
- bullish/analysis/predefined_filters.py,sha256=28e42hGaH7Qb6SPNeH7EK9YIhjERj-qpbY-7xLahvDM,8361
10
+ bullish/analysis/predefined_filters.py,sha256=5G75u4uDLHJR-iEtGZ2s-gkAqNIRUYu5msK8G-LWmD0,12074
10
11
  bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
11
- bullish/app/app.py,sha256=3xSO4x3T7BnD60M-AQM6-xkRVFLCWqGJ6DWqLmKxbzw,13663
12
+ bullish/app/app.py,sha256=rIvz4A0aJu7wGXBSCqO3EBA4QGhV8GPYVOYLOd9WjnY,14253
12
13
  bullish/cli.py,sha256=uYLZmGDAolZKWzduZ58bP-xul1adg0oKfeUQtZMXTvA,1958
13
14
  bullish/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
14
15
  bullish/database/alembic/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
@@ -19,11 +20,12 @@ bullish/database/alembic/versions/037dbd721317_.py,sha256=U7EA4odH3t9w0-J4FmvBUt
19
20
  bullish/database/alembic/versions/040b15fba458_.py,sha256=scSauB4wZe0sMFHOAMHkx-rNSF06Pn3D52QJ10PvERg,2314
20
21
  bullish/database/alembic/versions/08ac1116e055_.py,sha256=zMEiCbraMEAZItT4ibc3evAH7-7mkXpdgnZy4tPVYeg,27263
21
22
  bullish/database/alembic/versions/11d35a452b40_.py,sha256=j2PaU1RssLQ20OevGmBC7S9E9ocWiXpBue9SOS4AQoY,11521
23
+ bullish/database/alembic/versions/12889a2cbd7d_.py,sha256=LT_-dM-UstatA1xloCAQnvVgFXMyIAR_LpHPbQnWyVo,1668
22
24
  bullish/database/alembic/versions/17e51420e7ad_.py,sha256=xeiVIm1YUZb08opE9rocHZP1__9WQWXsKsXgeFV9cvs,2960
23
- bullish/database/alembic/versions/3e1a14c41916_.py,sha256=TmpfLl4dBw-CqHsnxwnnBc00nlUtHb0Hh8cc3yLBRME,1668
24
25
  bullish/database/alembic/versions/49c83f9eb5ac_.py,sha256=kCBItp7KmqpJ03roy5ikQjhefZia1oKgfZwournQDq8,3890
25
26
  bullish/database/alembic/versions/4b0a2f40b7d3_.py,sha256=G0K7w7pOPYjPZkXTB8LWhxoxuWBPcPwOfnubTBtdeEY,1827
26
27
  bullish/database/alembic/versions/5b10ee7604c1_.py,sha256=YlqaagPasR3RKASv7acME1jPS8p26VoTE2BvpOwdCpY,1463
28
+ bullish/database/alembic/versions/6d252e23f543_.py,sha256=izF-ejdXk733INkAokGqjA2U_M0_c1f_ruihZ-cgP7s,1525
27
29
  bullish/database/alembic/versions/73564b60fe24_.py,sha256=MTlDRDNHj3E9gK7IMeAzv2UxxxYtWiu3gI_9xTLE-wg,1008
28
30
  bullish/database/alembic/versions/b76079e9845f_.py,sha256=W8eeTABjI9tT1dp3hlK7g7tiKqDhmA8AoUX9Sw-ykLI,1165
29
31
  bullish/database/alembic/versions/bf6b86dd5463_.py,sha256=fKB8knCprGmiL6AEyFdhybVmB7QX_W4MPFF9sPzUrSM,1094
@@ -31,8 +33,8 @@ bullish/database/alembic/versions/d663166c531d_.py,sha256=U92l6QXqPniAYrPeu2Bt77
31
33
  bullish/database/alembic/versions/ec25c8fa449f_.py,sha256=8Yts74KEjK4jg20zIo90_0atw-sOBuE3hgCKl-rfS5E,2271
32
34
  bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
33
35
  bullish/database/alembic/versions/fc191121f522_.py,sha256=0sstF6TpAJ09-Mt-Vek9SdSWksvi4C58a5D92rBtuY8,1894
34
- bullish/database/crud.py,sha256=EIXCnhvPAxwldicUG4fwsdiXiq08TjXoZ8wSt27ph0g,10808
35
- bullish/database/schemas.py,sha256=ySTaw77X9rvLg-4PAKaOH6fPe8Bgi8kTtPc8DvsR6F8,2791
36
+ bullish/database/crud.py,sha256=g7gA0NKmAw7SLF-Wk-r5m1BJFOtcxDVC9idPYYKICjk,12565
37
+ bullish/database/schemas.py,sha256=3uRcNKuobqWC3mCfInzo-4KhrZp3DH6yx_0TEbLoHME,3428
36
38
  bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
37
39
  bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
38
40
  bullish/database/scripts/upgrade.py,sha256=-Gz7aFNPEt9y9e1kltqXE76-j_8QeNtet_VlwY5AWjo,806
@@ -41,14 +43,14 @@ bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
41
43
  bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
42
44
  bullish/figures/figures.py,sha256=imrvIIcL9L-z-3vzWK5hDEsNttZs60QxlFI-PLw0hJQ,4829
43
45
  bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
44
- bullish/interface/interface.py,sha256=QbjC_tWLbhvQr8vaiQl8ymrb6f_vR_KCxEmzJ5lA1Zg,4528
46
+ bullish/interface/interface.py,sha256=9bhXOKlYtoCbbbDBzmwlVK2HuAzfO-1kE8mk_MMG0MM,5046
45
47
  bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
48
  bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
47
- bullish/jobs/models.py,sha256=ndrGTMP08S57yGLGEG9TQt8Uw2slc4HvbG-TZtEEuN0,744
48
- bullish/jobs/tasks.py,sha256=V_b0c8_GQC0-KIxaHDlLFhtkclQJOsck0gXaW6OlC_w,3055
49
+ bullish/jobs/models.py,sha256=S2yvBf69lmt4U-5OU5CjXCMSw0s9Ubh9xkrB3k2qOZo,764
50
+ bullish/jobs/tasks.py,sha256=6IH45-JV3GFM6Q9uIsuLNjCvdburx3gJZJLI3x0Dn2Y,3593
49
51
  bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50
52
  bullish/utils/checks.py,sha256=Va10_xDVVnxYkOD2hafvyQ-TFV8FQpOkr4huJ7XgpDM,2188
51
- bullishpy-0.15.0.dist-info/METADATA,sha256=j-VPiqIl7LJyJ56b4oolBx4ZAsJljbxw-NA6wKzmir8,828
52
- bullishpy-0.15.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
53
- bullishpy-0.15.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
54
- bullishpy-0.15.0.dist-info/RECORD,,
53
+ bullishpy-0.16.0.dist-info/METADATA,sha256=db70yObjbrRpbGeqwITzNA1XGHjKZ_KXOOUTRimxgi0,828
54
+ bullishpy-0.16.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
55
+ bullishpy-0.16.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
56
+ bullishpy-0.16.0.dist-info/RECORD,,