bullishpy 0.15.0__tar.gz → 0.17.0__tar.gz

This diff reflects the changes between publicly released versions of the package as they appear in their public registry and is provided for informational purposes only.

Potentially problematic release: this version of bullishpy has been flagged as possibly problematic.

Files changed (55)
  1. {bullishpy-0.15.0 → bullishpy-0.17.0}/PKG-INFO +2 -2
  2. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/analysis.py +38 -9
  3. bullishpy-0.17.0/bullish/analysis/backtest.py +422 -0
  4. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/filter.py +5 -2
  5. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/functions.py +3 -1
  6. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/indicators.py +32 -3
  7. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/predefined_filters.py +143 -9
  8. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/app/app.py +15 -2
  9. bullishpy-0.15.0/bullish/database/alembic/versions/3e1a14c41916_.py → bullishpy-0.17.0/bullish/database/alembic/versions/12889a2cbd7d_.py +5 -4
  10. bullishpy-0.17.0/bullish/database/alembic/versions/6d252e23f543_.py +48 -0
  11. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/crud.py +48 -2
  12. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/schemas.py +14 -0
  13. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/interface/interface.py +15 -0
  14. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/jobs/models.py +1 -1
  15. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/jobs/tasks.py +21 -3
  16. {bullishpy-0.15.0 → bullishpy-0.17.0}/pyproject.toml +2 -2
  17. {bullishpy-0.15.0 → bullishpy-0.17.0}/README.md +0 -0
  18. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/__init__.py +0 -0
  19. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/__init__.py +0 -0
  20. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/constants.py +0 -0
  21. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/industry_views.py +0 -0
  22. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/app/__init__.py +0 -0
  23. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/cli.py +0 -0
  24. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/__init__.py +0 -0
  25. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/README +0 -0
  26. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/alembic.ini +0 -0
  27. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/env.py +0 -0
  28. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/script.py.mako +0 -0
  29. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/037dbd721317_.py +0 -0
  30. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/040b15fba458_.py +0 -0
  31. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/08ac1116e055_.py +0 -0
  32. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/11d35a452b40_.py +0 -0
  33. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/17e51420e7ad_.py +0 -0
  34. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/49c83f9eb5ac_.py +0 -0
  35. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/4b0a2f40b7d3_.py +0 -0
  36. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/5b10ee7604c1_.py +0 -0
  37. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/73564b60fe24_.py +0 -0
  38. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/b76079e9845f_.py +0 -0
  39. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/bf6b86dd5463_.py +0 -0
  40. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/d663166c531d_.py +0 -0
  41. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/ec25c8fa449f_.py +0 -0
  42. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/ee5baabb35f8_.py +0 -0
  43. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/alembic/versions/fc191121f522_.py +0 -0
  44. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/scripts/create_revision.py +0 -0
  45. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/scripts/stamp.py +0 -0
  46. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/scripts/upgrade.py +0 -0
  47. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/database/settings.py +0 -0
  48. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/exceptions.py +0 -0
  49. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/figures/__init__.py +0 -0
  50. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/figures/figures.py +0 -0
  51. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/interface/__init__.py +0 -0
  52. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/jobs/__init__.py +0 -0
  53. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/jobs/app.py +0 -0
  54. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/utils/__init__.py +0 -0
  55. {bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/utils/checks.py +0 -0

{bullishpy-0.15.0 → bullishpy-0.17.0}/PKG-INFO

@@ -1,13 +1,13 @@
  Metadata-Version: 2.3
  Name: bullishpy
- Version: 0.15.0
+ Version: 0.17.0
  Summary:
  Author: aan
  Author-email: andoludovic.andriamamonjy@gmail.com
  Requires-Python: >=3.12,<3.13
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: bearishpy (>=0.22.0,<0.23.0)
+ Requires-Dist: bearishpy (>=0.26.0,<0.27.0)
  Requires-Dist: click (>=7.0,<=8.1)
  Requires-Dist: huey (>=2.5.3,<3.0.0)
  Requires-Dist: joblib (>=1.5.1,<2.0.0)

{bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/analysis.py

@@ -28,7 +28,7 @@ from bearish.models.financials.balance_sheet import ( # type: ignore
      BalanceSheet,
      QuarterlyBalanceSheet,
  )
- from bearish.models.financials.base import Financials # type: ignore
+ from bearish.models.financials.base import Financials, FinancialsWithDate # type: ignore
  from bearish.models.financials.cash_flow import ( # type: ignore
      CashFlow,
      QuarterlyCashFlow,
@@ -404,6 +404,24 @@ class FundamentalAnalysis(YearlyFundamentalAnalysis, QuarterlyFundamentalAnalysi
              yearly_analysis.model_dump() | quarterly_analysis.model_dump()
          )

+     @classmethod
+     def compute_series(
+         cls, financials: FinancialsWithDate, ticker: Ticker
+     ) -> List[SignalSeries]:
+         fundamendal_analysis = FundamentalAnalysis.from_financials(financials, ticker)
+         fundamental_analysis_ = fundamendal_analysis.model_dump(
+             exclude_none=True, exclude_unset=True, exclude_defaults=True
+         )
+         fundamental_analysis_ = {
+             k: v for k, v in fundamental_analysis_.items() if v is True
+         }
+         return [
+             SignalSeries(
+                 name=k.upper(), symbol=ticker.symbol, value=v, date=financials.date
+             )
+             for k, v in fundamental_analysis_.items()
+         ]
+

  class AnalysisView(BaseModel):
      sector: Annotated[
@@ -488,6 +506,16 @@ class Analysis(AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis)
      )


+ def compute_financials_series(
+     financials_: Financials, ticker: Ticker
+ ) -> List[SignalSeries]:
+     financials_with_dates = FinancialsWithDate.from_financials(financials_)
+     series = []
+     for f in financials_with_dates:
+         series.extend(FundamentalAnalysis.compute_series(f, ticker))
+     return series
+
+
  def compute_analysis(database_path: Path, ticker: Ticker) -> Analysis:
      from bullish.database.crud import BullishDb

@@ -502,7 +530,9 @@ def compute_signal_series(database_path: Path, ticker: Ticker) -> List[SignalSer
      indicators = Indicators()
      prices = Prices.from_ticker(bullish_db, ticker)
      signal_series = indicators.compute_series(prices.to_dataframe(), ticker.symbol)
-     return signal_series
+     financials = Financials.from_ticker(bullish_db, ticker)
+     financial_series = compute_financials_series(financials, ticker)
+     return signal_series + financial_series


  def run_signal_series_analysis(bullish_db: "BullishDb") -> None:
@@ -511,17 +541,16 @@ def run_signal_series_analysis(bullish_db: "BullishDb") -> None:
      tickers = list(price_trackers.intersection(finance_trackers))
      parallel = Parallel(n_jobs=-1)

-     for batch_ticker in batched(tickers, 100):
-         start = time.perf_counter()
+     for batch_ticker in batched(tickers, 1):
          many_signal_series = parallel(
              delayed(compute_signal_series)(bullish_db.database_path, ticker)
              for ticker in batch_ticker
          )
-         bullish_db.write_signal_series(list(chain.from_iterable(many_signal_series)))
-         elapsed_time = time.perf_counter() - start
-         print(
-             f"Computed signal series for {len(batch_ticker)} tickers in {elapsed_time:.2f} seconds."
-         )
+         series = list(chain.from_iterable(many_signal_series))
+         try:
+             bullish_db.write_signal_series(series)
+         except Exception as e:
+             logger.error(f"Failed to compute signal series for {batch_ticker}: {e}")


  def run_analysis(bullish_db: "BullishDb") -> None:
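
A note on the analysis.py hunks above: FundamentalAnalysis.compute_series keeps only the boolean fundamental flags that dump as True and turns each one into a dated SignalSeries row, and compute_signal_series now appends those fundamental rows to the price-based signal series. A minimal, self-contained sketch of that filtering step, using plain dicts and hypothetical flag names rather than the bullish/bearish models:

    from datetime import date

    # Hypothetical model dump for one reporting date (flag names are illustrative only).
    fundamental_flags = {
        "positive_net_income": True,
        "positive_free_cash_flow": False,
        "growing_eps": True,
    }

    # Keep only flags that are strictly True, mirroring compute_series.
    kept = {k: v for k, v in fundamental_flags.items() if v is True}

    # One record per kept flag; SignalSeries is mimicked with a plain dict.
    records = [
        {"name": k.upper(), "symbol": "AAA", "value": v, "date": date(2024, 12, 31)}
        for k, v in kept.items()
    ]
    print(records)  # POSITIVE_NET_INCOME and GROWING_EPS survive the filter
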

bullishpy-0.17.0/bullish/analysis/backtest.py (new file)

@@ -0,0 +1,422 @@
+ import json
+ import logging
+ import random
+ from datetime import date, timedelta
+ from typing import TYPE_CHECKING, Optional, Union, List, Dict, Any
+
+ import numpy as np
+ import pandas as pd
+ from pydantic import BaseModel, Field, model_validator
+
+
+ import plotly.graph_objects as go
+
+ if TYPE_CHECKING:
+     from bullish.analysis.predefined_filters import NamedFilterQuery
+     from bullish.database.crud import BullishDb
+
+ logger = logging.getLogger(__name__)
+ COLOR = {
+     "mean": "#1f77b4", # A refined blue (Plotly default)
+     "upper": "#d62728", # Strong red
+     "lower": "#2ca02c", # Rich green
+     "median": "#ff7f0e", # Bright orange
+ }
+
+
+ class BacktestQueryBase(BaseModel):
+     name: str
+     table: str
+
+
+ class BacktestQueryDate(BacktestQueryBase):
+
+     start: date
+     end: date
+
+
+ class BacktestQueryRange(BacktestQueryBase):
+
+     min: float
+     max: float
+
+
+ class BacktestQuerySelection(BacktestQueryBase):
+
+     selections: List[str]
+
+     def to_selections(self) -> str:
+         return ", ".join([f"'{s}'" for s in self.selections])
+
+
+ class BacktestQueries(BaseModel):
+     queries: list[Union[BacktestQueryDate, BacktestQueryRange, BacktestQuerySelection]]
+
+     def to_query(self) -> str:
+         query_parts = []
+         for query in self.queries:
+             if isinstance(query, (BacktestQueryDate)):
+                 query_parts.append(
+                     f"SELECT symbol FROM {query.table} WHERE name='{query.name}' " # noqa: S608
+                     f"AND date >='{query.start}' AND date <='{query.end}'"
+                 )
+             if isinstance(query, (BacktestQueryRange)):
+                 query_parts.append(
+                     f"SELECT symbol FROM {query.table} WHERE " # noqa: S608
+                     f"{query.name} >= {query.min} AND {query.name} <= {query.max}"
+                 )
+             if isinstance(query, (BacktestQuerySelection)):
+                 query_parts.append(
+                     f"SELECT symbol FROM {query.table} WHERE " # noqa: S608
+                     f"{query.name} IN ({query.to_selections()})"
+                 )
+
+         if len(query_parts) == 1:
+             return query_parts[0]
+         else:
+             return " INTERSECT ".join(query_parts)
+
+
+ class ReturnPercentage(BaseModel):
+     return_percentage: float = Field(
+         default=12, description="Return percentage of the backtest"
+     )
+
+
+ class BaseBacktestResult(BaseModel):
+     start: date = Field(default=date.today() - timedelta(days=252))
+     end: date = Field(default=date.today())
+     investment: float = Field(default=1000)
+     holding_period: int = Field(default=30 * 3)
+     extend_days: int = Field(
+         default=5,
+         description="Extend the backtest by this many days if no symbols are found",
+     )
+     percentage: int = Field(default=12, description="Return percentage of the backtest")
+     iterations: int = Field(default=200, description="Number of iterations to run")
+
+
+ class BacktestResultQuery(BaseBacktestResult):
+     name: str
+
+
+ class BacktestResult(BacktestResultQuery):
+     data: Dict[str, Any]
+
+     def to_dataframe(self) -> pd.DataFrame:
+         return pd.read_json(json.dumps(self.data)).sort_index()
+
+
+ class BacktestResults(BaseModel):
+     results: List[BacktestResult]
+
+     def figure(self, type: str = "mean") -> go.Figure:
+         fig = go.Figure()
+         for result in self.results:
+             data = result.to_dataframe()[type]
+             fig.add_trace(
+                 go.Scatter(
+                     x=data.index,
+                     y=data,
+                     mode="lines",
+                     name=f"{result.name} ({type})",
+                     line={"width": 1},
+                     hovertemplate=(
+                         "Date: %{x}<br>"
+                         + "Price: %{y:.2f}<br>"
+                         + f"Percentage: {result.percentage}<br>"
+                         + f"Iterations: {result.iterations}<br>"
+                         + f"Investment: {result.investment}<extra></extra>"
+                     ),
+                 )
+             )
+         fig.update_layout(
+             height=800,
+             showlegend=True,
+             margin={"t": 60, "b": 40},
+         )
+
+         return fig
+
+
+ class BackTestConfig(BaseBacktestResult):
+     exit_strategy: ReturnPercentage = Field(default=ReturnPercentage)
+
+     def to_base_backtest_result(self) -> BaseBacktestResult:
+         return BaseBacktestResult(
+             start=self.start,
+             end=self.end,
+             investment=self.investment,
+             holding_period=self.holding_period,
+             extend_days=self.extend_days,
+             percentage=self.percentage,
+             iterations=self.iterations,
+         )
+
+
+ class Equity(BaseModel):
+     symbol: str
+     start: date
+     end: date
+     buy: float
+     sell: float
+     investment_in: float
+     investment_out: Optional[float] = None
+
+     def profit(self) -> float:
+         return (self.sell - self.buy) * (self.investment_in / self.buy)
+
+     def current_value(self) -> float:
+         return self.investment_in + self.profit()
+
+     def set_investment_out(self) -> None:
+         self.investment_out = self.current_value()
+
+
+ class BackTest(BaseModel):
+     equities: list[Equity] = Field(
+         default_factory=list, description="List of equities bought during the backtest"
+     )
+     end: date = Field(default=date.today(), description="End date of the backtest")
+
+     def valid(self) -> bool:
+         return bool(self.equities)
+
+     def total_profit(self) -> float:
+         return sum(equity.profit() for equity in self.equities)
+
+     def symbols(self) -> list[str]:
+         return [equity.symbol for equity in self.equities]
+
+     def show(self) -> None:
+         for eq in self.equities:
+             print(
+                 f"\n{eq.symbol} ({eq.type}): {eq.start}:{eq.investment_in} ({eq.buy}) - "
+                 f"{eq.end}:{eq.investment_out} ({eq.sell})"
+             )
+
+     def to_dataframe(self) -> pd.DataFrame:
+         prices = [
+             self.equities[0].investment_in,
+             *[e.investment_out for e in self.equities],
+         ]
+         symbols = [self.equities[0].symbol, *[e.symbol for e in self.equities]]
+         index = [self.equities[0].start, *[e.end for e in self.equities]]
+         buy = [self.equities[0].buy, *[e.buy for e in self.equities]]
+         sell = [self.equities[0].sell, *[e.sell for e in self.equities]]
+         data = pd.DataFrame(
+             np.array([prices, symbols, buy, sell]).T,
+             index=index,
+             columns=["prices", "symbols", "buy", "sell"],
+         )
+         data = data[~data.index.duplicated(keep="first")]
+         return data
+
+     def __hash__(self) -> int:
+         return hash(tuple(sorted(equity.symbol for equity in self.equities)))
+
+
+ class BackTests(BaseModel):
+     tests: list[BackTest] = Field(default_factory=list, description="List of backtests")
+     config: BackTestConfig
+     name: str
+
+     @model_validator(mode="after")
+     def _validate(self) -> "BackTests":
+         self.tests = list(set(self.tests)) # Remove duplicates
+         return self
+
+     def to_dataframe(self) -> pd.DataFrame:
+
+         data = (
+             pd.concat([t.to_dataframe() for t in self.tests if t.valid()], axis=1)
+             .sort_index()
+             .fillna(method="ffill")
+         )
+         data = data[~data.index.duplicated(keep="first")]
+         return data
+
+     def to_error(self) -> pd.DataFrame:
+         data_ = self.to_dataframe()
+         mean = data_.prices.astype(float).mean(axis=1).rename("mean")
+         std = data_.prices.astype(float).std(axis=1)
+         median = data_.prices.astype(float).median(axis=1).rename("median")
+         upper = (mean + std).rename("upper")
+         lower = (mean - std).rename("lower")
+         return pd.concat([mean, upper, lower, median], axis=1).sort_index()
+
+     def to_backtest_result(self) -> BacktestResult:
+
+         return BacktestResult.model_validate(
+             self.config.to_base_backtest_result().model_dump()
+             | {"data": json.loads(self.to_error().to_json()), "name": self.name}
+         )
+
+     def to_figure(self) -> go.Figure:
+
+         data_ = self.to_dataframe()
+         self.to_error()
+         column_chunks = [data_.iloc[:, i : i + 4] for i in range(0, data_.shape[1], 4)]
+         fig = go.Figure()
+         for data in column_chunks:
+             fig.add_trace(
+                 go.Scatter(
+                     x=data.index,
+                     y=data.prices.astype(float),
+                     mode="lines",
+                     showlegend=False,
+                     customdata=data[
+                         ["symbols", "sell", "buy"]
+                     ], # Include multiple overlay columns
+                     line={"color": "grey", "width": 0.5}, # normal grey
+                     opacity=0.5,
+                     hovertemplate=(
+                         "Date: %{x}<br>"
+                         + "Price: %{y:.2f}<br>"
+                         + "Symbols: %{customdata[0]}<br>"
+                         + "Sell: %{customdata[1]}<br>"
+                         + "Buy: %{customdata[2]}<extra></extra>"
+                     ),
+                 )
+             )
+         for name, column in self.to_error().items():
+             fig.add_trace(
+                 go.Scatter(
+                     x=column.index,
+                     y=column,
+                     mode="lines",
+                     line={"color": COLOR[name], "width": 1},
+                     showlegend=True,
+                     name=name,
+                 )
+             )
+         fig.update_layout(
+             title="Predefined filter performance",
+             xaxis_title="Date",
+             yaxis_title="Prices [Currency]",
+         )
+         fig.show()
+         return fig
+
+
+ def run_backtest( # noqa: C901, PLR0915
+     bullish_db: "BullishDb", named_filter: "NamedFilterQuery", config: BackTestConfig
+ ) -> BackTest:
+     equities = []
+     start_date = config.start
+     presence_delta = timedelta(days=config.holding_period)
+     investment = config.investment
+     exclude_symbols = []
+     while True:
+         symbols = []
+         while not symbols:
+             symbols = named_filter.get_backtesting_symbols(bullish_db, start_date)
+             symbols = [b for b in symbols if b not in exclude_symbols]
+             if symbols:
+                 break
+             start_date = start_date + timedelta(days=config.extend_days)
+             if start_date > config.end:
+                 logger.debug("No symbols found for the given date range.")
+                 break
+         if symbols:
+             symbol = random.choice(symbols) # noqa: S311
+             logger.debug(f"Found symbol: {symbol}, for date: {start_date}")
+             enter_position = start_date
+             end_position = None
+             counter = 0
+             buy_price = None
+             while True:
+
+                 data = bullish_db.read_symbol_series(
+                     symbol,
+                     start_date=enter_position + counter * presence_delta,
+                     end_date=enter_position + (counter + 1) * presence_delta,
+                 )
+                 if data.empty:
+                     logger.debug(f"No data found for symbol: {symbol}")
+                     exclude_symbols.append(symbol)
+                     end_position = start_date
+                     break
+                 data.index = data.index.tz_localize(None)
+                 if counter == 0:
+                     enter_position_timestamp = data.close.first_valid_index()
+                     enter_position = enter_position_timestamp.date()
+                     buy_price = data.close.loc[enter_position_timestamp]
+
+                 mask = data.close >= buy_price * (
+                     1 + config.percentage / (100 * (counter + 1))
+                 )
+                 mask_ = mask[mask == True] # noqa: E712
+
+                 if mask_.empty:
+                     if enter_position + (counter + 1) * presence_delta > config.end:
+                         end_position = data.close.index[-1].date()
+                         sell_price = data.close.iloc[-1]
+                         equity = Equity(
+                             symbol=symbol,
+                             start=enter_position,
+                             end=end_position,
+                             buy=buy_price,
+                             sell=sell_price,
+                             investment_in=investment,
+                         )
+                         equity.set_investment_out()
+                         equities.append(equity)
+                         investment = equity.current_value()
+                         end_position = config.end
+                         break
+                     counter += 1
+                     continue
+                 else:
+                     end_position_timestamp = data[mask].first_valid_index()
+                     end_position = end_position_timestamp.date()
+                     equity = Equity(
+                         symbol=symbol,
+                         start=enter_position,
+                         end=end_position,
+                         buy=buy_price,
+                         sell=data[mask].close.loc[end_position_timestamp],
+                         investment_in=investment,
+                     )
+                     equity.set_investment_out()
+                     equities.append(equity)
+                     investment = equity.current_value()
+                     break
+
+         start_date = end_position
+         if start_date >= config.end:
+             break
+     back_test = BackTest(equities=equities)
+     return back_test
+
+
+ def run_tests(
+     bullish_db: "BullishDb", named_filter: "NamedFilterQuery", config: BackTestConfig
+ ) -> BackTests:
+     return BackTests(
+         config=config,
+         name=named_filter.name,
+         tests=[
+             run_backtest(bullish_db, named_filter, config)
+             for _ in range(config.iterations)
+         ],
+     )
+
+
+ def run_many_tests(
+     bullish_db: "BullishDb",
+     named_filters: List["NamedFilterQuery"],
+     config: BackTestConfig,
+ ) -> None:
+     back_tests = []
+     for named_filter in named_filters:
+         try:
+             back_tests.append(
+                 run_tests(bullish_db, named_filter, config).to_backtest_result()
+             )
+         except Exception as e: # noqa: PERF203
+             logger.error(e)
+             continue
+
+     if back_tests:
+         bullish_db.write_many_backtest_results(back_tests)
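
For orientation, the core of run_backtest above is the exit rule and the compounding of proceeds: a position is closed once the close price reaches buy_price * (1 + percentage / (100 * (counter + 1))), so the required return relaxes the longer the position is held, and the sale proceeds become the investment for the next trade via Equity.current_value(). A minimal sketch of that arithmetic with the default percentage of 12 and illustrative prices (no database or price feed involved):

    percentage = 12        # BackTestConfig.percentage default
    buy_price = 50.0       # illustrative entry price
    investment = 1000.0    # BackTestConfig.investment default

    # Exit threshold per holding period, as in run_backtest's mask.
    for counter in range(3):
        target = buy_price * (1 + percentage / (100 * (counter + 1)))
        print(counter, round(target, 2))  # 0 -> 56.0, 1 -> 53.0, 2 -> 52.0

    # If the first period hits the 56.0 target, proceeds compound into the next
    # trade, mirroring Equity.profit() and Equity.current_value().
    sell_price = 56.0
    shares = investment / buy_price             # 20 shares
    profit = (sell_price - buy_price) * shares  # 120.0
    investment = investment + profit            # 1120.0 carried into the next trade
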

{bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/filter.py

@@ -82,12 +82,15 @@ PROPERTIES_GROUP = list(
          {*INCOME_GROUP, *CASH_FLOW_GROUP, *EPS_GROUP}
      )
  )
-
- GROUP_MAPPING: Dict[str, List[str]] = {
+ BOOLEAN_GROUP_MAPPING: Dict[str, List[str]] = {
      "income": INCOME_GROUP,
      "cash_flow": CASH_FLOW_GROUP,
      "eps": EPS_GROUP,
      "properties": PROPERTIES_GROUP,
+ }
+ GROUP_MAPPING: Dict[str, List[str]] = {
+     **BOOLEAN_GROUP_MAPPING,
+     "properties": PROPERTIES_GROUP,
      "country": list(get_args(Country)),
      "industry": list(get_args(Industry)),
      "industry_group": list(get_args(IndustryGroup)),

{bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/functions.py

@@ -322,7 +322,9 @@ class IndicatorFunction(BaseModel):
              except Exception as e:
                  logger.error(f"Fail to compute function {function.__name__}: {e}")
          if data_ is None:
-             raise ValueError("No data returned from indicator functions.")
+             raise ValueError(
+                 f"No data returned from indicator functions with expected columns {self.expected_columns}."
+             )
          if not set(self.expected_columns).issubset(set(data_.columns)):
              raise ValueError(
                  f"Expected columns {self.expected_columns} not found in data columns {data_.columns.tolist()}"

{bullishpy-0.15.0 → bullishpy-0.17.0}/bullish/analysis/indicators.py

@@ -58,6 +58,7 @@ class Signal(BaseModel):
      description: str
      date: Optional[date] = None
      value: Optional[float] = None
+     in_use_backtest: bool = False

      def is_date(self) -> bool:
          if self.type == Optional[date]:
@@ -113,28 +114,40 @@ class Indicator(BaseModel):

      def compute_series(self, data: pd.DataFrame, symbol: str) -> pd.DataFrame:
          series = []
-         results = self.function(data)
+         try:
+             results = self.function(data)
+         except Exception as e:
+             logger.error(
+                 f"Failed to compute indicator {self.name} for symbol {symbol}: {e}"
+             )
+             return pd.DataFrame()
          if not set(self.expected_columns).issubset(results.columns):
              raise ValueError(
                  f"Expected columns {self.expected_columns}, but got {results.columns.tolist()}"
              )
          for signal in self.signals:
+             if not signal.in_use_backtest:
+                 continue
              try:
                  series_ = signal.compute_series(results)
                  if signal.type == Optional[date]:
                      series__ = pd.DataFrame(series_[series_ == 1].rename("value"))
                  else:
-                     series__ = pd.DataFrame(series_.rename("value"))
+                     series__ = pd.DataFrame(
+                         series_[series_ != None].rename("value") # noqa: E711
+                     )

                  series__["name"] = signal.name
                  series__["date"] = series__.index.date # type: ignore
                  series__["symbol"] = symbol
                  series__ = series__.reset_index(drop=True)
                  series.append(series__)
-             except Exception as e: # noqa: PERF203
+             except Exception as e:
                  logger.error(
                      f"Fail to compute signal {signal.name} for indicator {self.name}: {e}"
                  )
+         if not series:
+             return pd.DataFrame()
          data = pd.concat(series).reset_index(drop=True)
          return data

@@ -181,6 +194,7 @@ def indicators_factory() -> List[Indicator]:
                      function=lambda d: cross_simple(
                          d.MACD_12_26_9, d.MACD_12_26_9_SIGNAL
                      ),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="MACD_12_26_9_BEARISH_CROSSOVER",
@@ -221,6 +235,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Long",
                      type=Optional[date],
                      function=lambda d: cross_value_series(d.RSI, 30),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="RSI_BULLISH_CROSSOVER_40",
@@ -228,6 +243,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Long",
                      type=Optional[date],
                      function=lambda d: cross_value_series(d.RSI, 40),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="RSI_BULLISH_CROSSOVER_45",
@@ -235,6 +251,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Long",
                      type=Optional[date],
                      function=lambda d: cross_value_series(d.RSI, 45),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="RSI_BEARISH_CROSSOVER",
@@ -249,6 +266,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Oversold",
                      type=Optional[date],
                      function=lambda d: (d.RSI < 30) & (d.RSI > 0),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="RSI_OVERBOUGHT",
@@ -322,6 +340,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Oversold",
                      type=Optional[date],
                      function=lambda d: cross_simple(d.SMA_50, d.SMA_200),
+                     in_use_backtest=True,
                  ),
                  Signal(
                      name="DEATH_CROSS",
@@ -336,6 +355,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Overbought",
                      type=Optional[date],
                      function=lambda d: d.SMA_50 > d.SMA_200,
+                     in_use_backtest=True,
                      processing=ProcessingFunction(date=find_last_true_run_start),
                  ),
                  Signal(
@@ -344,6 +364,7 @@ def indicators_factory() -> List[Indicator]:
                      type_info="Overbought",
                      type=Optional[date],
                      function=lambda d: d.SMA_50 < d.CLOSE,
+                     in_use_backtest=True,
                      processing=ProcessingFunction(date=find_last_true_run_start),
                  ),
              ],
@@ -562,6 +583,14 @@
  class Indicators(BaseModel):
      indicators: List[Indicator] = Field(default_factory=indicators_factory)

+     def in_use_backtest(self) -> List[str]:
+         return [
+             signal.name.lower()
+             for indicator in self.indicators
+             for signal in indicator.signals
+             if signal.in_use_backtest
+         ]
+
      def _compute(self, data: pd.DataFrame) -> None:
          for indicator in self.indicators:
              try:
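
Taken together, the indicators.py hunks add an in_use_backtest flag to Signal: Indicator.compute_series now skips signals without the flag, and Indicators.in_use_backtest() returns the lowercased names of the flagged signals. A minimal sketch of that selection with simplified stand-in classes (not the bullish models; signal names are illustrative):

    from dataclasses import dataclass
    from typing import List

    @dataclass
    class Signal:                      # simplified stand-in
        name: str
        in_use_backtest: bool = False

    @dataclass
    class Indicator:
        signals: List[Signal]

    indicators = [
        Indicator(signals=[Signal("GOLDEN_CROSS", in_use_backtest=True), Signal("DEATH_CROSS")]),
        Indicator(signals=[Signal("RSI_OVERSOLD", in_use_backtest=True)]),
    ]

    # Same comprehension shape as Indicators.in_use_backtest().
    in_use = [s.name.lower() for i in indicators for s in i.signals if s.in_use_backtest]
    print(in_use)  # ['golden_cross', 'rsi_oversold']
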