bullishpy 0.14.0__py3-none-any.whl → 0.15.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic; see the package's advisory page on the registry for more details.

@@ -1,6 +1,6 @@
1
1
  import logging
2
2
  import time
3
- from itertools import batched
3
+ from itertools import batched, chain
4
4
  from pathlib import Path
5
5
  from typing import (
6
6
  Annotated,
@@ -42,7 +42,7 @@ from bearish.models.query.query import AssetQuery, Symbols # type: ignore
42
42
  from bearish.types import TickerOnlySources # type: ignore
43
43
  from pydantic import BaseModel, BeforeValidator, Field, create_model
44
44
 
45
- from bullish.analysis.indicators import Indicators, IndicatorModels
45
+ from bullish.analysis.indicators import Indicators, IndicatorModels, SignalSeries
46
46
  from joblib import Parallel, delayed # type: ignore
47
47
 
48
48
  from bullish.analysis.industry_views import compute_industry_view
@@ -130,7 +130,7 @@ class TechnicalAnalysis(*TechnicalAnalysisModels): # type: ignore
130
130
  )
131
131
  return cls()
132
132
  try:
133
- res = Indicators().to_dict(prices)
133
+ res = Indicators().compute(prices)
134
134
  return cls(last_price=prices.close.iloc[-1], **res)
135
135
  except Exception as e:
136
136
  logger.error(
@@ -495,6 +495,35 @@ def compute_analysis(database_path: Path, ticker: Ticker) -> Analysis:
495
495
  return Analysis.from_ticker(bullish_db, ticker)
496
496
 
497
497
 
498
+ def compute_signal_series(database_path: Path, ticker: Ticker) -> List[SignalSeries]:
499
+ from bullish.database.crud import BullishDb
500
+
501
+ bullish_db = BullishDb(database_path=database_path)
502
+ indicators = Indicators()
503
+ prices = Prices.from_ticker(bullish_db, ticker)
504
+ signal_series = indicators.compute_series(prices.to_dataframe(), ticker.symbol)
505
+ return signal_series
506
+
507
+
508
+ def run_signal_series_analysis(bullish_db: "BullishDb") -> None:
509
+ price_trackers = set(bullish_db._read_tracker(TrackerQuery(), PriceTracker))
510
+ finance_trackers = set(bullish_db._read_tracker(TrackerQuery(), FinancialsTracker))
511
+ tickers = list(price_trackers.intersection(finance_trackers))
512
+ parallel = Parallel(n_jobs=-1)
513
+
514
+ for batch_ticker in batched(tickers, 100):
515
+ start = time.perf_counter()
516
+ many_signal_series = parallel(
517
+ delayed(compute_signal_series)(bullish_db.database_path, ticker)
518
+ for ticker in batch_ticker
519
+ )
520
+ bullish_db.write_signal_series(list(chain.from_iterable(many_signal_series)))
521
+ elapsed_time = time.perf_counter() - start
522
+ print(
523
+ f"Computed signal series for {len(batch_ticker)} tickers in {elapsed_time:.2f} seconds."
524
+ )
525
+
526
+
498
527
  def run_analysis(bullish_db: "BullishDb") -> None:
499
528
  compute_industry_view(bullish_db)
500
529
  price_trackers = set(bullish_db._read_tracker(TrackerQuery(), PriceTracker))
@@ -15,10 +15,17 @@ except Exception:
15
15
  logger.warning("Talib is not installed, skipping analysis")
16
16
 
17
17
 
18
+ def cross_simple(
19
+ series_a: pd.Series, series_b: pd.Series, above: bool = True
20
+ ) -> pd.Series:
21
+ crossing = ta.cross(series_a=series_a, series_b=series_b, above=above)
22
+ return crossing # type: ignore
23
+
24
+
18
25
  def cross(
19
26
  series_a: pd.Series, series_b: pd.Series, above: bool = True
20
27
  ) -> Optional[date]:
21
- crossing = ta.cross(series_a=series_a, series_b=series_b, above=above)
28
+ crossing = cross_simple(series_a=series_a, series_b=series_b, above=above)
22
29
  if not crossing[crossing == 1].index.empty:
23
30
  return crossing[crossing == 1].last_valid_index().date() # type: ignore
24
31
  return None
@@ -31,10 +38,10 @@ def cross_value(series: pd.Series, number: int, above: bool = True) -> Optional[
31
38
  def cross_value_series(
32
39
  series_a: pd.Series, number: int, above: bool = True
33
40
  ) -> pd.Series:
34
- crossing = ta.cross(
35
- series_a=series_a, series_b=pd.Series(number, index=series_a.index), above=above
41
+ crossing = cross_simple(
42
+ series_a, pd.Series(number, index=series_a.index), above=above
36
43
  )
37
- return crossing # type: ignore
44
+ return crossing
38
45
 
39
46
 
40
47
  def compute_adx(data: pd.DataFrame) -> pd.DataFrame:
@@ -302,14 +309,6 @@ def price_above_sma50(data: pd.DataFrame) -> Optional[date]:
302
309
  return date_1
303
310
 
304
311
 
305
- def momentum(data: pd.DataFrame) -> Optional[date]:
306
- date_1 = find_last_true_run_start(data.SMA_50 < data.CLOSE)
307
- date_2 = find_last_true_run_start(data.SMA_200 < data.SMA_50)
308
- if date_1 is None or date_2 is None:
309
- return None
310
- return max(date_1, date_2)
311
-
312
-
313
312
  class IndicatorFunction(BaseModel):
314
313
  expected_columns: list[str]
315
314
  functions: list[Callable[[pd.DataFrame], pd.DataFrame]]
@@ -1,14 +1,12 @@
1
1
  import logging
2
2
  from datetime import date
3
- from typing import Optional, List, Callable, Any, Literal, Dict, Union
3
+ from typing import Optional, List, Callable, Any, Literal, Dict
4
4
 
5
5
  import numpy as np
6
6
  import pandas as pd
7
7
  from pydantic import BaseModel, Field, PrivateAttr, create_model
8
8
 
9
9
  from bullish.analysis.functions import (
10
- cross,
11
- cross_value,
12
10
  ADX,
13
11
  MACD,
14
12
  RSI,
@@ -19,21 +17,44 @@ from bullish.analysis.functions import (
19
17
  SMA,
20
18
  ADOSC,
21
19
  PRICE,
22
- momentum,
23
- sma_50_above_sma_200,
24
- price_above_sma50,
20
+ cross_simple,
21
+ cross_value_series,
22
+ find_last_true_run_start,
25
23
  )
26
24
 
27
25
  logger = logging.getLogger(__name__)
28
26
  SignalType = Literal["Short", "Long", "Oversold", "Overbought", "Value"]
29
27
 
30
28
 
29
+ def _last_date(d: pd.Series) -> Optional[date]:
30
+ d_valid = d[d == 1]
31
+ if d_valid.empty:
32
+ return None
33
+ last_index = d_valid.last_valid_index()
34
+ return last_index.date() if last_index is not None else None # type: ignore
35
+
36
+
37
+ class ProcessingFunction(BaseModel):
38
+ date: Callable[[pd.Series], Optional[date]] = Field(default=_last_date)
39
+ number: Callable[[pd.Series], Optional[float]] = Field(
40
+ default=lambda d: d.iloc[-1] if not d.dropna().empty else None
41
+ )
42
+
43
+
44
+ class SignalSeries(BaseModel):
45
+ name: str
46
+ date: date
47
+ value: float
48
+ symbol: str
49
+
50
+
31
51
  class Signal(BaseModel):
32
52
  name: str
33
53
  type_info: SignalType
34
54
  type: Any
35
55
  range: Optional[List[float]] = None
36
- function: Callable[[pd.DataFrame], Optional[Union[date, float]]]
56
+ function: Callable[[pd.DataFrame], pd.Series]
57
+ processing: ProcessingFunction = Field(default_factory=ProcessingFunction)
37
58
  description: str
38
59
  date: Optional[date] = None
39
60
  value: Optional[float] = None
@@ -46,11 +67,22 @@ class Signal(BaseModel):
46
67
  else:
47
68
  raise NotImplementedError
48
69
 
70
+ def apply_function(self, data: pd.DataFrame) -> pd.Series:
71
+ result = self.function(data)
72
+ if not isinstance(result, pd.Series):
73
+ raise ValueError(
74
+ f"Function for signal {self.name} must return a pandas Series"
75
+ )
76
+ return result
77
+
49
78
  def compute(self, data: pd.DataFrame) -> None:
50
79
  if self.is_date():
51
- self.date = self.function(data) # type: ignore
80
+ self.date = self.processing.date(self.apply_function(data))
52
81
  else:
53
- self.value = self.function(data) # type: ignore
82
+ self.value = self.processing.number(self.apply_function(data))
83
+
84
+ def compute_series(self, data: pd.DataFrame) -> pd.Series:
85
+ return self.apply_function(data)
54
86
 
55
87
 
56
88
  class Indicator(BaseModel):
@@ -68,9 +100,9 @@ class Indicator(BaseModel):
68
100
  f"Expected columns {self.expected_columns}, but got {results.columns.tolist()}"
69
101
  )
70
102
  self._data = results
71
- self._signals()
103
+ self.compute_signals()
72
104
 
73
- def _signals(self) -> None:
105
+ def compute_signals(self) -> None:
74
106
  for signal in self.signals:
75
107
  try:
76
108
  signal.compute(self._data)
@@ -79,6 +111,33 @@ class Indicator(BaseModel):
79
111
  f"Fail to compute signal {signal.name} for indicator {self.name}: {e}"
80
112
  )
81
113
 
114
+ def compute_series(self, data: pd.DataFrame, symbol: str) -> pd.DataFrame:
115
+ series = []
116
+ results = self.function(data)
117
+ if not set(self.expected_columns).issubset(results.columns):
118
+ raise ValueError(
119
+ f"Expected columns {self.expected_columns}, but got {results.columns.tolist()}"
120
+ )
121
+ for signal in self.signals:
122
+ try:
123
+ series_ = signal.compute_series(results)
124
+ if signal.type == Optional[date]:
125
+ series__ = pd.DataFrame(series_[series_ == 1].rename("value"))
126
+ else:
127
+ series__ = pd.DataFrame(series_.rename("value"))
128
+
129
+ series__["name"] = signal.name
130
+ series__["date"] = series__.index.date # type: ignore
131
+ series__["symbol"] = symbol
132
+ series__ = series__.reset_index(drop=True)
133
+ series.append(series__)
134
+ except Exception as e: # noqa: PERF203
135
+ logger.error(
136
+ f"Fail to compute signal {signal.name} for indicator {self.name}: {e}"
137
+ )
138
+ data = pd.concat(series).reset_index(drop=True)
139
+ return data
140
+
82
141
 
83
142
  def indicators_factory() -> List[Indicator]:
84
143
  return [
@@ -93,18 +152,14 @@ def indicators_factory() -> List[Indicator]:
93
152
  description="ADX 14 Long Signal",
94
153
  type_info="Long",
95
154
  type=Optional[date],
96
- function=lambda d: d[
97
- (d.ADX_14 > 20) & (d.PLUS_DI > d.MINUS_DI)
98
- ].last_valid_index(),
155
+ function=lambda d: (d.ADX_14 > 20) & (d.PLUS_DI > d.MINUS_DI),
99
156
  ),
100
157
  Signal(
101
158
  name="ADX_14_SHORT",
102
159
  description="ADX 14 Short Signal",
103
160
  type_info="Short",
104
161
  type=Optional[date],
105
- function=lambda d: d[
106
- (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI)
107
- ].last_valid_index(),
162
+ function=lambda d: (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI),
108
163
  ),
109
164
  ],
110
165
  ),
@@ -123,28 +178,34 @@ def indicators_factory() -> List[Indicator]:
123
178
  description="MACD 12-26-9 Bullish Crossover",
124
179
  type_info="Long",
125
180
  type=Optional[date],
126
- function=lambda d: cross(d.MACD_12_26_9, d.MACD_12_26_9_SIGNAL),
181
+ function=lambda d: cross_simple(
182
+ d.MACD_12_26_9, d.MACD_12_26_9_SIGNAL
183
+ ),
127
184
  ),
128
185
  Signal(
129
186
  name="MACD_12_26_9_BEARISH_CROSSOVER",
130
187
  description="MACD 12-26-9 Bearish Crossover",
131
188
  type_info="Short",
132
189
  type=Optional[date],
133
- function=lambda d: cross(d.MACD_12_26_9_SIGNAL, d.MACD_12_26_9),
190
+ function=lambda d: cross_simple(
191
+ d.MACD_12_26_9_SIGNAL, d.MACD_12_26_9
192
+ ),
134
193
  ),
135
194
  Signal(
136
195
  name="MACD_12_26_9_ZERO_LINE_CROSS_UP",
137
196
  description="MACD 12-26-9 Zero Line Cross Up",
138
197
  type_info="Long",
139
198
  type=Optional[date],
140
- function=lambda d: cross_value(d.MACD_12_26_9, 0),
199
+ function=lambda d: cross_value_series(d.MACD_12_26_9, 0),
141
200
  ),
142
201
  Signal(
143
202
  name="MACD_12_26_9_ZERO_LINE_CROSS_DOWN",
144
203
  description="MACD 12-26-9 Zero Line Cross Down",
145
204
  type_info="Long",
146
205
  type=Optional[date],
147
- function=lambda d: cross_value(d.MACD_12_26_9, 0, above=False),
206
+ function=lambda d: cross_value_series(
207
+ d.MACD_12_26_9, 0, above=False
208
+ ),
148
209
  ),
149
210
  ],
150
211
  ),
@@ -159,53 +220,49 @@ def indicators_factory() -> List[Indicator]:
159
220
  description="RSI Bullish Crossover",
160
221
  type_info="Long",
161
222
  type=Optional[date],
162
- function=lambda d: cross_value(d.RSI, 30),
223
+ function=lambda d: cross_value_series(d.RSI, 30),
163
224
  ),
164
225
  Signal(
165
226
  name="RSI_BULLISH_CROSSOVER_40",
166
227
  description="RSI Bullish Crossover 40",
167
228
  type_info="Long",
168
229
  type=Optional[date],
169
- function=lambda d: cross_value(d.RSI, 40),
230
+ function=lambda d: cross_value_series(d.RSI, 40),
170
231
  ),
171
232
  Signal(
172
233
  name="RSI_BULLISH_CROSSOVER_45",
173
234
  description="RSI Bullish Crossover 45",
174
235
  type_info="Long",
175
236
  type=Optional[date],
176
- function=lambda d: cross_value(d.RSI, 45),
237
+ function=lambda d: cross_value_series(d.RSI, 45),
177
238
  ),
178
239
  Signal(
179
240
  name="RSI_BEARISH_CROSSOVER",
180
241
  description="RSI Bearish Crossover",
181
242
  type_info="Short",
182
243
  type=Optional[date],
183
- function=lambda d: cross_value(d.RSI, 70, above=False),
244
+ function=lambda d: cross_value_series(d.RSI, 70, above=False),
184
245
  ),
185
246
  Signal(
186
247
  name="RSI_OVERSOLD",
187
248
  description="RSI Oversold Signal",
188
249
  type_info="Oversold",
189
250
  type=Optional[date],
190
- function=lambda d: d[(d.RSI < 30) & (d.RSI > 0)].last_valid_index(),
251
+ function=lambda d: (d.RSI < 30) & (d.RSI > 0),
191
252
  ),
192
253
  Signal(
193
254
  name="RSI_OVERBOUGHT",
194
255
  description="RSI Overbought Signal",
195
256
  type_info="Overbought",
196
257
  type=Optional[date],
197
- function=lambda d: d[
198
- (d.RSI < 100) & (d.RSI > 70)
199
- ].last_valid_index(),
258
+ function=lambda d: (d.RSI < 100) & (d.RSI > 70),
200
259
  ),
201
260
  Signal(
202
261
  name="RSI_NEUTRAL",
203
262
  description="RSI Neutral Signal",
204
263
  type_info="Overbought",
205
264
  type=Optional[date],
206
- function=lambda d: d[
207
- (d.RSI < 60) & (d.RSI > 40)
208
- ].last_valid_index(),
265
+ function=lambda d: (d.RSI < 60) & (d.RSI > 40),
209
266
  ),
210
267
  ],
211
268
  ),
@@ -220,18 +277,14 @@ def indicators_factory() -> List[Indicator]:
220
277
  description="Stoch Oversold Signal",
221
278
  type_info="Oversold",
222
279
  type=Optional[date],
223
- function=lambda d: d[
224
- (d.SLOW_K < 20) & (d.SLOW_K > 0)
225
- ].last_valid_index(),
280
+ function=lambda d: (d.SLOW_K < 20) & (d.SLOW_K > 0),
226
281
  ),
227
282
  Signal(
228
283
  name="STOCH_OVERBOUGHT",
229
284
  description="Stoch Overbought Signal",
230
285
  type_info="Overbought",
231
286
  type=Optional[date],
232
- function=lambda d: d[
233
- (d.SLOW_K < 100) & (d.SLOW_K > 80)
234
- ].last_valid_index(),
287
+ function=lambda d: (d.SLOW_K < 100) & (d.SLOW_K > 80),
235
288
  ),
236
289
  ],
237
290
  ),
@@ -246,14 +299,14 @@ def indicators_factory() -> List[Indicator]:
246
299
  description="MFI Oversold Signal",
247
300
  type_info="Oversold",
248
301
  type=Optional[date],
249
- function=lambda d: d[(d.MFI < 20)].last_valid_index(),
302
+ function=lambda d: (d.MFI < 20),
250
303
  ),
251
304
  Signal(
252
305
  name="MFI_OVERBOUGHT",
253
306
  description="MFI Overbought Signal",
254
307
  type_info="Overbought",
255
308
  type=Optional[date],
256
- function=lambda d: d[(d.MFI > 80)].last_valid_index(),
309
+ function=lambda d: (d.MFI > 80),
257
310
  ),
258
311
  ],
259
312
  ),
@@ -268,35 +321,30 @@ def indicators_factory() -> List[Indicator]:
268
321
  description="Golden cross: SMA 50 crosses above SMA 200",
269
322
  type_info="Oversold",
270
323
  type=Optional[date],
271
- function=lambda d: cross(d.SMA_50, d.SMA_200),
324
+ function=lambda d: cross_simple(d.SMA_50, d.SMA_200),
272
325
  ),
273
326
  Signal(
274
327
  name="DEATH_CROSS",
275
328
  description="Death cross: SMA 50 crosses below SMA 200",
276
329
  type_info="Overbought",
277
330
  type=Optional[date],
278
- function=lambda d: cross(d.SMA_50, d.SMA_200, above=False),
279
- ),
280
- Signal(
281
- name="MOMENTUM_TIME_SPAN",
282
- description="Momentum time span",
283
- type_info="Overbought",
284
- type=Optional[date],
285
- function=lambda d: momentum(d),
331
+ function=lambda d: cross_simple(d.SMA_50, d.SMA_200, above=False),
286
332
  ),
287
333
  Signal(
288
334
  name="SMA_50_ABOVE_SMA_200",
289
335
  description="SMA 50 is above SMA 200",
290
336
  type_info="Overbought",
291
337
  type=Optional[date],
292
- function=lambda d: sma_50_above_sma_200(d),
338
+ function=lambda d: d.SMA_50 > d.SMA_200,
339
+ processing=ProcessingFunction(date=find_last_true_run_start),
293
340
  ),
294
341
  Signal(
295
342
  name="PRICE_ABOVE_SMA_50",
296
343
  description="Price is above SMA 50",
297
344
  type_info="Overbought",
298
345
  type=Optional[date],
299
- function=lambda d: price_above_sma50(d),
346
+ function=lambda d: d.SMA_50 < d.CLOSE,
347
+ processing=ProcessingFunction(date=find_last_true_run_start),
300
348
  ),
301
349
  ],
302
350
  ),
@@ -311,39 +359,44 @@ def indicators_factory() -> List[Indicator]:
311
359
  description="Current price is lower than the 200-day high",
312
360
  type_info="Oversold",
313
361
  type=Optional[date],
314
- function=lambda d: d[
315
- 0.6 * d["200_DAY_HIGH"] > d.LAST_PRICE
316
- ].last_valid_index(),
362
+ function=lambda d: 0.6 * d["200_DAY_HIGH"] > d.LAST_PRICE,
317
363
  ),
318
364
  Signal(
319
365
  name="LOWER_THAN_20_DAY_HIGH",
320
366
  description="Current price is lower than the 20-day high",
321
367
  type_info="Oversold",
322
368
  type=Optional[date],
323
- function=lambda d: d[
324
- 0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE
325
- ].last_valid_index(),
369
+ function=lambda d: 0.6 * d["20_DAY_HIGH"] > d.LAST_PRICE,
326
370
  ),
327
371
  Signal(
328
372
  name="MEDIAN_WEEKLY_GROWTH",
329
373
  description="Median weekly growth",
330
374
  type_info="Oversold",
331
375
  type=Optional[float],
332
- function=lambda d: np.median(d.WEEKLY_GROWTH.unique()),
376
+ function=lambda d: d.WEEKLY_GROWTH,
377
+ processing=ProcessingFunction(
378
+ number=lambda v: np.median(v.unique())
379
+ ),
333
380
  ),
334
381
  Signal(
335
382
  name="MEDIAN_MONTHLY_GROWTH",
336
383
  description="Median monthly growth",
337
384
  type_info="Oversold",
338
385
  type=Optional[float],
339
- function=lambda d: np.median(d.MONTHLY_GROWTH.unique()),
386
+ function=lambda d: d.MONTHLY_GROWTH,
387
+ processing=ProcessingFunction(
388
+ number=lambda v: np.median(v.unique())
389
+ ),
340
390
  ),
341
391
  Signal(
342
392
  name="MEDIAN_YEARLY_GROWTH",
343
393
  description="Median yearly growth",
344
394
  type_info="Oversold",
345
395
  type=Optional[float],
346
- function=lambda d: np.median(d.YEARLY_GROWTH.unique()),
396
+ function=lambda d: d.YEARLY_GROWTH,
397
+ processing=ProcessingFunction(
398
+ number=lambda v: np.median(v.unique())
399
+ ),
347
400
  ),
348
401
  ],
349
402
  ),
@@ -358,49 +411,61 @@ def indicators_factory() -> List[Indicator]:
358
411
  type_info="Value",
359
412
  description="Median daily Rate of Change of the last 30 days",
360
413
  type=Optional[float],
361
- function=lambda d: np.median(d.ROC_1.tolist()[-30:]),
414
+ function=lambda d: d.ROC_1,
415
+ processing=ProcessingFunction(
416
+ number=lambda v: np.median(v.tolist()[-30:])
417
+ ),
362
418
  ),
363
419
  Signal(
364
420
  name="MEDIAN_RATE_OF_CHANGE_7_4",
365
421
  type_info="Value",
366
422
  description="Median weekly Rate of Change of the last 4 weeks",
367
423
  type=Optional[float],
368
- function=lambda d: np.median(d.ROC_7.tolist()[-4:]),
424
+ function=lambda d: d.ROC_7,
425
+ processing=ProcessingFunction(
426
+ number=lambda v: np.median(v.tolist()[-4:])
427
+ ),
369
428
  ),
370
429
  Signal(
371
430
  name="MEDIAN_RATE_OF_CHANGE_7_12",
372
431
  type_info="Value",
373
432
  description="Median weekly Rate of Change of the last 12 weeks",
374
433
  type=Optional[float],
375
- function=lambda d: np.median(d.ROC_7.tolist()[-12:]),
434
+ function=lambda d: d.ROC_7,
435
+ processing=ProcessingFunction(
436
+ number=lambda v: np.median(v.tolist()[-12:])
437
+ ),
376
438
  ),
377
439
  Signal(
378
440
  name="MEDIAN_RATE_OF_CHANGE_30",
379
441
  type_info="Value",
380
442
  description="Median monthly Rate of Change of the last 12 Months",
381
443
  type=Optional[float],
382
- function=lambda d: np.median(d.ROC_30.tolist()[-12:]),
444
+ function=lambda d: d.ROC_30,
445
+ processing=ProcessingFunction(
446
+ number=lambda v: np.median(v.tolist()[-12:])
447
+ ),
383
448
  ),
384
449
  Signal(
385
450
  name="RATE_OF_CHANGE_30",
386
451
  type_info="Value",
387
452
  description="30-day Rate of Change",
388
453
  type=Optional[float],
389
- function=lambda d: d.ROC_30.tolist()[-1],
454
+ function=lambda d: d.ROC_30,
390
455
  ),
391
456
  Signal(
392
457
  name="RATE_OF_CHANGE_7",
393
458
  type_info="Value",
394
459
  description="7-day Rate of Change",
395
460
  type=Optional[float],
396
- function=lambda d: d.ROC_7.tolist()[-1],
461
+ function=lambda d: d.ROC_7,
397
462
  ),
398
463
  Signal(
399
464
  name="MOMENTUM",
400
465
  type_info="Value",
401
466
  description="7-day Rate of Change",
402
467
  type=Optional[float],
403
- function=lambda d: d.MOM.iloc[-1],
468
+ function=lambda d: d.MOM,
404
469
  ),
405
470
  ],
406
471
  ),
@@ -415,16 +480,14 @@ def indicators_factory() -> List[Indicator]:
415
480
  type_info="Oversold",
416
481
  description="Bullish momentum in money flow",
417
482
  type=Optional[date],
418
- function=lambda d: cross_value(d.ADOSC, 0, above=True),
483
+ function=lambda d: cross_value_series(d.ADOSC, 0, above=True),
419
484
  ),
420
485
  Signal(
421
486
  name="POSITIVE_ADOSC_20_DAY_BREAKOUT",
422
487
  type_info="Oversold",
423
488
  description="20-day breakout confirmed by positive ADOSC",
424
489
  type=Optional[date],
425
- function=lambda d: d[
426
- (d.ADOSC_SIGNAL == True) # noqa: E712
427
- ].last_valid_index(),
490
+ function=lambda d: (d.ADOSC_SIGNAL == True), # noqa: E712
428
491
  ),
429
492
  ],
430
493
  ),
@@ -447,53 +510,49 @@ def indicators_factory() -> List[Indicator]:
447
510
  type_info="Long",
448
511
  description="Morning Star Candlestick Pattern",
449
512
  type=Optional[date],
450
- function=lambda d: d[(d.CDLMORNINGSTAR == 100)].last_valid_index(),
513
+ function=lambda d: d.CDLMORNINGSTAR == 100,
451
514
  ),
452
515
  Signal(
453
516
  name="CDL3LINESTRIKE",
454
517
  description="3 Line Strike Candlestick Pattern",
455
518
  type_info="Long",
456
519
  type=Optional[date],
457
- function=lambda d: d[(d.CDL3LINESTRIKE == 100)].last_valid_index(),
520
+ function=lambda d: d.CDL3LINESTRIKE == 100,
458
521
  ),
459
522
  Signal(
460
523
  name="CDL3WHITESOLDIERS",
461
524
  description="3 White Soldiers Candlestick Pattern",
462
525
  type_info="Long",
463
526
  type=Optional[date],
464
- function=lambda d: d[
465
- (d.CDL3WHITESOLDIERS == 100)
466
- ].last_valid_index(),
527
+ function=lambda d: d.CDL3WHITESOLDIERS == 100,
467
528
  ),
468
529
  Signal(
469
530
  name="CDLABANDONEDBABY",
470
531
  description="Abandoned Baby Candlestick Pattern",
471
532
  type_info="Long",
472
533
  type=Optional[date],
473
- function=lambda d: d[
474
- (d.CDLABANDONEDBABY == 100)
475
- ].last_valid_index(),
534
+ function=lambda d: d.CDLABANDONEDBABY == 100,
476
535
  ),
477
536
  Signal(
478
537
  name="CDLTASUKIGAP",
479
538
  description="Tasukigap Candlestick Pattern",
480
539
  type_info="Long",
481
540
  type=Optional[date],
482
- function=lambda d: d[(d.CDLTASUKIGAP == 100)].last_valid_index(),
541
+ function=lambda d: d.CDLTASUKIGAP == 100,
483
542
  ),
484
543
  Signal(
485
544
  name="CDLPIERCING",
486
545
  description="Piercing Candlestick Pattern",
487
546
  type_info="Long",
488
547
  type=Optional[date],
489
- function=lambda d: d[(d.CDLPIERCING == 100)].last_valid_index(),
548
+ function=lambda d: d.CDLPIERCING == 100,
490
549
  ),
491
550
  Signal(
492
551
  name="CDLENGULFING",
493
552
  description="Engulfing Candlestick Pattern",
494
553
  type_info="Long",
495
554
  type=Optional[date],
496
- function=lambda d: d[(d.CDLENGULFING == 100)].last_valid_index(),
555
+ function=lambda d: d.CDLENGULFING == 100,
497
556
  ),
498
557
  ],
499
558
  ),
@@ -503,7 +562,7 @@ def indicators_factory() -> List[Indicator]:
503
562
  class Indicators(BaseModel):
504
563
  indicators: List[Indicator] = Field(default_factory=indicators_factory)
505
564
 
506
- def compute(self, data: pd.DataFrame) -> None:
565
+ def _compute(self, data: pd.DataFrame) -> None:
507
566
  for indicator in self.indicators:
508
567
  try:
509
568
  indicator.compute(data)
@@ -514,8 +573,16 @@ class Indicators(BaseModel):
514
573
  f"Computed {indicator.name} with {len(indicator.signals)} signals"
515
574
  )
516
575
 
517
- def to_dict(self, data: pd.DataFrame) -> Dict[str, Any]:
518
- self.compute(data)
576
+ def compute_series(self, data: pd.DataFrame, symbol: str) -> List[SignalSeries]:
577
+ data__ = pd.concat(
578
+ [indicator.compute_series(data, symbol) for indicator in self.indicators]
579
+ )
580
+ return [
581
+ SignalSeries.model_validate(s) for s in data__.to_dict(orient="records")
582
+ ]
583
+
584
+ def compute(self, data: pd.DataFrame) -> Dict[str, Any]:
585
+ self._compute(data)
519
586
  res = {}
520
587
  for indicator in self.indicators:
521
588
  for signal in indicator.signals:
@@ -0,0 +1,51 @@
1
+ """
2
+
3
+ Revision ID: 3e1a14c41916
4
+ Revises: 040b15fba458
5
+ Create Date: 2025-07-17 15:07:44.125783
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "3e1a14c41916"
18
+ down_revision: Union[str, None] = "040b15fba458"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "signalseries",
27
+ sa.Column("date", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
28
+ sa.Column("value", sa.Float(), nullable=False),
29
+ sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
30
+ sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
31
+ sa.PrimaryKeyConstraint("date", "name", "symbol"),
32
+ )
33
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
34
+ batch_op.drop_index(batch_op.f("ix_analysis_momentum_time_span"))
35
+ batch_op.drop_column("momentum_time_span")
36
+
37
+ # ### end Alembic commands ###
38
+
39
+
40
+ def downgrade() -> None:
41
+ # ### commands auto generated by Alembic - please adjust! ###
42
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
43
+ batch_op.add_column(sa.Column("momentum_time_span", sa.DATE(), nullable=True))
44
+ batch_op.create_index(
45
+ batch_op.f("ix_analysis_momentum_time_span"),
46
+ ["momentum_time_span"],
47
+ unique=False,
48
+ )
49
+
50
+ op.drop_table("signalseries")
51
+ # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -16,6 +16,7 @@ from sqlmodel import Session, select
16
16
 
17
17
  from bullish.analysis.analysis import Analysis
18
18
  from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
19
+ from bullish.analysis.indicators import SignalSeries
19
20
  from bullish.analysis.industry_views import Type, IndustryView
20
21
 
21
22
  from bullish.database.schemas import (
@@ -23,6 +24,7 @@ from bullish.database.schemas import (
23
24
  JobTrackerORM,
24
25
  FilteredResultsORM,
25
26
  IndustryViewORM,
27
+ SignalSeriesORM,
26
28
  )
27
29
  from bullish.database.scripts.upgrade import upgrade
28
30
  from bullish.exceptions import DatabaseFileNotFoundError
@@ -261,3 +263,24 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
261
263
  )
262
264
  result = session.exec(stmt).all()
263
265
  return [IndustryView.model_validate(r) for r in result]
266
+
267
+ def write_signal_series(self, signal_series: List[SignalSeries]) -> None:
268
+ with Session(self._engine) as session:
269
+ stmt = (
270
+ insert(SignalSeriesORM)
271
+ .prefix_with("OR REPLACE")
272
+ .values([a.model_dump() for a in signal_series])
273
+ )
274
+ session.exec(stmt) # type: ignore
275
+ session.commit()
276
+
277
+ def read_signal_series(
278
+ self, name: str, start_date: date, end_date: date
279
+ ) -> List[str]:
280
+ with Session(self._engine) as session:
281
+ stmt = select(SignalSeriesORM.symbol).where(
282
+ SignalSeriesORM.name == name,
283
+ SignalSeriesORM.date >= start_date, # type: ignore
284
+ SignalSeriesORM.date <= end_date, # type: ignore
285
+ )
286
+ return list(set(session.exec(stmt).all()))
@@ -4,6 +4,7 @@ from sqlmodel import Field, SQLModel
4
4
  from sqlalchemy import Column, JSON
5
5
  from bullish.analysis.analysis import Analysis
6
6
  from bullish.analysis.filter import FilteredResults
7
+ from bullish.analysis.indicators import SignalSeries
7
8
  from bullish.analysis.industry_views import IndustryView
8
9
 
9
10
  from bullish.jobs.models import JobTracker
@@ -48,6 +49,14 @@ class FilteredResultsORM(SQLModel, FilteredResults, table=True):
48
49
  filter_query: Dict[str, Any] = Field(sa_column=Column(JSON)) # type: ignore
49
50
 
50
51
 
52
+ class SignalSeriesORM(SQLModel, SignalSeries, table=True):
53
+ __tablename__ = "signalseries"
54
+ __table_args__ = {"extend_existing": True} # noqa:RUF012
55
+ date: str = Field(primary_key=True) # type: ignore
56
+ name: str = Field(primary_key=True)
57
+ symbol: str = Field(primary_key=True)
58
+
59
+
51
60
  class IndustryViewORM(SQLModel, IndustryView, table=True):
52
61
  __tablename__ = "industryview"
53
62
  __table_args__ = {"extend_existing": True} # noqa:RUF012
@@ -12,6 +12,7 @@ from bearish.types import Sources # type: ignore
12
12
  from bullish.analysis.analysis import Analysis, AnalysisView
13
13
  from bullish.analysis.constants import Industry, Sector, IndustryGroup, Country
14
14
  from bullish.analysis.filter import FilterQuery, FilteredResults
15
+ from bullish.analysis.indicators import SignalSeries
15
16
  from bullish.analysis.industry_views import Type, IndustryView
16
17
  from bullish.jobs.models import JobTracker, JobTrackerStatus, add_icons
17
18
 
@@ -125,3 +126,11 @@ class BullishDbBase(BearishDbBase): # type: ignore
125
126
  def read_returns(
126
127
  self, type: Type, industry: Industry, country: Country
127
128
  ) -> List[IndustryView]: ...
129
+
130
+ @abc.abstractmethod
131
+ def write_signal_series(self, signal_series: List[SignalSeries]) -> None: ...
132
+
133
+ @abc.abstractmethod
134
+ def read_signal_series(
135
+ self, name: str, start_date: date, end_date: date
136
+ ) -> List[str]: ...
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: bullishpy
3
- Version: 0.14.0
3
+ Version: 0.15.0
4
4
  Summary:
5
5
  Author: aan
6
6
  Author-email: andoludovic.andriamamonjy@gmail.com
@@ -12,12 +12,13 @@ Requires-Dist: click (>=7.0,<=8.1)
12
12
  Requires-Dist: huey (>=2.5.3,<3.0.0)
13
13
  Requires-Dist: joblib (>=1.5.1,<2.0.0)
14
14
  Requires-Dist: pandas-ta (>=0.3.14b0,<0.4.0)
15
- Requires-Dist: plotly (>=6.1.2,<7.0.0)
15
+ Requires-Dist: plotly (>=4.12.0,<6.0.0)
16
16
  Requires-Dist: streamlit (>=1.45.1,<2.0.0)
17
17
  Requires-Dist: streamlit-file-browser (>=3.2.22,<4.0.0)
18
18
  Requires-Dist: streamlit-pydantic (>=v0.6.1-rc.3,<0.7.0)
19
19
  Requires-Dist: ta-lib (>=0.6.4,<0.7.0)
20
20
  Requires-Dist: tickermood (>=0.4.0,<0.5.0)
21
+ Requires-Dist: vectorbt (>=0.28.0,<0.29.0)
21
22
  Description-Content-Type: text/markdown
22
23
 
23
24
  ## Bullish
@@ -1,10 +1,10 @@
1
1
  bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- bullish/analysis/analysis.py,sha256=ZTRf7YaEnFL9HBVkamY7JXys3XJtkLGNMkU4FbkV_04,19270
3
+ bullish/analysis/analysis.py,sha256=ag1wAfNDKWmPxJz7sd1mNhuh4CZfEleTp2_a3DIkEL4,20524
4
4
  bullish/analysis/constants.py,sha256=tVDPQEufH8lytMj4DdUdvXt79b7cvWaDwSUOpeqMWts,9851
5
5
  bullish/analysis/filter.py,sha256=kSG6fXZrnwqE1HvKQW6O3yVNV49qhVleer9M_7BIDpg,8381
6
- bullish/analysis/functions.py,sha256=A2eFBqNx5XohEhJFU_LvyU0_s0ozErtsiYUqVSb3Wvs,14367
7
- bullish/analysis/indicators.py,sha256=9-768_ntZRxNgeNXj3MbRO9QCq97uYKMHQ-9hQMu7Mo,20938
6
+ bullish/analysis/functions.py,sha256=ebCXxYeKlWhvcRRFMbX8E63bL7OquxyoWsYIy0o0SCA,14277
7
+ bullish/analysis/indicators.py,sha256=S3pUsWn4MC_BKPhY1iFQDNNuHcqulez8jOsfWJdht_8,23470
8
8
  bullish/analysis/industry_views.py,sha256=1B5V39Fm9rNQEsun1xrwELfOiKlGdTie0ZolS2UBh2w,6247
9
9
  bullish/analysis/predefined_filters.py,sha256=28e42hGaH7Qb6SPNeH7EK9YIhjERj-qpbY-7xLahvDM,8361
10
10
  bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -20,6 +20,7 @@ bullish/database/alembic/versions/040b15fba458_.py,sha256=scSauB4wZe0sMFHOAMHkx-
20
20
  bullish/database/alembic/versions/08ac1116e055_.py,sha256=zMEiCbraMEAZItT4ibc3evAH7-7mkXpdgnZy4tPVYeg,27263
21
21
  bullish/database/alembic/versions/11d35a452b40_.py,sha256=j2PaU1RssLQ20OevGmBC7S9E9ocWiXpBue9SOS4AQoY,11521
22
22
  bullish/database/alembic/versions/17e51420e7ad_.py,sha256=xeiVIm1YUZb08opE9rocHZP1__9WQWXsKsXgeFV9cvs,2960
23
+ bullish/database/alembic/versions/3e1a14c41916_.py,sha256=TmpfLl4dBw-CqHsnxwnnBc00nlUtHb0Hh8cc3yLBRME,1668
23
24
  bullish/database/alembic/versions/49c83f9eb5ac_.py,sha256=kCBItp7KmqpJ03roy5ikQjhefZia1oKgfZwournQDq8,3890
24
25
  bullish/database/alembic/versions/4b0a2f40b7d3_.py,sha256=G0K7w7pOPYjPZkXTB8LWhxoxuWBPcPwOfnubTBtdeEY,1827
25
26
  bullish/database/alembic/versions/5b10ee7604c1_.py,sha256=YlqaagPasR3RKASv7acME1jPS8p26VoTE2BvpOwdCpY,1463
@@ -30,8 +31,8 @@ bullish/database/alembic/versions/d663166c531d_.py,sha256=U92l6QXqPniAYrPeu2Bt77
30
31
  bullish/database/alembic/versions/ec25c8fa449f_.py,sha256=8Yts74KEjK4jg20zIo90_0atw-sOBuE3hgCKl-rfS5E,2271
31
32
  bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
32
33
  bullish/database/alembic/versions/fc191121f522_.py,sha256=0sstF6TpAJ09-Mt-Vek9SdSWksvi4C58a5D92rBtuY8,1894
33
- bullish/database/crud.py,sha256=ubRXV88GAo4prDQPylouEn8DBvoyNtM6hx12HPhD_2w,9889
34
- bullish/database/schemas.py,sha256=gI6hWYv1C4G9xRXiNTSLxXftkgIOANDyfct2_KwSavo,2442
34
+ bullish/database/crud.py,sha256=EIXCnhvPAxwldicUG4fwsdiXiq08TjXoZ8wSt27ph0g,10808
35
+ bullish/database/schemas.py,sha256=ySTaw77X9rvLg-4PAKaOH6fPe8Bgi8kTtPc8DvsR6F8,2791
35
36
  bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
36
37
  bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
37
38
  bullish/database/scripts/upgrade.py,sha256=-Gz7aFNPEt9y9e1kltqXE76-j_8QeNtet_VlwY5AWjo,806
@@ -40,14 +41,14 @@ bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
40
41
  bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
41
42
  bullish/figures/figures.py,sha256=imrvIIcL9L-z-3vzWK5hDEsNttZs60QxlFI-PLw0hJQ,4829
42
43
  bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
43
- bullish/interface/interface.py,sha256=UB2ATVtUsnXetnLCrSmNVrFpIvCw_0kuVxKHZC7sT7U,4233
44
+ bullish/interface/interface.py,sha256=QbjC_tWLbhvQr8vaiQl8ymrb6f_vR_KCxEmzJ5lA1Zg,4528
44
45
  bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
45
46
  bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
46
47
  bullish/jobs/models.py,sha256=ndrGTMP08S57yGLGEG9TQt8Uw2slc4HvbG-TZtEEuN0,744
47
48
  bullish/jobs/tasks.py,sha256=V_b0c8_GQC0-KIxaHDlLFhtkclQJOsck0gXaW6OlC_w,3055
48
49
  bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
49
50
  bullish/utils/checks.py,sha256=Va10_xDVVnxYkOD2hafvyQ-TFV8FQpOkr4huJ7XgpDM,2188
50
- bullishpy-0.14.0.dist-info/METADATA,sha256=BH2BdzlEth8FoZFyxME0sbOlPnqqPdbHoxdHJ3X_GHg,784
51
- bullishpy-0.14.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
52
- bullishpy-0.14.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
53
- bullishpy-0.14.0.dist-info/RECORD,,
51
+ bullishpy-0.15.0.dist-info/METADATA,sha256=j-VPiqIl7LJyJ56b4oolBx4ZAsJljbxw-NA6wKzmir8,828
52
+ bullishpy-0.15.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
53
+ bullishpy-0.15.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
54
+ bullishpy-0.15.0.dist-info/RECORD,,