bullishpy 0.66.0__py3-none-any.whl → 0.68.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of bullishpy might be problematic; consult the package registry's advisory page for details.

@@ -528,6 +528,13 @@ class SubjectAnalysis(BaseModel):
528
528
  summary: Annotated[Optional[Dict[str, Any]], BeforeValidator(json_loads)] = None
529
529
  upside: Optional[float] = None
530
530
 
531
+ oai_high_price_target: Optional[float] = None
532
+ oai_low_price_target: Optional[float] = None
533
+ oai_news_date: Optional[datetime] = None
534
+ oai_recent_news: Optional[str] = None
535
+ oai_recommendation: Optional[str] = None
536
+ oai_explanation: Optional[str] = None
537
+
531
538
  def compute_upside(self, last_price: float) -> None:
532
539
  if self.high_price_target is not None:
533
540
  self.upside = (
@@ -541,12 +548,17 @@ class SubjectAnalysis(BaseModel):
541
548
  return None
542
549
  return "".join(
543
550
  [
544
- f"<p>{scrub(t.get('content').replace("\n",""))}</p>" # type: ignore
551
+ f"<p>{t.get('content').replace("\n","")}</p>" # type: ignore
545
552
  for t in self.news_summary
546
553
  if t.get("content")
547
554
  ]
548
555
  )
549
556
 
557
+ def to_date(self) -> Optional[date]:
558
+ if self.news_date:
559
+ return self.news_date.date()
560
+ return None
561
+
550
562
 
551
563
  class Analysis(SubjectAnalysis, AnalysisEarningsDate, AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis): # type: ignore
552
564
 
@@ -180,7 +180,6 @@ class GeneralFilter(BaseModel):
180
180
  price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])
181
181
 
182
182
 
183
-
184
183
  class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysisFilters): # type: ignore
185
184
 
186
185
  def valid(self) -> bool:
@@ -434,3 +434,38 @@ def add_indicators(data: pd.DataFrame) -> pd.DataFrame:
434
434
  f"Expected columns {expected_columns} not found in data columns {data.columns.tolist()}"
435
435
  )
436
436
  return data
437
+
438
+
439
+ class Line(BaseModel):
440
+ value: float
441
+ previous: float
442
+
443
+
444
+ class SupportResistance(BaseModel):
445
+ support: Line
446
+ resistance: Line
447
+
448
+
449
+ def support_resistance(df: pd.DataFrame, window: int = 5) -> SupportResistance:
450
+
451
+ w = window * 2 + 1
452
+ highs = df.high.rolling(w, center=True).max()
453
+ lows = df.low.rolling(w, center=True).min()
454
+ swing_high_mask = df.high == highs
455
+ swing_low_mask = df.low == lows
456
+
457
+ raw_res = df.loc[swing_high_mask, "high"].to_numpy()
458
+ raw_sup = df.loc[swing_low_mask, "low"].to_numpy()
459
+ return SupportResistance(
460
+ support=Line(value=float(raw_sup[-1]), previous=float(raw_sup[-2])),
461
+ resistance=Line(value=float(raw_res[-1]), previous=float(raw_res[-2])),
462
+ )
463
+
464
+
465
+ def bollinger_bands(
466
+ data: pd.DataFrame, window: int = 20, std_dev: float = 2.0
467
+ ) -> pd.DataFrame:
468
+ bbands = ta.bbands(
469
+ data.close, timeperiod=window, nbdevup=std_dev, nbdevdn=std_dev, matype=0
470
+ )
471
+ return bbands # type: ignore
@@ -175,6 +175,20 @@ def indicators_factory() -> List[Indicator]:
175
175
  type=Optional[date],
176
176
  function=lambda d: (d.ADX_14 > 20) & (d.MINUS_DI > d.PLUS_DI),
177
177
  ),
178
+ Signal(
179
+ name="ADX_14",
180
+ description="ADX 14",
181
+ type_info="Short",
182
+ type=Optional[date],
183
+ function=lambda d: (d.ADX_14 > 25),
184
+ ),
185
+ Signal(
186
+ name="ADX_14_OVERBOUGHT",
187
+ description="ADX 14 OVERBOUGHT",
188
+ type_info="Short",
189
+ type=Optional[date],
190
+ function=lambda d: (d.ADX_14 > 50),
191
+ ),
178
192
  ],
179
193
  ),
180
194
  Indicator(
@@ -0,0 +1,89 @@
1
+ import json
2
+ import logging
3
+ import os
4
+ from datetime import date
5
+ from typing import Optional, List, TYPE_CHECKING
6
+
7
+ from pydantic import BaseModel, Field
8
+ from openai import OpenAI
9
+
10
+ if TYPE_CHECKING:
11
+ from bullish.database.crud import BullishDb
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ def prompt(ticker: str) -> str:
17
+ return f"""
18
+ You are a financial analysis assistant.
19
+
20
+ Using the latest reliable public data from the web — including analyst price targets from multiple reputable
21
+ sources — analyze the stock ticker {ticker}.
22
+
23
+ Return ONLY valid JSON matching EXACTLY the schema below — no explanations, no preamble, no markdown, no code
24
+ fences, no extra text:
25
+
26
+ {{
27
+ "high_price_target": float, // Analyst consensus high price target in USD (based on multiple sources)
28
+ "low_price_target": float, // Analyst consensus low price target in USD (based on multiple sources)
29
+ "recent_news": str, // Detailed, multi-sentence summary of recent news affecting the company;
30
+ include credible source names inline
31
+ "recommendation": str, // One of: "Strong Buy", "Buy", "Hold", "Sell", "Strong Sell"
32
+ "explanation": str // Concise explanation for the recommendation above, covering key pros/cons
33
+ for investors
34
+ }}
35
+
36
+ Formatting rules:
37
+ - Output must be a single valid JSON object with no surrounding text or formatting.
38
+ - Use plain numbers for high_price_target and low_price_target (no currency symbols, no commas).
39
+ - All text fields must be professional, investor-oriented, and reference credible named sources in `recent_news`.
40
+ - If exact data is unavailable, estimate based on web search results and note uncertainty in the relevant field.
41
+ """
42
+
43
+
44
+ class OpenAINews(BaseModel):
45
+ symbol: str
46
+ news_date: date = Field(default_factory=date.today)
47
+ high_price_target: Optional[float] = None
48
+ low_price_target: Optional[float] = None
49
+ recent_news: Optional[str] = None
50
+ recommendation: Optional[str] = None
51
+ explanation: Optional[str] = None
52
+
53
+ def valid(self) -> bool:
54
+ return bool(
55
+ self.model_dump(
56
+ exclude_none=True,
57
+ exclude_unset=True,
58
+ exclude_defaults=True,
59
+ exclude={"symbol"},
60
+ )
61
+ )
62
+
63
+ @classmethod
64
+ def from_ticker(cls, ticker: str) -> "OpenAINews":
65
+ if "OPENAI_API_KEY" not in os.environ:
66
+ return cls(symbol=ticker)
67
+ print(f"Fetching OpenAI news for {ticker}...")
68
+ client = OpenAI()
69
+ resp = client.responses.create(
70
+ model="gpt-4o-mini", input=prompt(ticker), tools=[{"type": "web_search"}] # type: ignore
71
+ )
72
+ try:
73
+ return cls.model_validate(json.loads(resp.output_text) | {"symbol": ticker})
74
+ except Exception as e:
75
+ logger.error(f"Failed to parse OpenAI response for {ticker}: {e}")
76
+ return cls(symbol=ticker)
77
+
78
+ @classmethod
79
+ def from_tickers(cls, tickers: List[str]) -> List["OpenAINews"]:
80
+ return [cls.from_ticker(t) for t in tickers]
81
+
82
+
83
+ def get_open_ai_news(bullish_db: "BullishDb", tickers: List[str]) -> bool:
84
+ news = OpenAINews.from_tickers(tickers)
85
+ valid_news = [n for n in news if n.valid()]
86
+ if valid_news:
87
+ bullish_db.write_many_openai_news(valid_news)
88
+ return True
89
+ return False
@@ -135,17 +135,29 @@ class NamedFilterQuery(FilterQuery):
135
135
  self.model_dump() | {"name": f"{self.name} ({suffix})", **properties}
136
136
  )
137
137
 
138
- def top_performers(self) -> "NamedFilterQuery":
138
+ def week_top_performers(self) -> "NamedFilterQuery":
139
+ properties = {
140
+ "volume_above_average": DATE_THRESHOLD,
141
+ "weekly_growth": [1, 100],
142
+ }
143
+ return self._custom_variant("Week Top Performers", properties)
144
+
145
+ def month_top_performers(self) -> "NamedFilterQuery":
146
+ properties = {
147
+ "monthly_growth": [8, 100],
148
+ }
149
+ return self._custom_variant("Month Top Performers", properties)
150
+
151
+ def year_top_performers(self) -> "NamedFilterQuery":
139
152
  properties = {
140
153
  "volume_above_average": DATE_THRESHOLD,
141
154
  "sma_50_above_sma_200": [
142
155
  datetime.date.today() - datetime.timedelta(days=5000),
143
156
  datetime.date.today(),
144
157
  ],
145
- "weekly_growth": [1, 100],
146
- "monthly_growth": [8, 100],
158
+ "yearly_growth": [30, 100],
147
159
  }
148
- return self._custom_variant("Top Performers", properties)
160
+ return self._custom_variant("Yearly Top Performers", properties)
149
161
 
150
162
  def poor_performers(self) -> "NamedFilterQuery":
151
163
  properties = {
@@ -161,7 +173,7 @@ class NamedFilterQuery(FilterQuery):
161
173
  }
162
174
  return self._custom_variant("Poor Performers", properties)
163
175
 
164
- def fundamentals(self) -> "NamedFilterQuery":
176
+ def yearly_fundamentals(self) -> "NamedFilterQuery":
165
177
  properties = {
166
178
  "income": [
167
179
  "positive_operating_income",
@@ -170,20 +182,63 @@ class NamedFilterQuery(FilterQuery):
170
182
  "growing_operating_income",
171
183
  ],
172
184
  "cash_flow": ["positive_free_cash_flow", "growing_operating_cash_flow"],
185
+ "properties": [
186
+ "positive_return_on_equity",
187
+ "operating_cash_flow_is_higher_than_net_income",
188
+ ],
189
+ }
190
+ return self._custom_variant("Yearly Fundamentals", properties)
191
+
192
+ def quarterly_fundamentals(self) -> "NamedFilterQuery":
193
+ properties = {
194
+ "income": [
195
+ "quarterly_positive_operating_income",
196
+ "quarterly_positive_net_income",
197
+ ],
198
+ "cash_flow": [
199
+ "quarterly_positive_free_cash_flow",
200
+ ],
201
+ "properties": [
202
+ "quarterly_operating_cash_flow_is_higher_than_net_income",
203
+ ],
204
+ }
205
+ return self._custom_variant("Quarterly Fundamentals", properties)
206
+
207
+ def growing_quarterly_fundamentals(self) -> "NamedFilterQuery":
208
+ properties = {
209
+ "income": [
210
+ "quarterly_positive_operating_income",
211
+ "quarterly_positive_net_income",
212
+ "quarterly_growing_net_income",
213
+ ],
214
+ "cash_flow": [
215
+ "quarterly_positive_free_cash_flow",
216
+ "quarterly_growing_operating_cash_flow",
217
+ ],
218
+ "properties": [
219
+ "quarterly_operating_cash_flow_is_higher_than_net_income",
220
+ ],
221
+ }
222
+ return self._custom_variant("Growing Quarterly Fundamentals", properties)
223
+
224
+ def min_fundamentals(self) -> "NamedFilterQuery":
225
+ properties = {
226
+ "income": [
227
+ "positive_operating_income",
228
+ "positive_net_income",
229
+ ],
230
+ "cash_flow": [
231
+ "positive_free_cash_flow",
232
+ ],
173
233
  "eps": [
174
- "growing_basic_eps",
175
- "growing_diluted_eps",
176
- "positive_basic_eps",
177
- "positive_diluted_eps",
234
+ "positive_diluted_eps", # or positive_basic_eps if diluted not available
178
235
  ],
179
236
  "properties": [
180
- "positive_return_on_assets",
181
237
  "positive_return_on_equity",
182
- "positive_debt_to_equity",
183
238
  "operating_cash_flow_is_higher_than_net_income",
184
239
  ],
185
240
  }
186
- return self._custom_variant("Fundamentals", properties)
241
+ return self._custom_variant("Min Fundamentals", properties)
187
242
 
188
243
  def high_growth(self) -> "NamedFilterQuery":
189
244
  properties = {"industry": list(get_args(HighGrowthIndustry))}
@@ -218,6 +273,12 @@ class NamedFilterQuery(FilterQuery):
218
273
  def rsi_oversold_(self) -> "NamedFilterQuery":
219
274
  return self.update_indicator_filter("RSI Oversold", "rsi_oversold")
220
275
 
276
+ def rsi_overbought_(self) -> "NamedFilterQuery":
277
+ return self.update_indicator_filter("RSI Overbought", "rsi_overbought")
278
+
279
+ def adx(self) -> "NamedFilterQuery":
280
+ return self.update_indicator_filter("ADX 14", "adx_14")
281
+
221
282
  def earnings_date(self) -> "NamedFilterQuery":
222
283
  return NamedFilterQuery.model_validate(
223
284
  self.model_dump()
@@ -231,25 +292,35 @@ class NamedFilterQuery(FilterQuery):
231
292
  )
232
293
 
233
294
  def variants(
234
- self, variants: Optional[List[List[str]]] = None
295
+ self,
296
+ variants: Optional[List[List[str]]] = None,
297
+ filters: Optional[List[str]] = None,
235
298
  ) -> List["NamedFilterQuery"]:
299
+ if filters and self.name not in filters:
300
+ return [self]
236
301
  variants = variants or [["europe"], ["us"]]
237
302
 
238
303
  _variants = {v for variant in variants for v in _get_variants(variant)}
239
- filters = []
304
+ filters_ = []
240
305
  for attributes in _variants:
241
- filter = self
306
+ filter__ = self
242
307
  for attr in attributes:
243
- filter = getattr(filter, attr)()
244
- filters.append(filter)
308
+ filter__ = getattr(filter__, attr)()
309
+ filters_.append(filter__)
245
310
 
246
- return filters
311
+ return [self, *filters_]
247
312
 
248
313
 
249
314
  def load_custom_filters() -> List[NamedFilterQuery]:
250
315
  if "CUSTOM_FILTERS_PATH" in os.environ:
251
316
  custom_filters_path = os.environ["CUSTOM_FILTERS_PATH"]
252
- return read_custom_filters(Path(custom_filters_path))
317
+ return [
318
+ variant
319
+ for f in read_custom_filters(Path(custom_filters_path))
320
+ for variant in f.variants(
321
+ variants=[["rsi_overbought_"]], filters=["portfolio", "Portfolio"]
322
+ )
323
+ ]
253
324
  return []
254
325
 
255
326
 
@@ -269,10 +340,10 @@ SMALL_CAP = NamedFilterQuery(
269
340
  order_by_desc="market_capitalization",
270
341
  ).variants(
271
342
  variants=[
272
- ["europe", "top_performers", "fundamentals"],
273
- ["us", "top_performers", "fundamentals"],
274
- ["europe", "earnings_date"],
275
- ["us", "earnings_date"],
343
+ ["week_top_performers", "min_fundamentals"],
344
+ ["month_top_performers", "min_fundamentals"],
345
+ ["earnings_date", "min_fundamentals"],
346
+ ["rsi_oversold_", "min_fundamentals"],
276
347
  ]
277
348
  )
278
349
 
@@ -282,16 +353,13 @@ LARGE_CAPS = NamedFilterQuery(
282
353
  market_capitalization=[1e10, 1e14],
283
354
  ).variants(
284
355
  variants=[
285
- ["europe", "rsi_oversold_", "macd", "fundamentals"],
286
- ["us", "rsi_oversold_", "macd", "fundamentals"],
287
- ["europe", "rsi_neutral_", "macd", "fundamentals"],
288
- ["us", "rsi_neutral_", "macd", "fundamentals"],
289
- ["europe", "rsi_30", "macd", "fundamentals"],
290
- ["us", "rsi_30", "macd", "fundamentals"],
291
- ["europe", "top_performers", "cheap"],
292
- ["us", "top_performers", "cheap"],
293
- ["europe", "earnings_date"],
294
- ["us", "earnings_date"],
356
+ ["rsi_oversold_", "macd", "yearly_fundamentals"],
357
+ ["rsi_neutral_", "macd", "adx", "yearly_fundamentals"],
358
+ ["rsi_30", "macd", "adx", "yearly_fundamentals"],
359
+ ["rsi_oversold_", "macd", "quarterly_fundamentals"],
360
+ ["rsi_neutral_", "macd", "adx", "quarterly_fundamentals"],
361
+ ["rsi_30", "macd", "adx", "quarterly_fundamentals"],
362
+ ["earnings_date", "quarterly_fundamentals", "yearly_fundamentals"],
295
363
  ]
296
364
  )
297
365
 
@@ -301,10 +369,10 @@ MID_CAPS = NamedFilterQuery(
301
369
  market_capitalization=[5e8, 1e10],
302
370
  ).variants(
303
371
  variants=[
304
- ["europe", "top_performers", "fundamentals"],
305
- ["us", "top_performers", "fundamentals"],
306
- ["europe", "earnings_date"],
307
- ["us", "earnings_date"],
372
+ ["week_top_performers"],
373
+ ["month_top_performers"],
374
+ ["earnings_date", "quarterly_fundamentals", "yearly_fundamentals"],
375
+ ["rsi_oversold_", "macd", "adx"],
308
376
  ]
309
377
  )
310
378
 
bullish/app/app.py CHANGED
@@ -290,15 +290,21 @@ def dialog_plot_figure() -> None:
290
290
  <div class="news-hover" >
291
291
  📰 <span class="label">News</span>
292
292
  <div class="tooltip">
293
- <h2>Date: {st.session_state.ticker_news.news_date.date()}</h2>
293
+ <h2>Date: {st.session_state.ticker_news.to_date()}</h2>
294
294
  <h2>Price targets</h2>
295
295
  <p>High price target: {st.session_state.ticker_news.high_price_target}</p>
296
296
  <p>Low price target: {st.session_state.ticker_news.low_price_target}</p>
297
+ <p>OpenAI High price target: {st.session_state.ticker_news.oai_high_price_target}</p>
298
+ <p>OpenAI Low price target: {st.session_state.ticker_news.oai_low_price_target}</p>
297
299
  <h2>Recommendation: {st.session_state.ticker_news.recommendation}</h2>
300
+ <h2>OpenAI Recommendation: {st.session_state.ticker_news.oai_recommendation}</h2>
298
301
  <h2>Consensus: {st.session_state.ticker_news.consensus}</h2>
299
302
  <h2>Explanation & reasons</h2>
300
303
  <p>{st.session_state.ticker_news.explanation}</p>
301
304
  <p>{st.session_state.ticker_news.reason}</p>
305
+ <p>{st.session_state.ticker_news.oai_explanation}</p>
306
+ <h2>Recent news</h2>
307
+ <p>{st.session_state.ticker_news.oai_recent_news}</p>
302
308
  <h2>News summaries</h2>
303
309
  {st.session_state.ticker_news.to_news()}
304
310
  </div>
@@ -0,0 +1,43 @@
1
+ """
2
+
3
+ Revision ID: b36c310f49ec
4
+ Revises: 260fcff7212e
5
+ Create Date: 2025-08-14 22:39:38.207093
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "b36c310f49ec"
18
+ down_revision: Union[str, None] = "cc28171c21a4"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+ op.create_table(
26
+ "openai",
27
+ sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
28
+ sa.Column("news_date", sa.Date(), nullable=False),
29
+ sa.Column("high_price_target", sa.Float(), nullable=True),
30
+ sa.Column("low_price_target", sa.Float(), nullable=True),
31
+ sa.Column("recent_news", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
32
+ sa.Column("recommendation", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
33
+ sa.Column("explanation", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
34
+ sa.PrimaryKeyConstraint("symbol", "news_date"),
35
+ )
36
+
37
+ # ### end Alembic commands ###
38
+
39
+
40
+ def downgrade() -> None:
41
+ # ### commands auto generated by Alembic - please adjust! ###
42
+ op.drop_table("openai")
43
+ # ### end Alembic commands ###
@@ -0,0 +1,87 @@
1
+ """
2
+
3
+ Revision ID: c828e29e1105
4
+ Revises: b36c310f49ec
5
+ Create Date: 2025-08-15 17:57:09.541454
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+ import sqlmodel
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision: str = "c828e29e1105"
18
+ down_revision: Union[str, None] = "b36c310f49ec"
19
+ branch_labels: Union[str, Sequence[str], None] = None
20
+ depends_on: Union[str, Sequence[str], None] = None
21
+
22
+
23
+ def upgrade() -> None:
24
+ # ### commands auto generated by Alembic - please adjust! ###
25
+
26
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
27
+ batch_op.add_column(
28
+ sa.Column("oai_high_price_target", sa.Float(), nullable=True)
29
+ )
30
+ batch_op.add_column(
31
+ sa.Column("oai_low_price_target", sa.Float(), nullable=True)
32
+ )
33
+ batch_op.add_column(sa.Column("oai_news_date", sa.DateTime(), nullable=True))
34
+ batch_op.add_column(
35
+ sa.Column(
36
+ "oai_recent_news", sqlmodel.sql.sqltypes.AutoString(), nullable=True
37
+ )
38
+ )
39
+ batch_op.add_column(
40
+ sa.Column(
41
+ "oai_recommendation", sqlmodel.sql.sqltypes.AutoString(), nullable=True
42
+ )
43
+ )
44
+ batch_op.add_column(
45
+ sa.Column(
46
+ "oai_explanation", sqlmodel.sql.sqltypes.AutoString(), nullable=True
47
+ )
48
+ )
49
+ batch_op.create_index(
50
+ "ix_analysis_oai_explanation", ["oai_explanation"], unique=False
51
+ )
52
+ batch_op.create_index(
53
+ "ix_analysis_oai_high_price_target", ["oai_high_price_target"], unique=False
54
+ )
55
+ batch_op.create_index(
56
+ "ix_analysis_oai_low_price_target", ["oai_low_price_target"], unique=False
57
+ )
58
+ batch_op.create_index(
59
+ "ix_analysis_oai_news_date", ["oai_news_date"], unique=False
60
+ )
61
+ batch_op.create_index(
62
+ "ix_analysis_oai_recent_news", ["oai_recent_news"], unique=False
63
+ )
64
+ batch_op.create_index(
65
+ "ix_analysis_oai_recommendation", ["oai_recommendation"], unique=False
66
+ )
67
+
68
+ # ### end Alembic commands ###
69
+
70
+
71
+ def downgrade() -> None:
72
+ # ### commands auto generated by Alembic - please adjust! ###
73
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
74
+ batch_op.drop_index("ix_analysis_oai_recommendation")
75
+ batch_op.drop_index("ix_analysis_oai_recent_news")
76
+ batch_op.drop_index("ix_analysis_oai_news_date")
77
+ batch_op.drop_index("ix_analysis_oai_low_price_target")
78
+ batch_op.drop_index("ix_analysis_oai_high_price_target")
79
+ batch_op.drop_index("ix_analysis_oai_explanation")
80
+ batch_op.drop_column("oai_explanation")
81
+ batch_op.drop_column("oai_recommendation")
82
+ batch_op.drop_column("oai_recent_news")
83
+ batch_op.drop_column("oai_news_date")
84
+ batch_op.drop_column("oai_low_price_target")
85
+ batch_op.drop_column("oai_high_price_target")
86
+
87
+ # ### end Alembic commands ###
@@ -0,0 +1,43 @@
1
+ """
2
+
3
+ Revision ID: cc28171c21a4
4
+ Revises: 260fcff7212e
5
+ Create Date: 2025-08-15 17:04:59.467407
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ from alembic import op
12
+ import sqlalchemy as sa
13
+ from sqlalchemy.dialects import sqlite
14
+
15
+ # revision identifiers, used by Alembic.
16
+ revision: str = "cc28171c21a4"
17
+ down_revision: Union[str, None] = "260fcff7212e"
18
+ branch_labels: Union[str, Sequence[str], None] = None
19
+ depends_on: Union[str, Sequence[str], None] = None
20
+
21
+
22
+ def upgrade() -> None:
23
+ # ### commands auto generated by Alembic - please adjust! ###
24
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
25
+ batch_op.add_column(sa.Column("adx_14", sa.Date(), nullable=True))
26
+ batch_op.add_column(sa.Column("adx_14_overbought", sa.Date(), nullable=True))
27
+ batch_op.create_index("ix_analysis_adx_14", ["adx_14"], unique=False)
28
+ batch_op.create_index(
29
+ "ix_analysis_adx_14_overbought", ["adx_14_overbought"], unique=False
30
+ )
31
+
32
+ # ### end Alembic commands ###
33
+
34
+
35
+ def downgrade() -> None:
36
+ # ### commands auto generated by Alembic - please adjust! ###
37
+ with op.batch_alter_table("analysis", schema=None) as batch_op:
38
+ batch_op.drop_index("ix_analysis_adx_14_overbought")
39
+ batch_op.drop_index("ix_analysis_adx_14")
40
+ batch_op.drop_column("adx_14_overbought")
41
+ batch_op.drop_column("adx_14")
42
+
43
+ # ### end Alembic commands ###
bullish/database/crud.py CHANGED
@@ -3,7 +3,7 @@ import logging
3
3
  from datetime import date
4
4
  from functools import cached_property
5
5
  from pathlib import Path
6
- from typing import TYPE_CHECKING, Any, List, Optional
6
+ from typing import TYPE_CHECKING, Any, List, Optional, Dict
7
7
 
8
8
  import pandas as pd
9
9
  from bearish.database.crud import BearishDb # type: ignore
@@ -22,6 +22,7 @@ from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
22
22
  from bullish.analysis.filter import FilteredResults
23
23
  from bullish.analysis.indicators import SignalSeries
24
24
  from bullish.analysis.industry_views import Type, IndustryView
25
+
25
26
  from bullish.database.schemas import (
26
27
  AnalysisORM,
27
28
  JobTrackerORM,
@@ -29,6 +30,7 @@ from bullish.database.schemas import (
29
30
  IndustryViewORM,
30
31
  SignalSeriesORM,
31
32
  BacktestResultORM,
33
+ OpenAINewsORM,
32
34
  )
33
35
  from bullish.database.scripts.upgrade import upgrade
34
36
  from bullish.exceptions import DatabaseFileNotFoundError
@@ -38,6 +40,7 @@ from tickermood.database.scripts.upgrade import upgrade as tickermood_upgrade #
38
40
 
39
41
  if TYPE_CHECKING:
40
42
  from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
43
+ from bullish.analysis.openai import OpenAINews
41
44
 
42
45
  logger = logging.getLogger(__name__)
43
46
 
@@ -358,11 +361,52 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
358
361
  LIMIT 1
359
362
  """
360
363
  )
364
+ sql_oai = text(
365
+ """
366
+ SELECT *
367
+ FROM openai
368
+ WHERE symbol = :symbol
369
+ ORDER BY news_date DESC
370
+ LIMIT 1
371
+ """
372
+ )
361
373
 
362
374
  with Session(self._engine) as session:
363
375
  row = session.execute(sql, {"symbol": symbol}).mappings().one_or_none()
376
+ row_oai = (
377
+ session.execute(sql_oai, {"symbol": symbol}).mappings().one_or_none()
378
+ )
379
+ row_dict = {}
364
380
  if row:
365
381
  row_dict = dict(row)
366
382
  row_dict = row_dict | {"news_date": row_dict["date"]}
367
- return SubjectAnalysis.model_validate(row_dict)
368
- return None
383
+ if row_oai:
384
+ row_dict_oai = dict(row_oai)
385
+ row_dict = row_dict | {
386
+ "oai_news_date": row_dict_oai.get("news_date"),
387
+ "oai_recent_news": row_dict_oai.get("recent_news"),
388
+ "oai_recommendation": row_dict_oai.get("recommendation"),
389
+ "oai_explanation": row_dict_oai.get("explanation"),
390
+ "oai_high_price_target": row_dict_oai.get("high_price_target"),
391
+ "oai_low_price_target": row_dict_oai.get("low_price_target"),
392
+ }
393
+
394
+ return SubjectAnalysis.model_validate(row_dict)
395
+
396
+ def write_many_openai_news(self, openai_news: List["OpenAINews"]) -> None:
397
+ with Session(self._engine) as session:
398
+ stmt = (
399
+ insert(OpenAINewsORM)
400
+ .prefix_with("OR REPLACE")
401
+ .values([a.model_dump() for a in openai_news])
402
+ )
403
+ session.exec(stmt) # type: ignore
404
+ session.commit()
405
+
406
+ def update_analysis(self, symbol: str, fields: Dict[str, Any]) -> None:
407
+ with Session(self._engine) as session:
408
+ stmt = (
409
+ update(AnalysisORM).where(AnalysisORM.symbol == symbol).values(**fields) # type: ignore
410
+ )
411
+ session.exec(stmt) # type: ignore
412
+ session.commit()
@@ -1,3 +1,4 @@
1
+ from datetime import date
1
2
  from typing import Dict, Any, List, Optional
2
3
 
3
4
  from sqlmodel import Field, SQLModel
@@ -7,6 +8,7 @@ from bullish.analysis.backtest import BacktestResult
7
8
  from bullish.analysis.filter import FilteredResults
8
9
  from bullish.analysis.indicators import SignalSeries
9
10
  from bullish.analysis.industry_views import IndustryView
11
+ from bullish.analysis.openai import OpenAINews
10
12
 
11
13
  from bullish.jobs.models import JobTracker
12
14
  from sqlalchemy import Index
@@ -22,6 +24,13 @@ dynamic_indexes = tuple(
22
24
  )
23
25
 
24
26
 
27
+ class OpenAINewsORM(SQLModel, OpenAINews, table=True):
28
+ __tablename__ = "openai"
29
+ __table_args__ = {"extend_existing": True} # noqa:RUF012
30
+ symbol: str = Field(primary_key=True)
31
+ news_date: date = Field(primary_key=True)
32
+
33
+
25
34
  class AnalysisORM(BaseTable, Analysis, table=True):
26
35
  __tablename__ = "analysis"
27
36
  __table_args__ = {"extend_existing": True} # noqa:RUF012
@@ -5,7 +5,10 @@ import pandas as pd
5
5
  import plotly.graph_objects as go
6
6
  from plotly.subplots import make_subplots
7
7
 
8
- from bullish.analysis.functions import add_indicators
8
+ from bullish.analysis.functions import (
9
+ add_indicators,
10
+ support_resistance,
11
+ )
9
12
  from datetime import date
10
13
 
11
14
 
@@ -17,6 +20,8 @@ def plot(
17
20
  industry_data: Optional[pd.DataFrame] = None,
18
21
  ) -> go.Figure:
19
22
  data = add_indicators(data)
23
+ supports = support_resistance(data)
24
+
20
25
  fig = make_subplots(
21
26
  rows=7,
22
27
  cols=1,
@@ -36,8 +41,8 @@ def plot(
36
41
  f"RSI ({symbol} [{name}])",
37
42
  f"MACD ({symbol} [{name}])",
38
43
  f"ADX ({symbol} [{name}])",
39
- f"OBV ({symbol} [{name}])",
40
- f"Industry ({symbol} [{name}])",
44
+ f"ATR ({symbol} [{name}])",
45
+ f"ADOSC ({symbol} [{name}])",
41
46
  ),
42
47
  )
43
48
  # Row 1: Candlestick + SMAs
@@ -114,28 +119,15 @@ def plot(
114
119
  col=1,
115
120
  )
116
121
  fig.add_trace(
117
- go.Scatter(x=data.index, y=data.OBV, name="OBV", mode="lines"),
122
+ go.Scatter(x=data.index, y=data.ATR, name="ATR", mode="lines"),
118
123
  row=6,
119
124
  col=1,
120
125
  )
121
126
  fig.add_trace(
122
127
  go.Scatter(x=data.index, y=data.ADOSC, name="ADOSC", mode="lines"),
123
- row=6,
128
+ row=7,
124
129
  col=1,
125
130
  )
126
- if industry_data is not None and not industry_data.empty:
127
- for c in industry_data.columns:
128
- fig.add_trace(
129
- go.Scatter(
130
- x=industry_data.index,
131
- y=industry_data[c],
132
- name=c,
133
- mode="lines",
134
- opacity=0.5 if c != "symbol" else 1.0,
135
- ),
136
- row=7,
137
- col=1,
138
- )
139
131
 
140
132
  if dates is not None and dates:
141
133
  for date in dates:
@@ -166,5 +158,23 @@ def plot(
166
158
  fig.add_hline(y=70, line_dash="dash", line_color="red", row=3, col=1)
167
159
  fig.add_hline(y=30, line_dash="dash", line_color="green", row=3, col=1)
168
160
  fig.add_hline(y=25, line_dash="dash", line_color="red", row=5, col=1)
161
+ fig.add_hline(
162
+ y=supports.support.value,
163
+ line_dash="dash",
164
+ line_color="rgba(26, 188, 156, 1)", # teal, fully opaque
165
+ annotation_text=f"Support ({supports.support.value:.2f})",
166
+ line_width=0.75,
167
+ row=1,
168
+ col=1,
169
+ )
170
+ fig.add_hline(
171
+ y=supports.resistance.value,
172
+ line_dash="dash",
173
+ line_color="rgba(230, 126, 34, 1)", # orange, fully opaque
174
+ annotation_text=f"Resistance ({supports.resistance.value:.2f})",
175
+ line_width=0.75,
176
+ row=1,
177
+ col=1,
178
+ )
169
179
 
170
180
  return fig
@@ -1,7 +1,7 @@
1
1
  import abc
2
2
  import logging
3
3
  from datetime import date
4
- from typing import List, Optional
4
+ from typing import List, Optional, Dict, Any
5
5
 
6
6
  import pandas as pd
7
7
  from bearish.interface.interface import BearishDbBase # type: ignore
@@ -15,6 +15,7 @@ from bullish.analysis.constants import Industry, Sector, IndustryGroup, Country
15
15
  from bullish.analysis.filter import FilterQuery, FilteredResults
16
16
  from bullish.analysis.indicators import SignalSeries
17
17
  from bullish.analysis.industry_views import Type, IndustryView
18
+ from bullish.analysis.openai import OpenAINews
18
19
  from bullish.jobs.models import JobTracker, JobTrackerStatus, add_icons
19
20
 
20
21
  logger = logging.getLogger(__name__)
@@ -155,3 +156,8 @@ class BullishDbBase(BearishDbBase): # type: ignore
155
156
 
156
157
  @abc.abstractmethod
157
158
  def read_subject(self, symbol: str) -> Optional[SubjectAnalysis]: ...
159
+ @abc.abstractmethod
160
+ def write_many_openai_news(self, openai_news: List[OpenAINews]) -> None: ...
161
+
162
+ @abc.abstractmethod
163
+ def update_analysis(self, symbol: str, fields: Dict[str, Any]) -> None: ...
bullish/jobs/tasks.py CHANGED
@@ -15,6 +15,7 @@ from .models import JobTrackerStatus, JobTracker, JobType
15
15
  from ..analysis.analysis import run_analysis, run_signal_series_analysis
16
16
  from ..analysis.backtest import run_many_tests, BackTestConfig
17
17
  from ..analysis.industry_views import compute_industry_view
18
+ from ..analysis.openai import get_open_ai_news
18
19
  from ..analysis.predefined_filters import predefined_filters, load_custom_filters
19
20
  from ..database.crud import BullishDb
20
21
  from bullish.analysis.filter import FilterUpdate
@@ -186,6 +187,23 @@ def news(
186
187
  headless: bool = True,
187
188
  task: Optional[Task] = None,
188
189
  ) -> None:
190
+ bullish_db = BullishDb(database_path=database_path)
191
+ if get_open_ai_news(bullish_db, symbols):
192
+ for symbol in symbols:
193
+ subject = bullish_db.read_subject(symbol)
194
+ if subject:
195
+ logger.debug(
196
+ f"extracting news for {symbol} subject: {subject.model_dump()}"
197
+ )
198
+ bullish_db.update_analysis(
199
+ symbol,
200
+ subject.model_dump(
201
+ exclude_none=True,
202
+ exclude_unset=True,
203
+ exclude_defaults=True,
204
+ exclude={"symbol"},
205
+ ),
206
+ )
189
207
  base_news(
190
208
  database_path=database_path,
191
209
  job_type=job_type,
@@ -195,7 +213,7 @@ def news(
195
213
  )
196
214
 
197
215
 
198
- @huey.periodic_task(crontab(minute="0", hour="3"), context=True) # type: ignore
216
+ @huey.periodic_task(crontab(minute="0", hour="8"), context=True) # type: ignore
199
217
  def cron_news(
200
218
  task: Optional[Task] = None,
201
219
  ) -> None:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: bullishpy
3
- Version: 0.66.0
3
+ Version: 0.68.0
4
4
  Summary:
5
5
  Author: aan
6
6
  Author-email: andoludovic.andriamamonjy@gmail.com
@@ -1,15 +1,16 @@
1
1
  bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
- bullish/analysis/analysis.py,sha256=Bcupt-qROPddj1hGTNAY8vhu0pnFqNvXoDtUNhRXErY,24217
3
+ bullish/analysis/analysis.py,sha256=SVAWg0a3-B3mtDwEDatTtjxie_EhH37l02O7gXMvxOw,24614
4
4
  bullish/analysis/backtest.py,sha256=x91ek5kOzJHvYq0TmJh1Q8wBDDduIaieE0zDaoZFXew,14325
5
5
  bullish/analysis/constants.py,sha256=j3vQwjGhY-4dEEV-TkeKMDUTo2GM7M97Hcpi19LDcFQ,11458
6
- bullish/analysis/filter.py,sha256=DB6uqPDdHbeaBri43uJvc-RkSEjNsnpyyW0trtHPcUo,9327
7
- bullish/analysis/functions.py,sha256=CuMgOjpQeg4KsDMUBdHRlxL1dRlos16KRyLhQe8PYUQ,14819
8
- bullish/analysis/indicators.py,sha256=Seig6LaY6qr8QhgWF_qKNR68GSer9HN0VZIkAMfdUXc,27649
6
+ bullish/analysis/filter.py,sha256=VvQALnYNyYylXkorYR3oGhsF4L_sAUSE7-aop4Trp9o,9326
7
+ bullish/analysis/functions.py,sha256=lrbPvTo3GLtylDCfeIKoXCKF5gaY5QFFToNqtuj7xhI,15794
8
+ bullish/analysis/indicators.py,sha256=kdjDVhIFiDBhezJJg9ifGheMC6oCR0gC87d_FiW_tjI,28183
9
9
  bullish/analysis/industry_views.py,sha256=-B4CCAYz2arGQtWTXLLMpox0loO_MGdVQd2ycCRMOQQ,6799
10
- bullish/analysis/predefined_filters.py,sha256=zIp1po5SOIwqfEbzmrdV6frfxUAb7RZ8o5HFyoyoGqQ,11512
10
+ bullish/analysis/openai.py,sha256=RK-1GbuBUKY1jR1S9PE4w6zMp7l_sY9BlSh9bLT8HbI,3313
11
+ bullish/analysis/predefined_filters.py,sha256=E65qrTSaDFuUxoaeZ8D72K5AobumobpQdpcTIF308D4,14053
11
12
  bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
- bullish/app/app.py,sha256=7hWVVd2jBM-Es9S904ck1mtIMSadWgFqwns0bTwrKOU,16720
13
+ bullish/app/app.py,sha256=dnTzlyKrG2XTKDeLnJwfdvIf24eXM8fd29bvJWmMr8k,17190
13
14
  bullish/cli.py,sha256=yYqiEQAvOIQ-pTn77RPuE449gwaEGBeQwNHHAJ5yQDM,2739
14
15
  bullish/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
16
  bullish/database/alembic/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
@@ -31,33 +32,36 @@ bullish/database/alembic/versions/6d252e23f543_.py,sha256=izF-ejdXk733INkAokGqjA
31
32
  bullish/database/alembic/versions/73564b60fe24_.py,sha256=MTlDRDNHj3E9gK7IMeAzv2UxxxYtWiu3gI_9xTLE-wg,1008
32
33
  bullish/database/alembic/versions/79bc71ec6f9e_.py,sha256=4nShut2NEd1F3piSckIIBtke0GEsFAxYw5TZl5YYRzc,1140
33
34
  bullish/database/alembic/versions/ae444f338124_.py,sha256=u8RphcniLCQce-HvN666QgCJpLsv6A91-a4R-Nif4bU,3672
35
+ bullish/database/alembic/versions/b36c310f49ec_.py,sha256=L0B3wyo9i0R14_H5fcDAxAm_5P1zIFsHUY888Do-pbI,1379
34
36
  bullish/database/alembic/versions/b76079e9845f_.py,sha256=W8eeTABjI9tT1dp3hlK7g7tiKqDhmA8AoUX9Sw-ykLI,1165
35
37
  bullish/database/alembic/versions/bf6b86dd5463_.py,sha256=fKB8knCprGmiL6AEyFdhybVmB7QX_W4MPFF9sPzUrSM,1094
38
+ bullish/database/alembic/versions/c828e29e1105_.py,sha256=rO9qwNay8HohSVHIJgYq7VWhtgn-jpF10h98WCu-wjU,3052
39
+ bullish/database/alembic/versions/cc28171c21a4_.py,sha256=ZsHFzqo6cfTXDodxaXRzkoKl0zK2TR15nD4SJeDlRi0,1401
36
40
  bullish/database/alembic/versions/d0e58e050845_.py,sha256=x_LS3J27FNyy_WD99uvZzNehly-jpgn9abOYN-VjjZc,1164
37
41
  bullish/database/alembic/versions/d663166c531d_.py,sha256=U92l6QXqPniAYrPeu2Bt77ReDbXveLj4aGXtgd806JY,1915
38
42
  bullish/database/alembic/versions/ec25c8fa449f_.py,sha256=8Yts74KEjK4jg20zIo90_0atw-sOBuE3hgCKl-rfS5E,2271
39
43
  bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
40
44
  bullish/database/alembic/versions/fc191121f522_.py,sha256=0sstF6TpAJ09-Mt-Vek9SdSWksvi4C58a5D92rBtuY8,1894
41
45
  bullish/database/alembic/versions/ff0cc4ba40ec_.py,sha256=74lxga54ig_LoNZYK9toJL9iRwGbNRezh1zvO1YI40U,2719
42
- bullish/database/crud.py,sha256=69dq-vvhPQI3aopGIwaBSowBW37EGUnN0f7olVbOmEM,14180
43
- bullish/database/schemas.py,sha256=fQ4RZeOjlFoIor7rjwpisbHRNDd7-zbyDdzNKaiNGQQ,3637
46
+ bullish/database/crud.py,sha256=5fcnOQftnf1YtRA_ndcKOihyO0sREnu_vLyC-_6Quik,15888
47
+ bullish/database/schemas.py,sha256=HudFJ9lsIkVaEYjQUWammrsDnYSmEe4hOCbim3dN_4A,3946
44
48
  bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
45
49
  bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
46
50
  bullish/database/scripts/upgrade.py,sha256=-Gz7aFNPEt9y9e1kltqXE76-j_8QeNtet_VlwY5AWjo,806
47
51
  bullish/database/settings.py,sha256=nMudufmF7iC_62_PHrGSMjlqDLN2I0qTbtz9JKZHSko,164
48
52
  bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
49
53
  bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50
- bullish/figures/figures.py,sha256=EpJQOiSqSp7cHvZoGlZrF6UVpyv-fFyDApAfskqdUkU,4562
54
+ bullish/figures/figures.py,sha256=aeMAZGr8HkcF6CIf8ed4cnxJ1YkOY2-euP5egwm0ELk,4750
51
55
  bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
52
- bullish/interface/interface.py,sha256=R2qVEMyBl9mBlPUO40zXp4vhfLKH7pgl_u2BmAVlD4w,5250
56
+ bullish/interface/interface.py,sha256=6uZAY19WNtDRKdOitqzqMEo6JTep2M3HC8iFUKYntHA,5518
53
57
  bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
54
58
  bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
55
59
  bullish/jobs/models.py,sha256=rBXxtGFBpgZprrxq5_X2Df-bh8BLYEfw-VLMRucrqa8,784
56
- bullish/jobs/tasks.py,sha256=7_zKZaLpbmh7XxvjhfWcowdDAp9sEABULB2PSkasfbM,6509
60
+ bullish/jobs/tasks.py,sha256=13oK53fBXd5pjMLLMeOoQ4vG-yH-6dfrodhb9KzAYVw,7230
57
61
  bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
62
  bullish/utils/checks.py,sha256=g-5QXNWNe1_BwHKrc2PtvPiLraL0tqGgxnzG7u-Wkgo,2189
59
- bullishpy-0.66.0.dist-info/LICENSE,sha256=nYb7AJFegu6ndlQhbbk54MjT-GH-0x9RF6Ls-ggJ_g4,1075
60
- bullishpy-0.66.0.dist-info/METADATA,sha256=tR47cS9eMeBPdoTLVQ7y99ZvPtMAl8lTpxKLnL3faKk,3009
61
- bullishpy-0.66.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
62
- bullishpy-0.66.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
63
- bullishpy-0.66.0.dist-info/RECORD,,
63
+ bullishpy-0.68.0.dist-info/LICENSE,sha256=nYb7AJFegu6ndlQhbbk54MjT-GH-0x9RF6Ls-ggJ_g4,1075
64
+ bullishpy-0.68.0.dist-info/METADATA,sha256=VSQAgksxrpSo6pc0TdVZoL7WAbZgPS2n-a4ShDUxX64,3009
65
+ bullishpy-0.68.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
66
+ bullishpy-0.68.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
67
+ bullishpy-0.68.0.dist-info/RECORD,,