bullishpy 0.67.0__tar.gz → 0.68.0__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release.



Files changed (66)
  1. {bullishpy-0.67.0 → bullishpy-0.68.0}/PKG-INFO +1 -1
  2. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/analysis.py +13 -1
  3. bullishpy-0.68.0/bullish/analysis/openai.py +89 -0
  4. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/predefined_filters.py +83 -31
  5. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/app/app.py +7 -1
  6. bullishpy-0.68.0/bullish/database/alembic/versions/b36c310f49ec_.py +43 -0
  7. bullishpy-0.68.0/bullish/database/alembic/versions/c828e29e1105_.py +87 -0
  8. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/crud.py +47 -3
  9. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/schemas.py +9 -0
  10. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/interface/interface.py +7 -1
  11. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/jobs/tasks.py +19 -1
  12. {bullishpy-0.67.0 → bullishpy-0.68.0}/pyproject.toml +1 -1
  13. {bullishpy-0.67.0 → bullishpy-0.68.0}/LICENSE +0 -0
  14. {bullishpy-0.67.0 → bullishpy-0.68.0}/README.md +0 -0
  15. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/__init__.py +0 -0
  16. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/__init__.py +0 -0
  17. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/backtest.py +0 -0
  18. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/constants.py +0 -0
  19. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/filter.py +0 -0
  20. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/functions.py +0 -0
  21. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/indicators.py +0 -0
  22. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/industry_views.py +0 -0
  23. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/app/__init__.py +0 -0
  24. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/cli.py +0 -0
  25. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/__init__.py +0 -0
  26. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/README +0 -0
  27. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/alembic.ini +0 -0
  28. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/env.py +0 -0
  29. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/script.py.mako +0 -0
  30. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/037dbd721317_.py +0 -0
  31. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/040b15fba458_.py +0 -0
  32. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/08ac1116e055_.py +0 -0
  33. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/11d35a452b40_.py +0 -0
  34. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/12889a2cbd7d_.py +0 -0
  35. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/17e51420e7ad_.py +0 -0
  36. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/260fcff7212e_.py +0 -0
  37. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/49c83f9eb5ac_.py +0 -0
  38. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/4b0a2f40b7d3_.py +0 -0
  39. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/4ee82b171449_.py +0 -0
  40. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/5b10ee7604c1_.py +0 -0
  41. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/6d252e23f543_.py +0 -0
  42. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/73564b60fe24_.py +0 -0
  43. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/79bc71ec6f9e_.py +0 -0
  44. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/ae444f338124_.py +0 -0
  45. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/b76079e9845f_.py +0 -0
  46. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/bf6b86dd5463_.py +0 -0
  47. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/cc28171c21a4_.py +0 -0
  48. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/d0e58e050845_.py +0 -0
  49. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/d663166c531d_.py +0 -0
  50. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/ec25c8fa449f_.py +0 -0
  51. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/ee5baabb35f8_.py +0 -0
  52. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/fc191121f522_.py +0 -0
  53. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/alembic/versions/ff0cc4ba40ec_.py +0 -0
  54. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/scripts/create_revision.py +0 -0
  55. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/scripts/stamp.py +0 -0
  56. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/scripts/upgrade.py +0 -0
  57. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/settings.py +0 -0
  58. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/exceptions.py +0 -0
  59. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/figures/__init__.py +0 -0
  60. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/figures/figures.py +0 -0
  61. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/interface/__init__.py +0 -0
  62. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/jobs/__init__.py +0 -0
  63. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/jobs/app.py +0 -0
  64. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/jobs/models.py +0 -0
  65. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/utils/__init__.py +0 -0
  66. {bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/utils/checks.py +0 -0
{bullishpy-0.67.0 → bullishpy-0.68.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: bullishpy
- Version: 0.67.0
+ Version: 0.68.0
  Summary:
  Author: aan
  Author-email: andoludovic.andriamamonjy@gmail.com
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/analysis.py
@@ -528,6 +528,13 @@ class SubjectAnalysis(BaseModel):
      summary: Annotated[Optional[Dict[str, Any]], BeforeValidator(json_loads)] = None
      upside: Optional[float] = None

+     oai_high_price_target: Optional[float] = None
+     oai_low_price_target: Optional[float] = None
+     oai_news_date: Optional[datetime] = None
+     oai_recent_news: Optional[str] = None
+     oai_recommendation: Optional[str] = None
+     oai_explanation: Optional[str] = None
+
      def compute_upside(self, last_price: float) -> None:
          if self.high_price_target is not None:
              self.upside = (
@@ -541,12 +548,17 @@ class SubjectAnalysis(BaseModel):
              return None
          return "".join(
              [
-                 f"<p>{scrub(t.get('content').replace("\n",""))}</p>" # type: ignore
+                 f"<p>{t.get('content').replace("\n","")}</p>" # type: ignore
                  for t in self.news_summary
                  if t.get("content")
              ]
          )

+     def to_date(self) -> Optional[date]:
+         if self.news_date:
+             return self.news_date.date()
+         return None
+

  class Analysis(SubjectAnalysis, AnalysisEarningsDate, AnalysisView, BaseEquity, TechnicalAnalysis, FundamentalAnalysis): # type: ignore

bullishpy-0.68.0/bullish/analysis/openai.py (new file)
@@ -0,0 +1,89 @@
+ import json
+ import logging
+ import os
+ from datetime import date
+ from typing import Optional, List, TYPE_CHECKING
+
+ from pydantic import BaseModel, Field
+ from openai import OpenAI
+
+ if TYPE_CHECKING:
+     from bullish.database.crud import BullishDb
+
+ logger = logging.getLogger(__name__)
+
+
+ def prompt(ticker: str) -> str:
+     return f"""
+     You are a financial analysis assistant.
+
+     Using the latest reliable public data from the web — including analyst price targets from multiple reputable
+     sources — analyze the stock ticker {ticker}.
+
+     Return ONLY valid JSON matching EXACTLY the schema below — no explanations, no preamble, no markdown, no code
+     fences, no extra text:
+
+     {{
+         "high_price_target": float, // Analyst consensus high price target in USD (based on multiple sources)
+         "low_price_target": float, // Analyst consensus low price target in USD (based on multiple sources)
+         "recent_news": str, // Detailed, multi-sentence summary of recent news affecting the company;
+             include credible source names inline
+         "recommendation": str, // One of: "Strong Buy", "Buy", "Hold", "Sell", "Strong Sell"
+         "explanation": str // Concise explanation for the recommendation above, covering key pros/cons
+             for investors
+     }}
+
+     Formatting rules:
+     - Output must be a single valid JSON object with no surrounding text or formatting.
+     - Use plain numbers for high_price_target and low_price_target (no currency symbols, no commas).
+     - All text fields must be professional, investor-oriented, and reference credible named sources in `recent_news`.
+     - If exact data is unavailable, estimate based on web search results and note uncertainty in the relevant field.
+     """
+
+
+ class OpenAINews(BaseModel):
+     symbol: str
+     news_date: date = Field(default_factory=date.today)
+     high_price_target: Optional[float] = None
+     low_price_target: Optional[float] = None
+     recent_news: Optional[str] = None
+     recommendation: Optional[str] = None
+     explanation: Optional[str] = None
+
+     def valid(self) -> bool:
+         return bool(
+             self.model_dump(
+                 exclude_none=True,
+                 exclude_unset=True,
+                 exclude_defaults=True,
+                 exclude={"symbol"},
+             )
+         )
+
+     @classmethod
+     def from_ticker(cls, ticker: str) -> "OpenAINews":
+         if "OPENAI_API_KEY" not in os.environ:
+             return cls(symbol=ticker)
+         print(f"Fetching OpenAI news for {ticker}...")
+         client = OpenAI()
+         resp = client.responses.create(
+             model="gpt-4o-mini", input=prompt(ticker), tools=[{"type": "web_search"}] # type: ignore
+         )
+         try:
+             return cls.model_validate(json.loads(resp.output_text) | {"symbol": ticker})
+         except Exception as e:
+             logger.error(f"Failed to parse OpenAI response for {ticker}: {e}")
+             return cls(symbol=ticker)
+
+     @classmethod
+     def from_tickers(cls, tickers: List[str]) -> List["OpenAINews"]:
+         return [cls.from_ticker(t) for t in tickers]
+
+
+ def get_open_ai_news(bullish_db: "BullishDb", tickers: List[str]) -> bool:
+     news = OpenAINews.from_tickers(tickers)
+     valid_news = [n for n in news if n.valid()]
+     if valid_news:
+         bullish_db.write_many_openai_news(valid_news)
+         return True
+     return False
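Note: the new helper degrades gracefully, `OpenAINews.from_ticker` returns an empty record whenever OPENAI_API_KEY is not set, and `get_open_ai_news` persists only records that actually carry data. A minimal usage sketch (not part of the release; the database path is a placeholder):

    from bullish.analysis.openai import OpenAINews, get_open_ai_news
    from bullish.database.crud import BullishDb

    db = BullishDb(database_path="bullish.db")  # placeholder path
    record = OpenAINews.from_ticker("AAPL")     # empty record when OPENAI_API_KEY is unset
    print(record.valid(), record.recommendation)

    # Fetch several tickers; returns True only if at least one valid record was written.
    print(get_open_ai_news(db, ["AAPL", "MSFT"]))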
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/analysis/predefined_filters.py
@@ -135,17 +135,29 @@ class NamedFilterQuery(FilterQuery):
              self.model_dump() | {"name": f"{self.name} ({suffix})", **properties}
          )

-     def top_performers(self) -> "NamedFilterQuery":
+     def week_top_performers(self) -> "NamedFilterQuery":
+         properties = {
+             "volume_above_average": DATE_THRESHOLD,
+             "weekly_growth": [1, 100],
+         }
+         return self._custom_variant("Week Top Performers", properties)
+
+     def month_top_performers(self) -> "NamedFilterQuery":
+         properties = {
+             "monthly_growth": [8, 100],
+         }
+         return self._custom_variant("Month Top Performers", properties)
+
+     def year_top_performers(self) -> "NamedFilterQuery":
          properties = {
              "volume_above_average": DATE_THRESHOLD,
              "sma_50_above_sma_200": [
                  datetime.date.today() - datetime.timedelta(days=5000),
                  datetime.date.today(),
              ],
-             "weekly_growth": [1, 100],
-             "monthly_growth": [8, 100],
+             "yearly_growth": [30, 100],
          }
-         return self._custom_variant("Top Performers", properties)
+         return self._custom_variant("Yearly Top Performers", properties)

      def poor_performers(self) -> "NamedFilterQuery":
          properties = {
@@ -161,7 +173,7 @@ class NamedFilterQuery(FilterQuery):
          }
          return self._custom_variant("Poor Performers", properties)

-     def fundamentals(self) -> "NamedFilterQuery":
+     def yearly_fundamentals(self) -> "NamedFilterQuery":
          properties = {
              "income": [
                  "positive_operating_income",
@@ -170,20 +182,63 @@ class NamedFilterQuery(FilterQuery):
                  "growing_operating_income",
              ],
              "cash_flow": ["positive_free_cash_flow", "growing_operating_cash_flow"],
+             "properties": [
+                 "positive_return_on_equity",
+                 "operating_cash_flow_is_higher_than_net_income",
+             ],
+         }
+         return self._custom_variant("Yearly Fundamentals", properties)
+
+     def quarterly_fundamentals(self) -> "NamedFilterQuery":
+         properties = {
+             "income": [
+                 "quarterly_positive_operating_income",
+                 "quarterly_positive_net_income",
+             ],
+             "cash_flow": [
+                 "quarterly_positive_free_cash_flow",
+             ],
+             "properties": [
+                 "quarterly_operating_cash_flow_is_higher_than_net_income",
+             ],
+         }
+         return self._custom_variant("Quarterly Fundamentals", properties)
+
+     def growing_quarterly_fundamentals(self) -> "NamedFilterQuery":
+         properties = {
+             "income": [
+                 "quarterly_positive_operating_income",
+                 "quarterly_positive_net_income",
+                 "quarterly_growing_net_income",
+             ],
+             "cash_flow": [
+                 "quarterly_positive_free_cash_flow",
+                 "quarterly_growing_operating_cash_flow",
+             ],
+             "properties": [
+                 "quarterly_operating_cash_flow_is_higher_than_net_income",
+             ],
+         }
+         return self._custom_variant("Growing Quarterly Fundamentals", properties)
+
+     def min_fundamentals(self) -> "NamedFilterQuery":
+         properties = {
+             "income": [
+                 "positive_operating_income",
+                 "positive_net_income",
+             ],
+             "cash_flow": [
+                 "positive_free_cash_flow",
+             ],
              "eps": [
-                 "growing_basic_eps",
-                 "growing_diluted_eps",
-                 "positive_basic_eps",
-                 "positive_diluted_eps",
+                 "positive_diluted_eps", # or positive_basic_eps if diluted not available
              ],
              "properties": [
-                 "positive_return_on_assets",
                  "positive_return_on_equity",
-                 "positive_debt_to_equity",
                  "operating_cash_flow_is_higher_than_net_income",
              ],
          }
-         return self._custom_variant("Fundamentals", properties)
+         return self._custom_variant("Min Fundamentals", properties)

      def high_growth(self) -> "NamedFilterQuery":
          properties = {"industry": list(get_args(HighGrowthIndustry))}
@@ -253,7 +308,7 @@ class NamedFilterQuery(FilterQuery):
                  filter__ = getattr(filter__, attr)()
              filters_.append(filter__)

-         return filters_
+         return [self, *filters_]


  def load_custom_filters() -> List[NamedFilterQuery]:
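Note: with the `return [self, *filters_]` change above, `variants()` now keeps the base query itself in the returned list in addition to the chained variants. A rough sketch of the expansion, assuming the constructor fields used elsewhere in this file and the `_custom_variant` naming pattern shown above:

    # Sketch only: how .variants() expands after this change; field values are illustrative.
    base = NamedFilterQuery(name="Mid Caps", market_capitalization=[5e8, 1e10])
    result = base.variants(
        variants=[
            ["week_top_performers"],
            ["month_top_performers", "min_fundamentals"],
        ]
    )
    # result[0] is the unmodified base query (the new behaviour),
    # result[1].name == "Mid Caps (Week Top Performers)",
    # result[2].name == "Mid Caps (Month Top Performers) (Min Fundamentals)".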
@@ -285,10 +340,10 @@ SMALL_CAP = NamedFilterQuery(
      order_by_desc="market_capitalization",
  ).variants(
      variants=[
-         ["europe", "top_performers", "fundamentals"],
-         ["us", "top_performers", "fundamentals"],
-         ["europe", "earnings_date"],
-         ["us", "earnings_date"],
+         ["week_top_performers", "min_fundamentals"],
+         ["month_top_performers", "min_fundamentals"],
+         ["earnings_date", "min_fundamentals"],
+         ["rsi_oversold_", "min_fundamentals"],
      ]
  )

@@ -298,16 +353,13 @@ LARGE_CAPS = NamedFilterQuery(
      market_capitalization=[1e10, 1e14],
  ).variants(
      variants=[
-         ["europe", "rsi_oversold_", "macd", "fundamentals"],
-         ["us", "rsi_oversold_", "macd", "adx", "fundamentals"],
-         ["europe", "rsi_neutral_", "macd", "adx", "fundamentals"],
-         ["us", "rsi_neutral_", "macd", "adx", "fundamentals"],
-         ["europe", "rsi_30", "macd", "adx", "fundamentals"],
-         ["us", "rsi_30", "macd", "adx", "fundamentals"],
-         ["europe", "top_performers", "cheap"],
-         ["us", "top_performers", "cheap"],
-         ["europe", "earnings_date"],
-         ["us", "earnings_date"],
+         ["rsi_oversold_", "macd", "yearly_fundamentals"],
+         ["rsi_neutral_", "macd", "adx", "yearly_fundamentals"],
+         ["rsi_30", "macd", "adx", "yearly_fundamentals"],
+         ["rsi_oversold_", "macd", "quarterly_fundamentals"],
+         ["rsi_neutral_", "macd", "adx", "quarterly_fundamentals"],
+         ["rsi_30", "macd", "adx", "quarterly_fundamentals"],
+         ["earnings_date", "quarterly_fundamentals", "yearly_fundamentals"],
      ]
  )

@@ -317,10 +369,10 @@ MID_CAPS = NamedFilterQuery(
      market_capitalization=[5e8, 1e10],
  ).variants(
      variants=[
-         ["europe", "top_performers", "fundamentals"],
-         ["us", "top_performers", "fundamentals"],
-         ["europe", "earnings_date"],
-         ["us", "earnings_date"],
+         ["week_top_performers"],
+         ["month_top_performers"],
+         ["earnings_date", "quarterly_fundamentals", "yearly_fundamentals"],
+         ["rsi_oversold_", "macd", "adx"],
      ]
  )

{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/app/app.py
@@ -290,15 +290,21 @@ def dialog_plot_figure() -> None:
      <div class="news-hover" >
          📰 <span class="label">News</span>
          <div class="tooltip">
-             <h2>Date: {st.session_state.ticker_news.news_date.date()}</h2>
+             <h2>Date: {st.session_state.ticker_news.to_date()}</h2>
              <h2>Price targets</h2>
              <p>High price target: {st.session_state.ticker_news.high_price_target}</p>
              <p>Low price target: {st.session_state.ticker_news.low_price_target}</p>
+             <p>OpenAI High price target: {st.session_state.ticker_news.oai_high_price_target}</p>
+             <p>OpenAI Low price target: {st.session_state.ticker_news.oai_low_price_target}</p>
              <h2>Recommendation: {st.session_state.ticker_news.recommendation}</h2>
+             <h2>OpenAI Recommendation: {st.session_state.ticker_news.oai_recommendation}</h2>
              <h2>Consensus: {st.session_state.ticker_news.consensus}</h2>
              <h2>Explanation & reasons</h2>
              <p>{st.session_state.ticker_news.explanation}</p>
              <p>{st.session_state.ticker_news.reason}</p>
+             <p>{st.session_state.ticker_news.oai_explanation}</p>
+             <h2>Recent news</h2>
+             <p>{st.session_state.ticker_news.oai_recent_news}</p>
              <h2>News summaries</h2>
              {st.session_state.ticker_news.to_news()}
          </div>
bullishpy-0.68.0/bullish/database/alembic/versions/b36c310f49ec_.py (new file)
@@ -0,0 +1,43 @@
+ """
+
+ Revision ID: b36c310f49ec
+ Revises: 260fcff7212e
+ Create Date: 2025-08-14 22:39:38.207093
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import sqlite
+ import sqlmodel
+
+ # revision identifiers, used by Alembic.
+ revision: str = "b36c310f49ec"
+ down_revision: Union[str, None] = "cc28171c21a4"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "openai",
+         sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("news_date", sa.Date(), nullable=False),
+         sa.Column("high_price_target", sa.Float(), nullable=True),
+         sa.Column("low_price_target", sa.Float(), nullable=True),
+         sa.Column("recent_news", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("recommendation", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("explanation", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.PrimaryKeyConstraint("symbol", "news_date"),
+     )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("openai")
+     # ### end Alembic commands ###
bullishpy-0.68.0/bullish/database/alembic/versions/c828e29e1105_.py (new file)
@@ -0,0 +1,87 @@
+ """
+
+ Revision ID: c828e29e1105
+ Revises: b36c310f49ec
+ Create Date: 2025-08-15 17:57:09.541454
+
+ """
+
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ from sqlalchemy.dialects import sqlite
+ import sqlmodel
+
+ # revision identifiers, used by Alembic.
+ revision: str = "c828e29e1105"
+ down_revision: Union[str, None] = "b36c310f49ec"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+
+     with op.batch_alter_table("analysis", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column("oai_high_price_target", sa.Float(), nullable=True)
+         )
+         batch_op.add_column(
+             sa.Column("oai_low_price_target", sa.Float(), nullable=True)
+         )
+         batch_op.add_column(sa.Column("oai_news_date", sa.DateTime(), nullable=True))
+         batch_op.add_column(
+             sa.Column(
+                 "oai_recent_news", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "oai_recommendation", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "oai_explanation", sqlmodel.sql.sqltypes.AutoString(), nullable=True
+             )
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_explanation", ["oai_explanation"], unique=False
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_high_price_target", ["oai_high_price_target"], unique=False
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_low_price_target", ["oai_low_price_target"], unique=False
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_news_date", ["oai_news_date"], unique=False
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_recent_news", ["oai_recent_news"], unique=False
+         )
+         batch_op.create_index(
+             "ix_analysis_oai_recommendation", ["oai_recommendation"], unique=False
+         )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("analysis", schema=None) as batch_op:
+         batch_op.drop_index("ix_analysis_oai_recommendation")
+         batch_op.drop_index("ix_analysis_oai_recent_news")
+         batch_op.drop_index("ix_analysis_oai_news_date")
+         batch_op.drop_index("ix_analysis_oai_low_price_target")
+         batch_op.drop_index("ix_analysis_oai_high_price_target")
+         batch_op.drop_index("ix_analysis_oai_explanation")
+         batch_op.drop_column("oai_explanation")
+         batch_op.drop_column("oai_recommendation")
+         batch_op.drop_column("oai_recent_news")
+         batch_op.drop_column("oai_news_date")
+         batch_op.drop_column("oai_low_price_target")
+         batch_op.drop_column("oai_high_price_target")
+
+     # ### end Alembic commands ###
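Note: the two revisions form a chain (b36c310f49ec creates the openai table, c828e29e1105 adds and indexes the oai_* columns on analysis), and the package normally applies them through bullish/database/scripts/upgrade.py. A hedged sketch of running them directly with Alembic's Python API; the ini path and SQLite URL are assumptions:

    from alembic import command
    from alembic.config import Config

    cfg = Config("bullish/database/alembic/alembic.ini")           # bundled ini, assumed usable as-is
    cfg.set_main_option("sqlalchemy.url", "sqlite:///bullish.db")  # placeholder database URL
    command.upgrade(cfg, "c828e29e1105")  # applies b36c310f49ec first, then c828e29e1105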
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/crud.py
@@ -3,7 +3,7 @@ import logging
  from datetime import date
  from functools import cached_property
  from pathlib import Path
- from typing import TYPE_CHECKING, Any, List, Optional
+ from typing import TYPE_CHECKING, Any, List, Optional, Dict

  import pandas as pd
  from bearish.database.crud import BearishDb # type: ignore
@@ -22,6 +22,7 @@ from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
  from bullish.analysis.filter import FilteredResults
  from bullish.analysis.indicators import SignalSeries
  from bullish.analysis.industry_views import Type, IndustryView
+
  from bullish.database.schemas import (
      AnalysisORM,
      JobTrackerORM,
@@ -29,6+30,7 @@ from bullish.database.schemas import (
      IndustryViewORM,
      SignalSeriesORM,
      BacktestResultORM,
+     OpenAINewsORM,
  )
  from bullish.database.scripts.upgrade import upgrade
  from bullish.exceptions import DatabaseFileNotFoundError
@@ -38,6 +40,7 @@ from tickermood.database.scripts.upgrade import upgrade as tickermood_upgrade #

  if TYPE_CHECKING:
      from bullish.analysis.backtest import BacktestResult, BacktestResultQuery
+     from bullish.analysis.openai import OpenAINews

  logger = logging.getLogger(__name__)

@@ -358,11 +361,52 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
              LIMIT 1
              """
          )
+         sql_oai = text(
+             """
+             SELECT *
+             FROM openai
+             WHERE symbol = :symbol
+             ORDER BY news_date DESC
+             LIMIT 1
+             """
+         )

          with Session(self._engine) as session:
              row = session.execute(sql, {"symbol": symbol}).mappings().one_or_none()
+             row_oai = (
+                 session.execute(sql_oai, {"symbol": symbol}).mappings().one_or_none()
+             )
+             row_dict = {}
              if row:
                  row_dict = dict(row)
                  row_dict = row_dict | {"news_date": row_dict["date"]}
-                 return SubjectAnalysis.model_validate(row_dict)
-             return None
+             if row_oai:
+                 row_dict_oai = dict(row_oai)
+                 row_dict = row_dict | {
+                     "oai_news_date": row_dict_oai.get("news_date"),
+                     "oai_recent_news": row_dict_oai.get("recent_news"),
+                     "oai_recommendation": row_dict_oai.get("recommendation"),
+                     "oai_explanation": row_dict_oai.get("explanation"),
+                     "oai_high_price_target": row_dict_oai.get("high_price_target"),
+                     "oai_low_price_target": row_dict_oai.get("low_price_target"),
+                 }
+
+             return SubjectAnalysis.model_validate(row_dict)
+
+     def write_many_openai_news(self, openai_news: List["OpenAINews"]) -> None:
+         with Session(self._engine) as session:
+             stmt = (
+                 insert(OpenAINewsORM)
+                 .prefix_with("OR REPLACE")
+                 .values([a.model_dump() for a in openai_news])
+             )
+             session.exec(stmt) # type: ignore
+             session.commit()
+
+     def update_analysis(self, symbol: str, fields: Dict[str, Any]) -> None:
+         with Session(self._engine) as session:
+             stmt = (
+                 update(AnalysisORM).where(AnalysisORM.symbol == symbol).values(**fields) # type: ignore
+             )
+             session.exec(stmt) # type: ignore
+             session.commit()
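Note: write_many_openai_news uses INSERT OR REPLACE, so re-fetching a ticker on the same day overwrites that day's row rather than duplicating it (the primary key is symbol plus news_date, see the migration above). A short usage sketch with illustrative values; the database path is a placeholder:

    from bullish.analysis.openai import OpenAINews
    from bullish.database.crud import BullishDb

    db = BullishDb(database_path="bullish.db")  # placeholder path
    db.write_many_openai_news(
        [OpenAINews(symbol="AAPL", recommendation="Hold", high_price_target=250.0)]
    )

    subject = db.read_subject("AAPL")  # SubjectAnalysis now carries the oai_* fields
    if subject:
        db.update_analysis("AAPL", {"oai_recommendation": subject.oai_recommendation})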
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/database/schemas.py
@@ -1,3 +1,4 @@
+ from datetime import date
  from typing import Dict, Any, List, Optional

  from sqlmodel import Field, SQLModel
@@ -7,6 +8,7 @@ from bullish.analysis.backtest import BacktestResult
  from bullish.analysis.filter import FilteredResults
  from bullish.analysis.indicators import SignalSeries
  from bullish.analysis.industry_views import IndustryView
+ from bullish.analysis.openai import OpenAINews

  from bullish.jobs.models import JobTracker
  from sqlalchemy import Index
@@ -22,6 +24,13 @@ dynamic_indexes = tuple(
  )


+ class OpenAINewsORM(SQLModel, OpenAINews, table=True):
+     __tablename__ = "openai"
+     __table_args__ = {"extend_existing": True} # noqa:RUF012
+     symbol: str = Field(primary_key=True)
+     news_date: date = Field(primary_key=True)
+
+
  class AnalysisORM(BaseTable, Analysis, table=True):
      __tablename__ = "analysis"
      __table_args__ = {"extend_existing": True} # noqa:RUF012
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/interface/interface.py
@@ -1,7 +1,7 @@
  import abc
  import logging
  from datetime import date
- from typing import List, Optional
+ from typing import List, Optional, Dict, Any

  import pandas as pd
  from bearish.interface.interface import BearishDbBase # type: ignore
@@ -15,6 +15,7 @@ from bullish.analysis.constants import Industry, Sector, IndustryGroup, Country
  from bullish.analysis.filter import FilterQuery, FilteredResults
  from bullish.analysis.indicators import SignalSeries
  from bullish.analysis.industry_views import Type, IndustryView
+ from bullish.analysis.openai import OpenAINews
  from bullish.jobs.models import JobTracker, JobTrackerStatus, add_icons

  logger = logging.getLogger(__name__)
@@ -155,3 +156,8 @@ class BullishDbBase(BearishDbBase): # type: ignore

      @abc.abstractmethod
      def read_subject(self, symbol: str) -> Optional[SubjectAnalysis]: ...
+     @abc.abstractmethod
+     def write_many_openai_news(self, openai_news: List[OpenAINews]) -> None: ...
+
+     @abc.abstractmethod
+     def update_analysis(self, symbol: str, fields: Dict[str, Any]) -> None: ...
{bullishpy-0.67.0 → bullishpy-0.68.0}/bullish/jobs/tasks.py
@@ -15,6 +15,7 @@ from .models import JobTrackerStatus, JobTracker, JobType
  from ..analysis.analysis import run_analysis, run_signal_series_analysis
  from ..analysis.backtest import run_many_tests, BackTestConfig
  from ..analysis.industry_views import compute_industry_view
+ from ..analysis.openai import get_open_ai_news
  from ..analysis.predefined_filters import predefined_filters, load_custom_filters
  from ..database.crud import BullishDb
  from bullish.analysis.filter import FilterUpdate
@@ -186,6 +187,23 @@ def news(
      headless: bool = True,
      task: Optional[Task] = None,
  ) -> None:
+     bullish_db = BullishDb(database_path=database_path)
+     if get_open_ai_news(bullish_db, symbols):
+         for symbol in symbols:
+             subject = bullish_db.read_subject(symbol)
+             if subject:
+                 logger.debug(
+                     f"extracting news for {symbol} subject: {subject.model_dump()}"
+                 )
+                 bullish_db.update_analysis(
+                     symbol,
+                     subject.model_dump(
+                         exclude_none=True,
+                         exclude_unset=True,
+                         exclude_defaults=True,
+                         exclude={"symbol"},
+                     ),
+                 )
      base_news(
          database_path=database_path,
          job_type=job_type,
@@ -195,7 +213,7 @@ )
      )


- @huey.periodic_task(crontab(minute="0", hour="3"), context=True) # type: ignore
+ @huey.periodic_task(crontab(minute="0", hour="8"), context=True) # type: ignore
  def cron_news(
      task: Optional[Task] = None,
  ) -> None:
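Note: the task copies only fields that were actually populated; the model_dump(exclude_none=True, exclude_unset=True, exclude_defaults=True, exclude={"symbol"}) call strips everything left at its default, which is the same trick OpenAINews.valid() relies on. An illustration with made-up values, using the OpenAINews model added in this release:

    from bullish.analysis.openai import OpenAINews

    empty = OpenAINews(symbol="AAPL")
    full = OpenAINews(symbol="AAPL", recommendation="Buy", high_price_target=120.0)

    print(empty.valid())  # False: nothing besides the defaults was provided
    print(full.valid())   # True
    print(full.model_dump(exclude_none=True, exclude_unset=True, exclude_defaults=True, exclude={"symbol"}))
    # {'recommendation': 'Buy', 'high_price_target': 120.0}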
{bullishpy-0.67.0 → bullishpy-0.68.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "bullishpy"
- version = "0.67.0"
+ version = "0.68.0"
  description = ""
  authors = ["aan <andoludovic.andriamamonjy@gmail.com>"]
  readme = "README.md"