bullishpy 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of bullishpy might be problematic. See the registry page for details.

@@ -0,0 +1,148 @@
+ """
+
+ Revision ID: 037dbd721317
+ Revises:
+ Create Date: 2025-06-14 16:50:56.919222
+
+ """
+ from typing import Sequence, Union
+
+ import sqlalchemy as sa
+ import sqlmodel
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision: str = "037dbd721317"
+ down_revision: Union[str, None] = None
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "analysis",
+         sa.Column("quarterly_positive_free_cash_flow", sa.Float(), nullable=True),
+         sa.Column("quarterly_growing_operating_cash_flow", sa.Float(), nullable=True),
+         sa.Column(
+             "quarterly_operating_cash_flow_is_higher_than_net_income",
+             sa.Float(),
+             nullable=True,
+         ),
+         sa.Column("quarterly_mean_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_max_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_min_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_mean_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_max_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_min_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_net_income", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_operating_income", sa.Float(), nullable=True),
+         sa.Column("quarterly_growing_net_income", sa.Float(), nullable=True),
+         sa.Column("quarterly_growing_operating_income", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_diluted_eps", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_basic_eps", sa.Float(), nullable=True),
+         sa.Column("quarterly_growing_basic_eps", sa.Float(), nullable=True),
+         sa.Column("quarterly_growing_diluted_eps", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_debt_to_equity", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_return_on_assets", sa.Float(), nullable=True),
+         sa.Column("quarterly_positive_return_on_equity", sa.Float(), nullable=True),
+         sa.Column("quarterly_earning_per_share", sa.Float(), nullable=True),
+         sa.Column("positive_free_cash_flow", sa.Float(), nullable=True),
+         sa.Column("growing_operating_cash_flow", sa.Float(), nullable=True),
+         sa.Column(
+             "operating_cash_flow_is_higher_than_net_income", sa.Float(), nullable=True
+         ),
+         sa.Column("mean_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("max_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("min_capex_ratio", sa.Float(), nullable=True),
+         sa.Column("mean_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("max_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("min_dividend_payout_ratio", sa.Float(), nullable=True),
+         sa.Column("positive_net_income", sa.Float(), nullable=True),
+         sa.Column("positive_operating_income", sa.Float(), nullable=True),
+         sa.Column("growing_net_income", sa.Float(), nullable=True),
+         sa.Column("growing_operating_income", sa.Float(), nullable=True),
+         sa.Column("positive_diluted_eps", sa.Float(), nullable=True),
+         sa.Column("positive_basic_eps", sa.Float(), nullable=True),
+         sa.Column("growing_basic_eps", sa.Float(), nullable=True),
+         sa.Column("growing_diluted_eps", sa.Float(), nullable=True),
+         sa.Column("positive_debt_to_equity", sa.Float(), nullable=True),
+         sa.Column("positive_return_on_assets", sa.Float(), nullable=True),
+         sa.Column("positive_return_on_equity", sa.Float(), nullable=True),
+         sa.Column("earning_per_share", sa.Float(), nullable=True),
+         sa.Column("rsi_last_value", sa.Float(), nullable=True),
+         sa.Column("macd_12_26_9_buy_date", sa.Date(), nullable=True),
+         sa.Column("ma_50_200_buy_date", sa.Date(), nullable=True),
+         sa.Column("slope_7", sa.Float(), nullable=True),
+         sa.Column("slope_14", sa.Float(), nullable=True),
+         sa.Column("slope_30", sa.Float(), nullable=True),
+         sa.Column("slope_60", sa.Float(), nullable=True),
+         sa.Column("last_adx", sa.Float(), nullable=True),
+         sa.Column("last_dmp", sa.Float(), nullable=True),
+         sa.Column("last_dmn", sa.Float(), nullable=True),
+         sa.Column("last_price", sa.Float(), nullable=True),
+         sa.Column("last_price_date", sa.Date(), nullable=True),
+         sa.Column("year_to_date_growth", sa.Float(), nullable=True),
+         sa.Column("last_52_weeks_growth", sa.Float(), nullable=True),
+         sa.Column("last_week_growth", sa.Float(), nullable=True),
+         sa.Column("last_month_growth", sa.Float(), nullable=True),
+         sa.Column("last_year_growth", sa.Float(), nullable=True),
+         sa.Column("year_to_date_max_growth", sa.Float(), nullable=True),
+         sa.Column("last_week_max_growth", sa.Float(), nullable=True),
+         sa.Column("last_month_max_growth", sa.Float(), nullable=True),
+         sa.Column("last_year_max_growth", sa.Float(), nullable=True),
+         sa.Column("macd_12_26_9_buy", sa.Float(), nullable=True),
+         sa.Column("star_yoy", sa.Float(), nullable=True),
+         sa.Column("star_wow", sa.Float(), nullable=True),
+         sa.Column("star_mom", sa.Float(), nullable=True),
+         sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("exchange", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("date", sa.Date(), nullable=False),
+         sa.Column("created_at", sa.Date(), nullable=False),
+         sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("isin", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("base_symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("modifier", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("summary", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("currency", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("market", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("country", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("sector", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("industry_group", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("industry", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("website", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("market_capitalization", sa.Float(), nullable=True),
+         sa.Column("book_value", sa.Float(), nullable=True),
+         sa.Column("price_to_book", sa.Float(), nullable=True),
+         sa.Column("trailing_price_to_earnings", sa.Float(), nullable=True),
+         sa.Column("dividend_yield", sa.Float(), nullable=True),
+         sa.Column("dividend_rate", sa.Float(), nullable=True),
+         sa.Column("trailing_earnings_per_share", sa.Float(), nullable=True),
+         sa.Column("forward_earnings_per_share", sa.Float(), nullable=True),
+         sa.Column("return_on_equity", sa.Float(), nullable=True),
+         sa.Column("operating_margins", sa.Float(), nullable=True),
+         sa.Column("gross_margins", sa.Float(), nullable=True),
+         sa.Column("revenue_growth", sa.Float(), nullable=True),
+         sa.Column("price_per_earning_ratio", sa.Float(), nullable=True),
+         sa.PrimaryKeyConstraint("symbol", "source"),
+     )
+     op.create_table(
+         "view",
+         sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("exchange", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("date", sa.Date(), nullable=False),
+         sa.Column("created_at", sa.Date(), nullable=False),
+         sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("isin", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("view_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.PrimaryKeyConstraint("symbol", "source"),
+     )
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("view")
+     op.drop_table("analysis")
+     # ### end Alembic commands ###
@@ -0,0 +1,50 @@
+ """
+
+ Revision ID: 4b0a2f40b7d3
+ Revises: 037dbd721317
+ Create Date: 2025-06-20 09:17:53.566652
+
+ """
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ import sqlmodel
+
+ # revision identifiers, used by Alembic.
+ revision: str = "4b0a2f40b7d3"
+ down_revision: Union[str, None] = "037dbd721317"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "jobtracker",
+         sa.Column("job_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("type", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("status", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("started_at", sa.DateTime(), nullable=False),
+         sa.PrimaryKeyConstraint("job_id"),
+     )
+     op.drop_table("view")
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("jobtracker")
+     op.create_table(
+         "view",
+         sa.Column("symbol", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("exchange", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("source", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("date", sa.Date(), nullable=False),
+         sa.Column("created_at", sa.Date(), nullable=False),
+         sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("isin", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.Column("view_name", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+         sa.PrimaryKeyConstraint("symbol", "source"),
+     )
+     # ### end Alembic commands ###
@@ -0,0 +1,37 @@
+ """
+
+ Revision ID: 73564b60fe24
+ Revises: 4b0a2f40b7d3
+ Create Date: 2025-06-20 17:08:28.818293
+
+ """
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ import sqlmodel
+
+ # revision identifiers, used by Alembic.
+ revision: str = "73564b60fe24"
+ down_revision: Union[str, None] = "4b0a2f40b7d3"
+ branch_labels: Union[str, Sequence[str], None] = None
+ depends_on: Union[str, Sequence[str], None] = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "filteredresults",
+         sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+         sa.Column("filter_query", sa.JSON(), nullable=True),
+         sa.Column("symbols", sa.JSON(), nullable=True),
+         sa.PrimaryKeyConstraint("name"),
+     )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("filteredresults")
+     # ### end Alembic commands ###
@@ -0,0 +1,146 @@
+ import logging
+ from functools import cached_property
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, List, Optional
+
+ import pandas as pd
+ from bearish.database.crud import BearishDb  # type: ignore
+ from bearish.models.base import Ticker  # type: ignore
+ from pydantic import ConfigDict
+ from sqlalchemy import Engine, create_engine, insert, delete, update
+ from sqlmodel import Session, select
+
+ from bullish.analysis.analysis import Analysis
+ from bullish.database.schemas import AnalysisORM, JobTrackerORM, FilteredResultsORM
+ from bullish.database.scripts.upgrade import upgrade
+ from bullish.exceptions import DatabaseFileNotFoundError
+ from bullish.analysis.filter import FilteredResults
+ from bullish.interface.interface import BullishDbBase
+ from bullish.jobs.models import JobTracker, JobTrackerStatus
+
+ if TYPE_CHECKING:
+     pass
+
+ logger = logging.getLogger(__name__)
+
+ BATCH_SIZE = 5000
+
+
+ class BullishDb(BearishDb, BullishDbBase):  # type: ignore
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+     database_path: Path
+
+     def valid(self) -> bool:
+         """Check if the database is valid."""
+         return self.database_path.exists() and self.database_path.is_file()
+
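+     # The engine is built lazily: the Alembic upgrade() below applies any pending
+     # migrations to the SQLite file before the engine is handed out.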
+     @cached_property
+     def _engine(self) -> Engine:
+         if not self.valid():
+             raise DatabaseFileNotFoundError("Database file not found.")
+         database_url = f"sqlite:///{Path(self.database_path)}"
+         upgrade(self.database_path)
+         engine = create_engine(database_url)
+         return engine
+
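+     # Touch the cached engine at construction time so a missing database file or a
+     # failed migration surfaces immediately rather than on the first query.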
+     def model_post_init(self, __context: Any) -> None:
+         self._engine  # noqa: B018
+
+     def _write_analysis(self, analysis: Analysis) -> None:
+         with Session(self._engine) as session:
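+             # SQLite "INSERT OR REPLACE": a row with the same primary key
+             # (symbol, source) is overwritten instead of raising an integrity error.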
+             stmt = (
+                 insert(AnalysisORM)
+                 .prefix_with("OR REPLACE")
+                 .values(analysis.model_dump())
+             )
+             session.exec(stmt)  # type: ignore
+             session.commit()
+
+     def _read_analysis(self, ticker: Ticker) -> Optional[Analysis]:
+         with Session(self._engine) as session:
+             query = select(AnalysisORM).where(AnalysisORM.symbol == ticker.symbol)
+             analysis = session.exec(query).first()
+             if not analysis:
+                 return None
+             return Analysis.model_validate(analysis)
+
+     def _read_analysis_data(
+         self, columns: List[str], symbols: Optional[List[str]] = None
+     ) -> pd.DataFrame:
+         columns_ = ",".join(columns)
+         if symbols:
+             symbols_str = ",".join([f"'{s}'" for s in symbols])
+             query = f"""SELECT {columns_} FROM analysis WHERE symbol IN ({symbols_str})"""  # noqa: S608
+         else:
+             query = f"""SELECT {columns_} FROM analysis"""  # noqa: S608
+         return pd.read_sql_query(query, self._engine)
+
+     def _read_filter_query(self, query: str) -> pd.DataFrame:
+         return pd.read_sql(
+             query,
+             con=self._engine,
+         )
+
+     def _read_job_trackers(self) -> pd.DataFrame:
+         query = "SELECT * FROM jobtracker ORDER BY started_at DESC"
+         return pd.read_sql_query(query, self._engine)
+
+     def write_job_tracker(self, job_tracker: JobTracker) -> None:
+         with Session(self._engine) as session:
+             stmt = (
+                 insert(JobTrackerORM)
+                 .prefix_with("OR REPLACE")
+                 .values(job_tracker.model_dump())
+             )
+             session.exec(stmt)  # type: ignore
+             session.commit()
+
+     def delete_job_trackers(self, job_ids: List[str]) -> None:
+         with Session(self._engine) as session:
+             stmt = delete(JobTrackerORM).where(JobTrackerORM.job_id.in_(job_ids))  # type: ignore
+             result = session.execute(stmt)
+
+             if result.rowcount > 0:  # type: ignore
+                 session.commit()
+             else:
+                 logger.warning(f"Job tracker(s) with ID(s) {job_ids} not found.")
+
+     def update_job_tracker_status(self, job_tracker_status: JobTrackerStatus) -> None:
+         with Session(self._engine) as session:
+             stmt = (
+                 update(JobTrackerORM)
+                 .where(JobTrackerORM.job_id == job_tracker_status.job_id)  # type: ignore
+                 .values(status=job_tracker_status.status)
+             )
+             result = session.execute(stmt)
+
+             if result.rowcount > 0:  # type: ignore
+                 session.commit()
+             else:
+                 logger.warning(
+                     f"Job tracker with ID {job_tracker_status.job_id} not found."
+                 )
+
+     def read_filtered_results(self, name: str) -> Optional[FilteredResults]:
+         with Session(self._engine) as session:
+             stmt = select(FilteredResultsORM).where(FilteredResultsORM.name == name)
+             result = session.execute(stmt).scalar_one_or_none()
+
+             if result:
+                 return FilteredResults.model_validate(result.model_dump())
+             return None
+
+     def read_list_filtered_results(self) -> List[str]:
+         with Session(self._engine) as session:
+             stmt = select(FilteredResultsORM.name)
+             result = session.execute(stmt).scalars().all()
+             return list(result)
+
+     def write_filtered_results(self, filtered_results: FilteredResults) -> None:
+         with Session(self._engine) as session:
+             data = filtered_results.model_dump()
+             stmt = insert(FilteredResultsORM).prefix_with("OR REPLACE").values(data)
+             session.exec(stmt)  # type: ignore
+             session.commit()
@@ -0,0 +1,33 @@
+ from typing import Dict, Any
+
+ from sqlmodel import Field, SQLModel
+ from sqlalchemy import Column, JSON
+ from bullish.analysis.analysis import Analysis
+ from bullish.analysis.filter import FilteredResults
+ from bullish.jobs.models import JobTracker
+
+
+ class BaseTable(SQLModel):
+     symbol: str = Field(primary_key=True)
+     source: str = Field(primary_key=True)
+
+
+ class AnalysisORM(BaseTable, Analysis, table=True):
+     __tablename__ = "analysis"
+     __table_args__ = {"extend_existing": True}  # noqa:RUF012
+
+
+ class JobTrackerORM(SQLModel, JobTracker, table=True):
+     __tablename__ = "jobtracker"
+     __table_args__ = {"extend_existing": True}  # noqa:RUF012
+     job_id: str = Field(primary_key=True)
+     type: str  # type: ignore
+     status: str  # type: ignore
+
+
+ class FilteredResultsORM(SQLModel, FilteredResults, table=True):
+     __tablename__ = "filteredresults"
+     __table_args__ = {"extend_existing": True}  # noqa:RUF012
+     name: str = Field(primary_key=True)
+     symbols: list[str] = Field(sa_column=Column(JSON))
+     filter_query: Dict[str, Any] = Field(sa_column=Column(JSON))  # type: ignore
@@ -0,0 +1,20 @@
+ import os
+ from pathlib import Path
+
+ from alembic import command
+ from alembic.config import Config
+
+ from bullish.database.settings import TEST_DATABASE_URL
+
+
+ def create_revision(database_url: str, message: str) -> None:
+     os.environ.update({"DATABASE_URL": database_url})
+     root_folder = Path(__file__).parents[1]
+     alembic_cfg = Config(root_folder / "alembic" / "alembic.ini")
+     alembic_cfg.set_main_option("script_location", str(root_folder / "alembic"))
+     command.revision(alembic_cfg, message=message, autogenerate=True)
+
+
+ if __name__ == "__main__":
+     message = ""
+     create_revision(TEST_DATABASE_URL, message)
@@ -0,0 +1,25 @@
+ import os
+ import sqlite3
+ from pathlib import Path
+
+ from alembic.config import Config
+
+ from bullish.database.settings import TEST_DATABASE_PATH
+
+
+ def stamp(database_path: Path) -> None:
+     database_url = f"sqlite:///{database_path}"
+     root_folder = Path(__file__).parents[1]
+     os.environ.update({"DATABASE_URL": database_url})
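+     # Dropping alembic_version along with the managed tables resets the migration
+     # state, so a subsequent upgrade() rebuilds the schema from scratch.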
+     with sqlite3.connect(database_path) as conn:
+         conn.execute("DROP TABLE IF EXISTS alembic_version;")
+         conn.execute("DROP TABLE IF EXISTS view;")
+         conn.execute("DROP TABLE IF EXISTS analysis;")
+         conn.execute("DROP TABLE IF EXISTS filteredresults;")
+         conn.commit()
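+     # NOTE: the Alembic config is built here but no stamp command is issued; the
+     # table drops above are what actually reset the migration state.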
+     alembic_cfg = Config(root_folder / "alembic" / "alembic.ini")
+     alembic_cfg.set_main_option("script_location", str(root_folder / "alembic"))
+
+
+ if __name__ == "__main__":
+     stamp(TEST_DATABASE_PATH)
@@ -0,0 +1,27 @@
+ import os
+ from pathlib import Path
+
+ from alembic import command
+ from alembic.config import Config
+ from alembic.util import CommandError
+
+ from bullish.database.scripts.stamp import stamp
+
+ DATABASE_PATH = Path(__file__).parents[3] / "tests" / "data" / "bear.db"
+
+
+ def upgrade(database_path: Path) -> None:
+     root_folder = Path(__file__).parents[1]
+     database_url = f"sqlite:///{database_path}"
+     os.environ.update({"DATABASE_URL": database_url})
+     alembic_cfg = Config(root_folder / "alembic" / "alembic.ini")
+     alembic_cfg.set_main_option("script_location", str(root_folder / "alembic"))
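+     # If upgrading fails (e.g. the revision history in the file is out of sync),
+     # reset the migration state with stamp() and retry once from scratch.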
+     try:
+         command.upgrade(alembic_cfg, "head")
+     except CommandError:
+         stamp(database_path)
+         command.upgrade(alembic_cfg, "head")
+
+
+ if __name__ == "__main__":
+     upgrade(DATABASE_PATH)
@@ -0,0 +1,4 @@
+ from pathlib import Path
+
+ TEST_DATABASE_PATH = Path(__file__).parents[2].joinpath("tests", "data", "bear.db")
+ TEST_DATABASE_URL = f"sqlite:///{TEST_DATABASE_PATH}"
bullish/exceptions.py ADDED
@@ -0,0 +1,2 @@
+ class DatabaseFileNotFoundError(FileNotFoundError):
+     """Raised when the database file is not found."""
@@ -0,0 +1,107 @@
+ from typing import Optional
+
+ import pandas as pd
+ import pandas_ta  # noqa: F401  # registers the pandas `.ta` accessor used below
+ import plotly.graph_objects as go
+ from plotly.subplots import make_subplots
+
+
+ def plot(
+     data: pd.DataFrame,
+     symbol: str,
+     name: Optional[str] = None,
+     dates: Optional[pd.Series] = None,  # type: ignore
+ ) -> go.Figure:
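+     # pandas-ta appends indicator columns in place: SMA_50, SMA_200, the ADX set
+     # (ADX_14/DMP_14/DMN_14), the MACD_12_26_9 family, and RSI_14.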
+     data.ta.sma(50, append=True)
+     data.ta.sma(200, append=True)
+     data.ta.adx(append=True)
+     data.ta.macd(append=True)
+     data.ta.rsi(append=True)
+     fig = make_subplots(
+         rows=4,
+         cols=1,
+         shared_xaxes=True,
+         vertical_spacing=0.1,
+         specs=[
+             [{"rowspan": 2}],  # Row 1: main chart
+             [None],  # Row 2: skipped (part of row 1)
+             [{}],  # Row 3: RSI
+             [{}],  # Row 4: MACD
+         ],
+         subplot_titles=(
+             f"Price + SMAs ({symbol} [{name}])",
+             f"RSI ({symbol} [{name}])",
+             f"MACD ({symbol} [{name}])",
+         ),
+     )
+     # Row 1: Candlestick + SMAs
+     fig.add_trace(
+         go.Candlestick(
+             x=data.index,
+             open=data.open,
+             high=data.high,
+             low=data.low,
+             close=data.close,
+             name="Candlestick",
+         ),
+         row=1,
+         col=1,
+     )
+     fig.update_xaxes(rangeslider_thickness=0.04, row=1, col=1)
+     fig.add_trace(
+         go.Scatter(x=data.index, y=data.SMA_50, name="SMA 50", mode="lines"),
+         row=1,
+         col=1,
+     )
+
+     fig.add_trace(
+         go.Scatter(x=data.index, y=data.SMA_200, name="SMA 200", mode="lines"),
+         row=1,
+         col=1,
+     )
+
+     # Row 3: RSI
+     fig.add_trace(
+         go.Scatter(x=data.index, y=data.RSI_14, name="RSI 14", mode="lines"),
+         row=3,
+         col=1,
+     )
+
+     # Row 4: MACD
+     fig.add_trace(
+         go.Scatter(x=data.index, y=data.MACD_12_26_9, name="MACD", mode="lines"),
+         row=4,
+         col=1,
+     )
+
+     fig.add_trace(
+         go.Scatter(
+             x=data.index, y=data.MACDs_12_26_9, name="MACD Signal", mode="lines"
+         ),
+         row=4,
+         col=1,
+     )
+
+     fig.add_trace(
+         go.Bar(x=data.index, y=data.MACDh_12_26_9, name="MACD Histogram", opacity=0.5),
+         row=4,
+         col=1,
+     )
+     if dates is not None and not dates.empty:
+         for date in dates:
+             fig.add_vline(
+                 x=date, line_dash="dashdot", line_color="MediumPurple", line_width=3
+             )
+
+     # Layout tweaks
+     fig.update_layout(
+         height=900,
+         showlegend=True,
+         title="Technical Indicator Dashboard",
+         margin={"t": 60, "b": 40},
+     )
+
+     # Optional: Add horizontal lines for RSI (e.g., 70/30 levels)
+     fig.add_hline(y=70, line_dash="dash", line_color="red", row=3, col=1)
+     fig.add_hline(y=30, line_dash="dash", line_color="green", row=3, col=1)
+
+     return fig