bullishpy 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic. Click here for more details.

@@ -0,0 +1,123 @@
1
+ from functools import cached_property
2
+ from typing import Literal, Set, get_args, Dict, Any, Optional, List
3
+
4
+ from bearish.types import SeriesLength # type: ignore
5
+ from pydantic import BaseModel, Field
6
+
7
# Closed vocabulary of industry labels the screener can filter on.
# NOTE(review): these strings are compared against values stored in the
# database, so they must match byte-for-byte (including the unusual spacing
# in "Beverages - Non - Alcoholic" and "Insurance - Property & Casual") —
# confirm against the upstream data before editing any entry.
Industry = Literal[
    "Food & Staples Retailing",
    "Packaged Foods",
    "Grocery Stores",
    "Household Products",
    "Household & Personal Products",
    "Confectioners",
    "Beverages",
    "Beverages - Non - Alcoholic",
    "Beverages - Wineries & Distilleries",
    "Pharmaceuticals",
    "Health Care Providers & Services",
    "Health Care Equipment & Supplies",
    "Healthcare Plans",
    "Medical Devices",
    "Medical Instruments & Supplies",
    "Medical Care Facilities",
    "Diagnostics & Research",
    "Drug Manufacturers - General",
    "Drug Manufacturers - Specialty & Generic",
    "Pharmaceutical Retailers",
    "Health Information Services",
    "Medical Distribution",
    "Electric Utilities",
    "Gas Utilities",
    "Water Utilities",
    "Utilities - Diversified",
    "Utilities - Regulated Electric",
    "Utilities - Regulated Gas",
    "Utilities - Renewable",
    "Utilities - Independent Power Producers",
    "Waste Management",
    "Pollution & Treatment Controls",
    "Security & Protection Services",
    "Insurance",
    "Insurance - Property & Casual",
]
44
+
45
# Closed vocabulary of country labels the screener can filter on.
# NOTE(review): "United states" (lower-case "s") looks like a typo but must
# match the value stored in the database — verify before normalising it.
Country = Literal[
    "Germany",
    "France",
    "Netherlands",
    "Belgium",
    "Italy",
    "Spain",
    "Switzerland",
    "Sweden",
    "Denmark",
    "Norway",
    "Finland",
    "Portugal",
    "Austria",
    "United states",
]
61
# SQL comparison operator used by ``FilterQuery.to_query`` for each field
# name; any field not listed here is rendered with ``=``.
SIGNS = {
    "price_per_earning_ratio": "<",
    "market_capitalization": ">",
    "industry": " IN ",
    "country": " IN ",
}
67
+
68
+
69
class FilterQuery(BaseModel):
    """Stock-screening criteria selected by the user.

    ``to_query`` renders the non-default criteria as a SQL ``WHERE``
    expression; ``SIGNS`` supplies the comparison operator per field
    (``=`` for fields not listed there).
    """

    # Fix: the original description ("The username for the database.") was a
    # copy-paste error unrelated to this field.
    positive_free_cash_flow: bool = Field(
        False, description="Only keep stocks with a positive free cash flow."
    )
    positive_net_income: bool = False
    positive_operating_income: bool = False
    quarterly_positive_free_cash_flow: bool = False
    quarterly_positive_net_income: bool = False
    quarterly_positive_operating_income: bool = False
    growing_net_income: bool = False
    quarterly_operating_cash_flow_is_higher_than_net_income: bool = False
    operating_cash_flow_is_higher_than_net_income: bool = False
    rsi_last_value_exists: bool = False
    # Fix: description said "step count of 10" although multiple_of=1000.
    market_capitalization: int = Field(
        0,
        ge=0,
        multiple_of=1000,
        description="Positive integer with step count of 1000.",
    )
    price_per_earning_ratio: int = Field(
        0, ge=0, multiple_of=10, description="Positive integer with step count of 10."
    )
    industry: Set[Industry] = Field(None, description="Industry name.")  # type: ignore
    country: Set[Country] = Field(None, description="Country name.")  # type: ignore

    @cached_property
    def query_parameters(self) -> Dict[str, Any]:
        """Return the explicitly-set fields, widening empty industry/country
        selections to every known value.

        NOTE(review): this mutates ``self`` inside a cached property; the
        tuple values deliberately make ``to_query`` render a SQL
        ``IN (...)`` list via their Python repr.
        """
        if not bool(self.industry):
            self.industry = tuple(get_args(Industry))  # type: ignore
        if not bool(self.country):
            self.country = tuple(get_args(Country))  # type: ignore
        return self.model_dump(exclude_defaults=True, exclude_unset=True)

    def to_query(self) -> str:
        """Render the selected criteria as an AND-joined SQL expression."""
        query = " AND ".join(
            [f"{k}{SIGNS.get(k,'=')}{v}" for k, v in self.query_parameters.items()]
        )
        return query
104
+
105
+
106
class FilterQueryStored(FilterQuery):
    """Serialisable form of ``FilterQuery``: JSON-friendly optional lists
    replace the set-typed industry/country fields."""

    industry: Optional[List[Industry]] = None  # type: ignore
    country: Optional[List[Country]] = None  # type: ignore
109
+
110
+
111
class FilterUpdate(BaseModel):
    """Options controlling a data-update job."""

    # Price-history window to fetch (bearish ``SeriesLength``, e.g. "5d").
    window_size: SeriesLength = Field("5d")
    # Age threshold in days for stored data — presumably data older than this
    # is refreshed; confirm against the job implementation.
    data_age_in_days: int = 1
    # Also refresh financial statements, not just prices.
    update_financials: bool = False
    # Skip downloads entirely and only recompute the analysis.
    update_analysis_only: bool = False
116
+
117
+
118
class FilteredResults(BaseModel):
    """A named, persisted screening run: the query used plus the symbols it
    matched at the time it was saved."""

    name: str
    filter_query: FilterQueryStored
    symbols: list[str] = Field(
        default_factory=list, description="List of filtered tickers."
    )
File without changes
bullish/app/app.py ADDED
@@ -0,0 +1,218 @@
1
+ import shelve
2
+ import uuid
3
+ from pathlib import Path
4
+ from typing import Optional
5
+
6
+ import pandas as pd
7
+ import streamlit as st
8
+ import streamlit_pydantic as sp
9
+ from bearish.models.base import Ticker # type: ignore
10
+ from bearish.models.price.prices import Prices # type: ignore
11
+ from bearish.models.query.query import AssetQuery, Symbols # type: ignore
12
+ from streamlit_file_browser import st_file_browser # type: ignore
13
+
14
+ from bullish.database.crud import BullishDb
15
+ from bullish.figures.figures import plot
16
+ from bullish.analysis.filter import (
17
+ FilterQuery,
18
+ FilterUpdate,
19
+ FilteredResults,
20
+ FilterQueryStored,
21
+ )
22
+ from bullish.jobs.models import JobTracker
23
+ from bullish.jobs.tasks import update, news
24
+
25
# Shelve file (created in the process CWD) that persists the chosen DB path.
CACHE_SHELVE = "user_cache"
# Prefix of the per-session shelve key under which the DB path is stored.
DB_KEY = "db_path"

# Must run before any other Streamlit call in the script.
st.set_page_config(layout="wide")
29
+
30
+
31
@st.cache_resource
def db_id() -> str:
    """Build the shelve key under which this server run stores the DB path."""
    # Cached by Streamlit, so the random suffix is generated only once
    # per server process.
    unique_suffix = uuid.uuid4()
    return f"{DB_KEY}_{unique_suffix}"
34
+
35
+
36
@st.cache_resource
def bearish_db(database_path: Path) -> BullishDb:
    """Open (and cache, per path) the ``BullishDb`` backing the given file."""
    return BullishDb(database_path=database_path)
39
+
40
+
41
def store_db(db_path: Path) -> None:
    """Persist the selected database path under this session's shelve key."""
    with shelve.open(CACHE_SHELVE) as storage:  # noqa:S301
        key = db_id()
        storage[key] = str(db_path)
44
+
45
+
46
def load_db() -> Optional[str]:
    """Fetch the previously persisted database path, or ``None`` if absent."""
    with shelve.open(CACHE_SHELVE) as storage:  # noqa:S301
        return storage.get(db_id())
50
+
51
+
52
def assign_db_state() -> None:
    """Seed ``st.session_state.database_path`` from the shelve cache once."""
    if "database_path" in st.session_state:
        return
    st.session_state.database_path = load_db()
55
+
56
+
57
# Hash the BullishDb by its path so the cache is keyed per database file.
@st.cache_data(hash_funcs={BullishDb: lambda obj: hash(obj.database_path)})
def load_analysis_data(bullish_db: BullishDb) -> pd.DataFrame:
    """Read the full analysis table, cached per database path."""
    return bullish_db.read_analysis_data()
60
+
61
+
62
def on_table_select() -> None:
    """Dataframe selection callback: prepare the price chart for the picked row.

    Stores the resulting Plotly figure in ``st.session_state.ticker_figure``;
    ``main`` then opens the chart dialog when that value is set.
    """
    # Indices of the selected rows (single-row mode: zero or one index).
    row = st.session_state.selected_data["selection"]["rows"]

    db = bearish_db(st.session_state.database_path)
    # Nothing usable selected: leave the current figure untouched.
    if st.session_state.data.empty or (
        not st.session_state.data.iloc[row]["symbol"].to_numpy()
    ):
        return

    symbol = st.session_state.data.iloc[row]["symbol"].to_numpy()[0]
    query = AssetQuery(symbols=Symbols(equities=[Ticker(symbol=symbol)]))
    # 24 months of price history for the selected ticker.
    prices = db.read_series(query, months=24)
    data = Prices(prices=prices).to_dataframe()

    fig = plot(data, symbol)

    st.session_state.ticker_figure = fig
80
+
81
+
82
@st.dialog("🔑 Provide database file to continue")
def dialog_pick_database() -> None:
    """Modal forcing the user to pick a ``*.db`` file before the app runs.

    Blocks the script (``st.stop``) until a valid file is chosen, then
    persists the path and reruns so ``main`` picks it up.
    """
    current_working_directory = Path.cwd()
    event = st_file_browser(
        path=current_working_directory, key="A", glob_patterns="**/*.db"
    )
    if event:
        # The browser reports paths relative to its root directory.
        db_path = Path(current_working_directory).joinpath(event["target"]["path"])
        if not (db_path.exists() and db_path.is_file()):
            st.error("Please choose a valid file.")
            st.stop()
        st.session_state.database_path = db_path
        store_db(db_path)
        # Restart the script so the main flow uses the new path.
        st.rerun()
    if event is None:
        # No interaction yet: halt here until the user picks something.
        st.stop()
98
+
99
+
100
@st.dialog("📈 Price history and analysis", width="large")
def dialog_plot_figure() -> None:
    """Large modal showing the figure prepared by ``on_table_select``."""
    # CSS hack: widen only the dialog that contains the .big-dialog marker.
    st.markdown(
        """
    <style>
    div[data-testid="stDialog"] div[role="dialog"]:has(.big-dialog) {
        width: 90vw;
        height: 110vh;
    }
    </style>
    """,
        unsafe_allow_html=True,
    )
    # Marker element targeted by the :has() selector above.
    st.html("<span class='big-dialog'></span>")
    st.plotly_chart(st.session_state.ticker_figure, use_container_width=True)
    # Clear the figure so the dialog does not reopen on the next rerun.
    st.session_state.ticker_figure = None
116
+
117
+
118
def main() -> None:  # noqa: PLR0915, C901
    """Streamlit entry point: sidebar (filter/save/load/update) plus a charts
    tab and a jobs tab.

    NOTE(review): indentation below is reconstructed from the published
    sources; the sidebar nesting (container inside the Filter expander,
    sibling Load/Update expanders) should be confirmed against the package.
    """
    assign_db_state()
    if st.session_state.database_path is None:
        # Modal blocks (st.stop) until a database file is chosen.
        dialog_pick_database()
    bearish_db_ = bearish_db(st.session_state.database_path)
    charts_tab, jobs_tab = st.tabs(["Charts", "Jobs"])
    if "data" not in st.session_state:
        st.session_state.data = load_analysis_data(bearish_db_)
    with st.sidebar:
        with st.expander("Filter"):
            # Auto-generated form for the FilterQuery model.
            view_query = sp.pydantic_form(key="my_form", model=FilterQuery)
            if view_query:
                st.session_state.data = bearish_db_.read_filter_query(view_query)
                st.session_state.ticker_figure = None
                st.session_state.filter_query = view_query
            with st.container(border=True):
                # Saving is only enabled once a filter has been applied.
                disabled = "filter_query" not in st.session_state
                if "filter_query" in st.session_state:
                    disabled = st.session_state.filter_query is None
                user_input = st.text_input("Enter your name:", disabled=disabled)
                headless = st.checkbox("Headless mode", value=True, disabled=disabled)
                if st.button("Save", disabled=disabled):
                    name = user_input.strip()
                    if not name:
                        st.error("This field is required.")
                    else:
                        symbols = st.session_state.data["symbol"].unique().tolist()
                        filtered_results = FilteredResults(
                            name=name,
                            filter_query=FilterQueryStored.model_validate(
                                st.session_state.filter_query.model_dump()
                            ),
                            symbols=symbols,
                        )

                        bearish_db_.write_filtered_results(filtered_results)
                        # Enqueue a news-fetching job for the saved symbols.
                        res = news(
                            database_path=st.session_state.database_path,
                            symbols=symbols,
                            headless=headless,
                        )
                        bearish_db_.write_job_tracker(
                            JobTracker(job_id=str(res.id), type="Fetching news")
                        )
                        st.session_state.filter_query = None
                        st.success(f"Hello, {user_input}!")
        with st.expander("Load"):
            # Reload the symbol list of a previously saved screening run.
            existing_filtered_results = bearish_db_.read_list_filtered_results()
            option = st.selectbox("Saved results", ["", *existing_filtered_results])
            if option:
                filtered_results_ = bearish_db_.read_filtered_results(option)
                if filtered_results_:
                    st.session_state.data = bearish_db_.read_analysis_data(
                        symbols=filtered_results_.symbols
                    )

        with st.expander("Update"):
            update_query = sp.pydantic_form(key="update", model=FilterUpdate)
            if (
                update_query
                and st.session_state.data is not None
                and not st.session_state.data.empty
            ):
                symbols = st.session_state.data["symbol"].unique().tolist()
                res = update(
                    database_path=st.session_state.database_path,
                    symbols=symbols,
                    update_query=update_query,
                )  # enqueue & get result-handle
                bearish_db_.write_job_tracker(
                    JobTracker(job_id=str(res.id), type="Update data")
                )
                st.success("Data update job has been enqueued.")
                st.rerun()
    with charts_tab:
        st.header("✅ Data overview")
        st.dataframe(
            st.session_state.data,
            on_select=on_table_select,
            selection_mode="single-row",
            key="selected_data",
            use_container_width=True,
            height=600,
        )
        # on_table_select leaves the figure in session state; show it now.
        if (
            "ticker_figure" in st.session_state
            and st.session_state.ticker_figure is not None
        ):
            dialog_plot_figure()

    with jobs_tab:
        job_trackers = bearish_db_.read_job_trackers()
        st.dataframe(
            job_trackers,
            use_container_width=True,
            hide_index=True,
        )
215
+
216
+
217
# Streamlit runs this module as a script, so the guard fires on every rerun.
if __name__ == "__main__":
    main()
bullish/cli.py ADDED
@@ -0,0 +1,72 @@
1
+ from __future__ import annotations
2
+ import subprocess
3
+ import signal
4
+ import sys
5
+ import time
6
+ import atexit
7
+ from pathlib import Path
8
+ from typing import Any
9
+
10
+ import typer
11
+
12
app = typer.Typer(add_completion=False)
# Absolute path of the Streamlit entry point shipped inside the package.
STREAMLIT_FILE = Path(__file__).parent.joinpath("app", "app.py")
14
+
15
+
16
@app.command()
def serve(
    host: str = typer.Option("0.0.0.0", help="Streamlit host"),  # noqa: S104
    port: int = typer.Option(8501, help="Streamlit port"),
) -> None:
    """Launch the huey job consumer and the Streamlit UI as child processes,
    and tear both down together on SIGINT/SIGTERM or interpreter exit."""
    children: list[subprocess.Popen] = []  # type: ignore

    def _shutdown(*_: Any) -> None:
        # Forward SIGINT to each live child, escalating to SIGKILL after 5s.
        for p in children:
            if p.poll() is None:
                p.send_signal(signal.SIGINT)
                try:
                    p.wait(timeout=5)
                except subprocess.TimeoutExpired:
                    p.kill()
        sys.exit(0)

    signal.signal(signal.SIGINT, _shutdown)
    signal.signal(signal.SIGTERM, _shutdown)
    # Also clean up on normal interpreter exit; the SystemExit raised inside
    # an atexit callback is ignored by the atexit machinery.
    atexit.register(_shutdown)

    try:
        # Background job consumer (huey).
        children.append(
            subprocess.Popen(
                [  # noqa: S603
                    sys.executable,
                    "-m",
                    "huey.bin.huey_consumer",
                    "bullish.jobs.tasks.huey",
                ]
            )
        )
        # Streamlit front end serving the packaged app module.
        children.append(
            subprocess.Popen(
                [  # noqa: S603
                    sys.executable,
                    "-m",
                    "streamlit",
                    "run",
                    str(STREAMLIT_FILE),
                    "--server.address",
                    host,
                    "--server.port",
                    str(port),
                ]
            )
        )
        # Keep the parent alive; signals break out via _shutdown's sys.exit.
        while True:
            time.sleep(1)

    except Exception as exc:  # pragma: no cover
        typer.secho(f"❌ Failed to start services: {exc}", fg=typer.colors.RED, err=True)
        _shutdown()
69
+
70
+
71
# Allow running the CLI module directly (python -m / script execution).
if __name__ == "__main__":
    app()
File without changes
@@ -0,0 +1 @@
1
+ Generic single-database configuration.
@@ -0,0 +1,117 @@
1
+ # A generic, single database configuration.
2
+
3
+ [alembic]
4
+ # path to migration scripts
5
+ # Use forward slashes (/) also on windows to provide an os agnostic path
6
+ script_location = .
7
+
8
+ # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
9
+ # Uncomment the line below if you want the files to be prepended with date and time
10
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
11
+ # for all available tokens
12
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
13
+
14
+ # sys.path path, will be prepended to sys.path if present.
15
+ # defaults to the current working directory.
16
+ prepend_sys_path = .
17
+
18
+ # timezone to use when rendering the date within the migration file
19
+ # as well as the filename.
20
+ # If specified, requires the python>=3.9 or backports.zoneinfo library.
21
+ # Any required deps can installed by adding `alembic[tz]` to the pip requirements
22
+ # string value is passed to ZoneInfo()
23
+ # leave blank for localtime
24
+ # timezone =
25
+
26
+ # max length of characters to apply to the "slug" field
27
+ # truncate_slug_length = 40
28
+
29
+ # set to 'true' to run the environment during
30
+ # the 'revision' command, regardless of autogenerate
31
+ # revision_environment = false
32
+
33
+ # set to 'true' to allow .pyc and .pyo files without
34
+ # a source .py file to be detected as revisions in the
35
+ # versions/ directory
36
+ # sourceless = false
37
+
38
+ # version location specification; This defaults
39
+ # to ./versions. When using multiple version
40
+ # directories, initial revisions must be specified with --version-path.
41
+ # The path separator used here should be the separator specified by "version_path_separator" below.
42
+ # version_locations = %(here)s/bar:%(here)s/bat:./versions
43
+
44
+ # version path separator; As mentioned above, this is the character used to split
45
+ # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
46
+ # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
47
+ # Valid values for version_path_separator are:
48
+ #
49
+ # version_path_separator = :
50
+ # version_path_separator = ;
51
+ # version_path_separator = space
52
+ # version_path_separator = newline
53
+ version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
54
+
55
+ # set to 'true' to search source files recursively
56
+ # in each "version_locations" directory
57
+ # new in Alembic version 1.10
58
+ # recursive_version_locations = false
59
+
60
+ # the output encoding used when revision files
61
+ # are written from script.py.mako
62
+ # output_encoding = utf-8
63
+
64
+ sqlalchemy.url = driver://user:pass@localhost/dbname
65
+
66
+
67
+ [post_write_hooks]
68
+ # post_write_hooks defines scripts or Python functions that are run
69
+ # on newly generated revision scripts. See the documentation for further
70
+ # detail and examples
71
+
72
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
73
+ # hooks = black
74
+ # black.type = console_scripts
75
+ # black.entrypoint = black
76
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
77
+
78
+ # lint with attempts to fix using "ruff" - use the exec runner, execute a binary
79
+ # hooks = ruff
80
+ # ruff.type = exec
81
+ # ruff.executable = %(here)s/.venv/bin/ruff
82
+ # ruff.options = --fix REVISION_SCRIPT_FILENAME
83
+
84
+ # Logging configuration
85
+ [loggers]
86
+ keys = root,sqlalchemy,alembic
87
+
88
+ [handlers]
89
+ keys = console
90
+
91
+ [formatters]
92
+ keys = generic
93
+
94
+ [logger_root]
95
+ level = WARNING
96
+ handlers = console
97
+ qualname =
98
+
99
+ [logger_sqlalchemy]
100
+ level = WARNING
101
+ handlers =
102
+ qualname = sqlalchemy.engine
103
+
104
+ [logger_alembic]
105
+ level = INFO
106
+ handlers =
107
+ qualname = alembic
108
+
109
+ [handler_console]
110
+ class = StreamHandler
111
+ args = (sys.stderr,)
112
+ level = NOTSET
113
+ formatter = generic
114
+
115
+ [formatter_generic]
116
+ format = %(levelname)-5.5s [%(name)s] %(message)s
117
+ datefmt = %H:%M:%S
@@ -0,0 +1,80 @@
1
+ import os
2
+
3
+ from alembic import context
4
+ from sqlalchemy import engine_from_config, pool
5
+ from sqlmodel import SQLModel
6
+
7
+ from bullish.database.schemas import * # noqa: F403
8
+
9
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
# if config.config_file_name is not None:
#     fileConfig(config.config_file_name)

# Autogenerate support: the star import of bullish.database.schemas above
# registers every table on SQLModel's shared metadata object.
target_metadata = SQLModel.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
28
+
29
+
30
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # The connection URL comes from the DATABASE_URL environment variable,
    # not from the sqlalchemy.url placeholder in alembic.ini.
    context.configure(
        url=os.getenv("DATABASE_URL"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
51
+
52
+
53
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    configuration = config.get_section(config.config_ini_section, {})
    # Override the ini placeholder URL with the real one from the environment.
    configuration["sqlalchemy.url"] = os.getenv("DATABASE_URL")  # type: ignore
    connectable = engine_from_config(
        configuration,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        # render_as_batch enables batch ("move and copy") migrations,
        # required for ALTER TABLE support on SQLite.
        context.configure(
            connection=connection, target_metadata=target_metadata, render_as_batch=True
        )

        with context.begin_transaction():
            context.run_migrations()
75
+
76
+
77
# Alembic selects offline mode for SQL-script generation (e.g. --sql runs).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -0,0 +1,26 @@
1
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    # Apply this revision's schema changes.
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    # Revert this revision's schema changes.
    ${downgrades if downgrades else "pass"}