bullishpy 0.5.0__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of bullishpy might be problematic.

bullish/database/alembic/versions/49c83f9eb5ac_.py ADDED
@@ -0,0 +1,103 @@
+"""
+
+Revision ID: 49c83f9eb5ac
+Revises: ee5baabb35f8
+Create Date: 2025-07-06 07:29:51.181393
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "49c83f9eb5ac"
+down_revision: Union[str, None] = "ee5baabb35f8"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("median_rate_of_change_1", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_rate_of_change_7_4", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_rate_of_change_7_12", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("median_rate_of_change_30", sa.Float(), nullable=True)
+        )
+        batch_op.add_column(sa.Column("rate_of_change_7", sa.Float(), nullable=True))
+        batch_op.drop_index(batch_op.f("ix_analysis_rate_of_change_1"))
+        batch_op.drop_index(batch_op.f("ix_analysis_rate_of_change_7_12"))
+        batch_op.drop_index(batch_op.f("ix_analysis_rate_of_change_7_4"))
+        batch_op.create_index(
+            "ix_analysis_median_rate_of_change_1",
+            ["median_rate_of_change_1"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_median_rate_of_change_30",
+            ["median_rate_of_change_30"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_median_rate_of_change_7_12",
+            ["median_rate_of_change_7_12"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_median_rate_of_change_7_4",
+            ["median_rate_of_change_7_4"],
+            unique=False,
+        )
+        batch_op.create_index(
+            "ix_analysis_rate_of_change_7", ["rate_of_change_7"], unique=False
+        )
+        batch_op.drop_column("rate_of_change_7_4")
+        batch_op.drop_column("rate_of_change_7_12")
+        batch_op.drop_column("rate_of_change_1")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("rate_of_change_1", sa.FLOAT(), nullable=True))
+        batch_op.add_column(sa.Column("rate_of_change_7_12", sa.FLOAT(), nullable=True))
+        batch_op.add_column(sa.Column("rate_of_change_7_4", sa.FLOAT(), nullable=True))
+        batch_op.drop_index("ix_analysis_rate_of_change_7")
+        batch_op.drop_index("ix_analysis_median_rate_of_change_7_4")
+        batch_op.drop_index("ix_analysis_median_rate_of_change_7_12")
+        batch_op.drop_index("ix_analysis_median_rate_of_change_30")
+        batch_op.drop_index("ix_analysis_median_rate_of_change_1")
+        batch_op.create_index(
+            batch_op.f("ix_analysis_rate_of_change_7_4"),
+            ["rate_of_change_7_4"],
+            unique=False,
+        )
+        batch_op.create_index(
+            batch_op.f("ix_analysis_rate_of_change_7_12"),
+            ["rate_of_change_7_12"],
+            unique=False,
+        )
+        batch_op.create_index(
+            batch_op.f("ix_analysis_rate_of_change_1"),
+            ["rate_of_change_1"],
+            unique=False,
+        )
+        batch_op.drop_column("rate_of_change_7")
+        batch_op.drop_column("median_rate_of_change_30")
+        batch_op.drop_column("median_rate_of_change_7_12")
+        batch_op.drop_column("median_rate_of_change_7_4")
+        batch_op.drop_column("median_rate_of_change_1")
+
+    # ### end Alembic commands ###
bullish/database/alembic/versions/ee5baabb35f8_.py ADDED
@@ -0,0 +1,51 @@
+"""
+
+Revision ID: ee5baabb35f8
+Revises: 08ac1116e055
+Create Date: 2025-07-02 05:16:42.862366
+
+"""
+
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import sqlite
+
+# revision identifiers, used by Alembic.
+revision: str = "ee5baabb35f8"
+down_revision: Union[str, None] = "08ac1116e055"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column("adosc_crosses_above_0", sa.Date(), nullable=True)
+        )
+        batch_op.add_column(
+            sa.Column("positive_adosc_20_day_breakout", sa.Date(), nullable=True)
+        )
+        batch_op.create_index(
+            "ix_analysis_adosc_crosses_above_0", ["adosc_crosses_above_0"], unique=False
+        )
+        batch_op.create_index(
+            "ix_analysis_positive_adosc_20_day_breakout",
+            ["positive_adosc_20_day_breakout"],
+            unique=False,
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("analysis", schema=None) as batch_op:
+        batch_op.drop_index("ix_analysis_positive_adosc_20_day_breakout")
+        batch_op.drop_index("ix_analysis_adosc_crosses_above_0")
+        batch_op.drop_column("positive_adosc_20_day_breakout")
+        batch_op.drop_column("adosc_crosses_above_0")
+
+    # ### end Alembic commands ###
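Both revisions above are standard Alembic batch migrations against the `analysis` table (ee5baabb35f8 adds the ADOSC date columns, 49c83f9eb5ac replaces the rate-of-change columns with median variants). As a rough, hypothetical sketch of applying such revisions outside the packaged upgrade script, assuming a local `alembic.ini` and a SQLite URL (neither taken from bullishpy):

```python
# Hypothetical sketch only: apply the migrations with Alembic's Python API.
# The config path and database URL are assumptions, not part of the package.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed Alembic config location
cfg.set_main_option("sqlalchemy.url", "sqlite:///bullish.db")  # assumed DB URL
command.upgrade(cfg, "head")  # runs ee5baabb35f8, then 49c83f9eb5ac
# command.downgrade(cfg, "-1")  # would revert the most recent revision
```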
bullish/database/crud.py CHANGED
@@ -65,6 +65,11 @@ class BullishDb(BearishDb, BullishDbBase): # type: ignore
             return None
         return Analysis.model_validate(analysis)
 
+    def read_symbols(self) -> List[str]:
+        query = "SELECT DISTINCT symbol FROM analysis"
+        data = pd.read_sql_query(query, self._engine)
+        return data["symbol"].tolist()
+
     def _read_analysis_data(
         self, columns: List[str], symbols: Optional[List[str]] = None
     ) -> pd.DataFrame:
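The new `read_symbols` method returns the distinct symbols stored in the `analysis` table. A minimal usage sketch, assuming a local database file (the path is an assumption):

```python
# Usage sketch for the new BullishDb.read_symbols() helper.
from pathlib import Path

from bullish.database.crud import BullishDb

db = BullishDb(database_path=Path("bullish.db"))  # assumed database location
symbols = db.read_symbols()  # distinct symbols present in the analysis table
print(f"{len(symbols)} symbols available")
```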
bullish/database/schemas.py CHANGED
@@ -5,6 +5,7 @@ from sqlalchemy import Column, JSON
 from bullish.analysis.analysis import Analysis
 from bullish.analysis.filter import FilteredResults
 from bullish.jobs.models import JobTracker
+from sqlalchemy import Index
 
 
 class BaseTable(SQLModel):
@@ -12,11 +13,23 @@ class BaseTable(SQLModel):
     source: str = Field(primary_key=True)
 
 
+dynamic_indexes = tuple(
+    Index(f"ix_analysis_{col}", col) for col in Analysis.model_fields
+)
+
+
 class AnalysisORM(BaseTable, Analysis, table=True):
     __tablename__ = "analysis"
     __table_args__ = {"extend_existing": True}  # noqa:RUF012
 
 
+AnalysisORM.__table_args__ = tuple(  # type: ignore # noqa: RUF005
+    Index(f"ix_{AnalysisORM.__tablename__}_{col.name}", col)
+    for col in AnalysisORM.__table__.columns
+    if not col.primary_key and not col.index and col.name != "id"
+) + (AnalysisORM.__table_args__,)
+
+
 class JobTrackerORM(SQLModel, JobTracker, table=True):
     __tablename__ = "jobtracker"
     __table_args__ = {"extend_existing": True}  # noqa:RUF012
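The rebuilt `__table_args__` attaches one SQLAlchemy `Index` per non-primary-key, not-yet-indexed column of `AnalysisORM`, which is what the `ix_analysis_*` indexes in the migrations above correspond to. A standalone sketch of the same pattern, with made-up table and column names (nothing here is taken from bullishpy):

```python
# Minimal sketch: generate one index per unindexed, non-primary-key column.
from sqlalchemy import Column, Float, Index, Integer, MetaData, Table

metadata = MetaData()
example = Table(
    "example",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("rate_of_change_7", Float),
    Column("median_rate_of_change_30", Float),
)

indexes = tuple(
    Index(f"ix_{example.name}_{col.name}", col)
    for col in example.columns
    if not col.primary_key and not col.index
)
# -> indexes named ix_example_rate_of_change_7, ix_example_median_rate_of_change_30
```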
bullish/figures/figures.py CHANGED
@@ -4,33 +4,37 @@ import pandas as pd
 import plotly.graph_objects as go
 from plotly.subplots import make_subplots
 
+from bullish.analysis.functions import add_indicators
+
 
 def plot(
     data: pd.DataFrame,
     symbol: str,
     name: Optional[str] = None,
-    dates: Optional[pd.Series] = None,  # type: ignore
+    dates: Optional[pd.Series] = None,
 ) -> go.Figure:
-    data.ta.sma(50, append=True)
-    data.ta.sma(200, append=True)
-    data.ta.adx(append=True)
-    data.ta.macd(append=True)
-    data.ta.rsi(append=True)
+    data = add_indicators(data)
     fig = make_subplots(
-        rows=4,
+        rows=7,
         cols=1,
         shared_xaxes=True,
-        vertical_spacing=0.1,
+        vertical_spacing=0.05,
         specs=[
             [{"rowspan": 2}],  # Row 1: main chart
             [None],  # Row 2: skipped (part of row 1)
             [{}],  # Row 3: RSI
             [{}],  # Row 4: MACD
+            [{}],  # Row 5: ADX
+            [{}],  # Row 6: OBV
+            [{}],  # Row 7: ATR
         ],
         subplot_titles=(
            f"Price + SMAs ({symbol} [{name}])",
            f"RSI ({symbol} [{name}])",
            f"MACD ({symbol} [{name}])",
+           f"ADX ({symbol} [{name}])",
+           f"OBV ({symbol} [{name}])",
+           f"ATR ({symbol} [{name}])",
         ),
     )
     # Row 1: Candlestick + SMAs
@@ -61,7 +65,7 @@ def plot(
 
     # Row 2: RSI
     fig.add_trace(
-        go.Scatter(x=data.index, y=data.RSI_14, name="RSI 14", mode="lines"),
+        go.Scatter(x=data.index, y=data.RSI, name="RSI 14", mode="lines"),
         row=3,
         col=1,
     )
@@ -75,17 +79,52 @@ def plot(
 
     fig.add_trace(
         go.Scatter(
-            x=data.index, y=data.MACDs_12_26_9, name="MACD Signal", mode="lines"
+            x=data.index, y=data.MACD_12_26_9_SIGNAL, name="MACD Signal", mode="lines"
         ),
         row=4,
         col=1,
     )
 
     fig.add_trace(
-        go.Bar(x=data.index, y=data.MACDh_12_26_9, name="MACD Histogram", opacity=0.5),
+        go.Bar(
+            x=data.index, y=data.MACD_12_26_9_HIST, name="MACD Histogram", opacity=0.5
+        ),
         row=4,
         col=1,
     )
+
+    # Row 4: ADX
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.ADX_14, name="ADX_14", mode="lines"),
+        row=5,
+        col=1,
+    )
+
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.MINUS_DI, name="-DI", mode="lines"),
+        row=5,
+        col=1,
+    )
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.PLUS_DI, name="+DI", mode="lines"),
+        row=5,
+        col=1,
+    )
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.OBV, name="OBV", mode="lines"),
+        row=6,
+        col=1,
+    )
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.ADOSC, name="ADOSC", mode="lines"),
+        row=6,
+        col=1,
+    )
+    fig.add_trace(
+        go.Scatter(x=data.index, y=data.ATR, name="ATR", mode="lines"),
+        row=7,
+        col=1,
+    )
     if dates is not None and not dates.empty:
         for date in dates:
             fig.add_vline(
@@ -94,7 +133,7 @@ def plot(
 
     # Layout tweaks
     fig.update_layout(
-        height=900,
+        height=1500,
         showlegend=True,
         title="Technical Indicator Dashboard",
         margin={"t": 60, "b": 40},
@@ -103,5 +142,6 @@ def plot(
     # Optional: Add horizontal lines for RSI (e.g., 70/30 levels)
     fig.add_hline(y=70, line_dash="dash", line_color="red", row=3, col=1)
    fig.add_hline(y=30, line_dash="dash", line_color="green", row=3, col=1)
+    fig.add_hline(y=25, line_dash="dash", line_color="red", row=5, col=1)
 
     return fig
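For context, `plot` now derives its indicator columns (RSI, MACD_12_26_9_SIGNAL, ADX_14, OBV, ADOSC, ATR, ...) through `add_indicators` instead of inline pandas-ta calls. A hypothetical call, assuming `data` is an OHLCV DataFrame indexed by date (the CSV file and its layout are assumptions):

```python
# Hypothetical usage of the updated plot() helper.
import pandas as pd

from bullish.figures.figures import plot

data = pd.read_csv("prices.csv", index_col="Date", parse_dates=True)  # assumed input
fig = plot(data, symbol="AAPL", name="Apple Inc.")
fig.show()
```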
bullish/interface/interface.py CHANGED
@@ -80,3 +80,6 @@ class BullishDbBase(BearishDbBase): # type: ignore
 
     @abc.abstractmethod
     def write_filtered_results(self, filtered_results: FilteredResults) -> None: ...
+
+    @abc.abstractmethod
+    def read_symbols(self) -> List[str]: ...
bullish/jobs/tasks.py CHANGED
@@ -65,6 +65,16 @@ def update(
     run_analysis(bullish_db)
 
 
+@huey.task(context=True)  # type: ignore
+@job_tracker
+def analysis(
+    database_path: Path,
+    task: Optional[Task] = None,
+) -> None:
+    bullish_db = BullishDb(database_path=database_path)
+    run_analysis(bullish_db)
+
+
 @huey.task(context=True)  # type: ignore
 @job_tracker
 def news(
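The new `analysis` task appears to re-run only the analysis step against an existing database. Since Huey tasks are enqueued by calling the decorated function, a hypothetical producer-side call (the database path is an assumption) would look like:

```python
# Hypothetical enqueueing of the new huey `analysis` task.
from pathlib import Path

from bullish.jobs.tasks import analysis

result = analysis(Path("bullish.db"))  # calling the task enqueues it for the consumer
# result.get(blocking=True)  # optionally block until the consumer has finished
```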
bullish/utils/checks.py ADDED
@@ -0,0 +1,64 @@
+import sqlite3
+from contextlib import contextmanager
+from pathlib import Path
+from sqlite3 import Connection
+from typing import List, Generator
+
+from bearish.database.schemas import *  # type: ignore # noqa: F403
+from bullish.database.schemas import *  # noqa: F403
+
+
+@contextmanager
+def get_sqlite_connection(database_path: Path) -> Generator[Connection, None, None]:
+    conn = sqlite3.connect(database_path)
+    try:
+        yield conn
+    finally:
+        conn.close()
+
+
+def get_table_names(module_name: str) -> List[str]:
+    # Table names of the SQLModel tables star-imported above; the module
+    # globals are inspected (locals() would only contain module_name here).
+    return [
+        value.__tablename__
+        for value in globals().values()
+        if isinstance(value, type)
+        and issubclass(value, SQLModel)  # type: ignore # noqa: F405
+        and hasattr(value, "__tablename__")
+        and hasattr(value, "__table__")
+        and module_name in value.__module__
+    ]
+
+
+def get_table_names_from_path(database_path: Path) -> List[str]:
+    with get_sqlite_connection(database_path) as conn:
+        cursor = conn.cursor()
+        cursor.execute(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';"
+        )
+        tables = [row[0] for row in cursor.fetchall()]
+        cursor.close()
+        return tables
+
+
+def empty_analysis_table(database_path: Path) -> bool:
+    with get_sqlite_connection(database_path) as conn:
+        cursor = conn.cursor()
+
+        cursor.execute("SELECT COUNT(*) FROM analysis")
+        count = cursor.fetchone()[0]
+    return bool(count == 0)
+
+
+def _compatible_table(database_path: Path, module_name: str) -> bool:
+    if not (database_path.exists() and database_path.is_file()):
+        raise FileNotFoundError(f"Database file {database_path} does not exist.")
+    table_names = get_table_names(module_name)
+    return set(table_names).issubset(get_table_names_from_path(database_path))
+
+
+def compatible_bearish_database(database_path: Path) -> bool:
+    return _compatible_table(database_path, "bearish.database.schemas")
+
+
+def compatible_bullish_database(database_path: Path) -> bool:
+    return _compatible_table(database_path, "bullish.database.schemas")
@@ -1,13 +1,11 @@
 Metadata-Version: 2.3
 Name: bullishpy
-Version: 0.5.0
+Version: 0.7.0
 Summary:
 Author: aan
 Author-email: andoludovic.andriamamonjy@gmail.com
-Requires-Python: >=3.10,<3.13
+Requires-Python: >=3.12,<3.13
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: bearishpy (>=0.20.0,<0.21.0)
 Requires-Dist: huey (>=2.5.3,<3.0.0)
@@ -16,6 +14,7 @@ Requires-Dist: plotly (>=6.1.2,<7.0.0)
 Requires-Dist: streamlit (>=1.45.1,<2.0.0)
 Requires-Dist: streamlit-file-browser (>=3.2.22,<4.0.0)
 Requires-Dist: streamlit-pydantic (>=v0.6.1-rc.3,<0.7.0)
+Requires-Dist: ta-lib (>=0.6.4,<0.7.0)
 Requires-Dist: tickermood (>=0.4.0,<0.5.0)
 Description-Content-Type: text/markdown
 
@@ -1,9 +1,12 @@
 bullish/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/analysis/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/analysis/analysis.py,sha256=XWK8TWAeLP5Is-Lz0bxACdE29tDbkY10ieZwj2hfxHY,22267
-bullish/analysis/filter.py,sha256=2UIB-yqdP8edKlzCm54jrgUmDytwYJh5KtGU0aunvPU,15757
+bullish/analysis/analysis.py,sha256=ZqIMdAq1NJYgoQZ52PsV724VsXK8Jtno4alFFd_7_XU,17842
+bullish/analysis/filter.py,sha256=DzhA-cuKzZhIL1kwi61PTz77BBo8B6G_7EI4OVkPrww,17123
+bullish/analysis/functions.py,sha256=7mT6P6seLlgezjQlOyJNNL1J2Nydo6WeMM3A62t82Zc,11691
+bullish/analysis/indicators.py,sha256=LUNxhuimBdMbaIl5WpOu3Ofmw1Nv5ZSn7iVqdBlxoRY,16785
+bullish/analysis/predefined_filters.py,sha256=Ln1Wjz2x9RqU0_-u9e3TUNT2UT2VHnuogET0Q4Z_RAQ,2708
 bullish/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/app/app.py,sha256=yuEgq9j4GROL02eaER8wsAINATZVGN9al3fHp-470nc,9520
+bullish/app/app.py,sha256=Es244AnLce32d-MUYbKXZeEPE8GlfCUO4SQ7aiqooGc,12539
 bullish/cli.py,sha256=C31Pj7XGzdLz2Y3nIPQ7CF1DvyGVU5EyLvzj423QbwQ,1915
 bullish/database/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/database/alembic/README,sha256=heMzebYwlGhnE8_4CWJ4LS74WoEZjBy-S-mIJRxAEKI,39
@@ -11,25 +14,30 @@ bullish/database/alembic/alembic.ini,sha256=VuwqBJV5ObTyyRNrqv8Xr-TDIRfqPjP9R1mq
 bullish/database/alembic/env.py,sha256=TBsN4TyVppyc2QpWqViebd4-xxUT7Cs3GDYXQdKiAMs,2260
 bullish/database/alembic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
 bullish/database/alembic/versions/037dbd721317_.py,sha256=U7EA4odH3t9w0-J4FmvBUt8HOuGDMn0rEAu_0vPUYaI,8595
+bullish/database/alembic/versions/08ac1116e055_.py,sha256=zMEiCbraMEAZItT4ibc3evAH7-7mkXpdgnZy4tPVYeg,27263
 bullish/database/alembic/versions/11d35a452b40_.py,sha256=j2PaU1RssLQ20OevGmBC7S9E9ocWiXpBue9SOS4AQoY,11521
+bullish/database/alembic/versions/49c83f9eb5ac_.py,sha256=kCBItp7KmqpJ03roy5ikQjhefZia1oKgfZwournQDq8,3890
 bullish/database/alembic/versions/4b0a2f40b7d3_.py,sha256=G0K7w7pOPYjPZkXTB8LWhxoxuWBPcPwOfnubTBtdeEY,1827
 bullish/database/alembic/versions/73564b60fe24_.py,sha256=MTlDRDNHj3E9gK7IMeAzv2UxxxYtWiu3gI_9xTLE-wg,1008
-bullish/database/crud.py,sha256=lrYAXftQRBUXs8RmFnrzSyV-oX5wU_UfyZjBBqPyevQ,5777
-bullish/database/schemas.py,sha256=FjFhCa_VijAu2EEWsQ6rShSRzdH6FS9IM66ibh2kx3g,1104
+bullish/database/alembic/versions/ee5baabb35f8_.py,sha256=nBMEY-_C8AsSXVPyaDdUkwrFFo2gxShzJhmrjejDwtc,1632
+bullish/database/crud.py,sha256=2D0uDxJlDv1eJFRmgipiCQxCvlC5ILB2OR4OORlIJf4,5967
+bullish/database/schemas.py,sha256=bU-DW49NqpBp--1VN486LUdDmLeScrI8TF69afzjoTc,1507
 bullish/database/scripts/create_revision.py,sha256=rggIf-3koPqJNth8FIg89EOfnIM7a9QrvL8X7UJsP0g,628
 bullish/database/scripts/stamp.py,sha256=PWgVUEBumjNUMjTnGw46qmU3p221LeN-KspnW_gFuu4,839
 bullish/database/scripts/upgrade.py,sha256=-Gz7aFNPEt9y9e1kltqXE76-j_8QeNtet_VlwY5AWjo,806
 bullish/database/settings.py,sha256=nMudufmF7iC_62_PHrGSMjlqDLN2I0qTbtz9JKZHSko,164
 bullish/exceptions.py,sha256=4z_i-dD-CDz1bkGmZH9DOf1L_awlCPCgdUDPF7dhWAI,106
 bullish/figures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/figures/figures.py,sha256=3Ifrnl4I7gq6DiMEWZs2P0589nEsaGKYNiq-Cxj35_g,2771
+bullish/figures/figures.py,sha256=W4XJIs5wFtpX75OgoocVcuuA8Hdb_SNN58VW0LYI5oI,3808
 bullish/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-bullish/interface/interface.py,sha256=fi5tEBQGpJDshc-hfvZkPgrwWA9Q7Yn1lZ7jZ-bytm4,2736
+bullish/interface/interface.py,sha256=T3MdJbX2ZF7hiMhnZ00ppXSXbDwyxaqD69a3M4bPswU,2806
 bullish/jobs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 bullish/jobs/app.py,sha256=5MJ5KXUo7JSNAvOPgkpIMasD11VTrjQvGzM7vmCY65E,77
 bullish/jobs/models.py,sha256=ndrGTMP08S57yGLGEG9TQt8Uw2slc4HvbG-TZtEEuN0,744
-bullish/jobs/tasks.py,sha256=vRjbCBcQciTC9283El_ji7BKxx40IbLcJkMbMVoE5wA,2533
-bullishpy-0.5.0.dist-info/METADATA,sha256=Jez8tsiRYwfhea9IenqjU23lKrx8LpR2Q4N8j7me6SU,772
-bullishpy-0.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-bullishpy-0.5.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
-bullishpy-0.5.0.dist-info/RECORD,,
+bullish/jobs/tasks.py,sha256=gJEB342nCTw2KD3YgDXpQhGGITcXd14GAMiqRRVbZ-A,2757
+bullish/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bullish/utils/checks.py,sha256=rUyFNkx9F5DuOSxjldXymNHwRxIddN7PYbqPICnz1uM,2101
+bullishpy-0.7.0.dist-info/METADATA,sha256=5hcbhT0fxguy-ooV3rTstjxXO96R-221_y9Ai8mLoh8,709
+bullishpy-0.7.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+bullishpy-0.7.0.dist-info/entry_points.txt,sha256=eaPpmL6vmSBFo0FBtwibCXGqAW4LFJ83whJzT1VjD-0,43
+bullishpy-0.7.0.dist-info/RECORD,,