bullishpy 0.13.0__tar.gz → 0.14.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of bullishpy might be problematic.
- {bullishpy-0.13.0 → bullishpy-0.14.0}/PKG-INFO +1 -1
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/analysis.py +3 -0
- bullishpy-0.13.0/bullish/analysis/filter.py → bullishpy-0.14.0/bullish/analysis/constants.py +1 -248
- bullishpy-0.14.0/bullish/analysis/filter.py +247 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/functions.py +10 -19
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/indicators.py +16 -8
- bullishpy-0.14.0/bullish/analysis/industry_views.py +201 -0
- bullishpy-0.14.0/bullish/analysis/predefined_filters.py +242 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/app/app.py +5 -1
- bullishpy-0.14.0/bullish/database/alembic/versions/040b15fba458_.py +61 -0
- bullishpy-0.14.0/bullish/database/alembic/versions/5b10ee7604c1_.py +44 -0
- bullishpy-0.14.0/bullish/database/alembic/versions/ec25c8fa449f_.py +63 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/crud.py +72 -4
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/schemas.py +17 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/figures/figures.py +28 -5
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/interface/interface.py +29 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/pyproject.toml +1 -1
- bullishpy-0.13.0/bullish/analysis/predefined_filters.py +0 -409
- {bullishpy-0.13.0 → bullishpy-0.14.0}/README.md +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/app/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/cli.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/README +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/alembic.ini +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/env.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/script.py.mako +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/037dbd721317_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/08ac1116e055_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/11d35a452b40_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/17e51420e7ad_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/49c83f9eb5ac_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/4b0a2f40b7d3_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/73564b60fe24_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/b76079e9845f_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/bf6b86dd5463_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/d663166c531d_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/ee5baabb35f8_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/alembic/versions/fc191121f522_.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/scripts/create_revision.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/scripts/stamp.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/scripts/upgrade.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/database/settings.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/exceptions.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/figures/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/interface/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/jobs/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/jobs/app.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/jobs/models.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/jobs/tasks.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/utils/__init__.py +0 -0
- {bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/utils/checks.py +0 -0
{bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/analysis.py

@@ -45,6 +45,8 @@ from pydantic import BaseModel, BeforeValidator, Field, create_model
 from bullish.analysis.indicators import Indicators, IndicatorModels
 from joblib import Parallel, delayed  # type: ignore

+from bullish.analysis.industry_views import compute_industry_view
+
 if TYPE_CHECKING:
     from bullish.database.crud import BullishDb

@@ -494,6 +496,7 @@ def compute_analysis(database_path: Path, ticker: Ticker) -> Analysis:


 def run_analysis(bullish_db: "BullishDb") -> None:
+    compute_industry_view(bullish_db)
     price_trackers = set(bullish_db._read_tracker(TrackerQuery(), PriceTracker))
     finance_trackers = set(bullish_db._read_tracker(TrackerQuery(), FinancialsTracker))
     tickers = list(price_trackers.intersection(finance_trackers))
bullishpy-0.13.0/bullish/analysis/filter.py → bullishpy-0.14.0/bullish/analysis/constants.py
RENAMED

@@ -1,19 +1,4 @@
-import datetime
-from datetime import date
-from typing import Literal, get_args, Any, Optional, List, Tuple, Type, Dict
-
-from bearish.types import SeriesLength  # type: ignore
-from pydantic import BaseModel, Field, ConfigDict
-from pydantic import create_model
-from pydantic.fields import FieldInfo
-
-from bullish.analysis.analysis import (
-    YearlyFundamentalAnalysis,
-    QuarterlyFundamentalAnalysis,
-    TechnicalAnalysisModels,
-    TechnicalAnalysis,
-    AnalysisView,
-)
+from typing import Literal

 Industry = Literal[
     "Publishing",
@@ -162,7 +147,6 @@ Industry = Literal[
     "REIT - Healthcare Facilities",
     "Discount Stores",
 ]
-
 IndustryGroup = Literal[
     "publishing",
     "internet-retail",
@@ -310,7 +294,6 @@ IndustryGroup = Literal[
     "reit-healthcare-facilities",
     "discount-stores",
 ]
-
 Sector = Literal[
     "Communication Services",
     "Consumer Cyclical",
@@ -325,7 +308,6 @@ Sector = Literal[
     "Financial Services",
     "Conglomerates",
 ]
-
 Country = Literal[
     "Australia",
     "China",
@@ -419,232 +401,3 @@ Country = Literal[
     "Liberia",
     "Kenya",
 ]
-SIZE_RANGE = 2
-
-
-def _get_type(name: str, info: FieldInfo) -> Tuple[Any, Any]:
-    alias = info.alias or " ".join(name.capitalize().split("_")).strip()
-    if info.annotation == Optional[float]:  # type: ignore
-        ge = next((item.ge for item in info.metadata if hasattr(item, "ge")), 0)
-        le = next((item.le for item in info.metadata if hasattr(item, "le")), 100)
-        default = [ge, le]
-        return (
-            Optional[List[float]],
-            Field(default=default, alias=alias, description=info.description),
-        )
-    elif info.annotation == Optional[date]:  # type: ignore
-        le = date.today()
-        ge = le - datetime.timedelta(days=30 * 2)  # 30 days * 12 months
-        return (
-            List[date],
-            Field(default=[ge, le], alias=alias, description=info.description),
-        )
-    else:
-        raise NotImplementedError
-
-
-FUNDAMENTAL_ANALYSIS_GROUP = ["income", "cash_flow", "eps"]
-
-
-def _get_fundamental_analysis_boolean_fields() -> List[str]:
-    return [
-        name
-        for name, info in {
-            **YearlyFundamentalAnalysis.model_fields,
-            **QuarterlyFundamentalAnalysis.model_fields,
-        }.items()
-        if info.annotation == Optional[bool]
-    ]
-
-
-def _get_technical_analysis_float_fields() -> List[str]:
-    return [
-        name
-        for name, info in (
-            TechnicalAnalysis.model_fields | AnalysisView.model_fields
-        ).items()
-        if info.annotation == Optional[float]
-    ]
-
-
-def get_boolean_field_group(group: str) -> List[str]:
-    groups = FUNDAMENTAL_ANALYSIS_GROUP.copy()
-    groups.remove(group)
-    return [
-        name
-        for name in _get_fundamental_analysis_boolean_fields()
-        if group in name and not any(g in name for g in groups)
-    ]
-
-
-INCOME_GROUP = get_boolean_field_group("income")
-CASH_FLOW_GROUP = get_boolean_field_group("cash_flow")
-EPS_GROUP = get_boolean_field_group("eps")
-PROPERTIES_GROUP = list(
-    set(_get_fundamental_analysis_boolean_fields()).difference(
-        {*INCOME_GROUP, *CASH_FLOW_GROUP, *EPS_GROUP}
-    )
-)
-
-GROUP_MAPPING: Dict[str, List[str]] = {
-    "income": INCOME_GROUP,
-    "cash_flow": CASH_FLOW_GROUP,
-    "eps": EPS_GROUP,
-    "properties": PROPERTIES_GROUP,
-    "country": list(get_args(Country)),
-    "industry": list(get_args(Industry)),
-    "industry_group": list(get_args(IndustryGroup)),
-    "sector": list(get_args(Sector)),
-    "symbol": [],
-    "order_by_asc": _get_technical_analysis_float_fields(),
-    "order_by_desc": _get_technical_analysis_float_fields(),
-}
-
-
-def _create_fundamental_analysis_models() -> List[Type[BaseModel]]:
-    models = []
-    boolean_fields = {
-        "income": (Optional[List[str]], Field(default=None, description="Income")),
-        "cash_flow": (
-            Optional[List[str]],
-            Field(default=None, description="Cash flow"),
-        ),
-        "eps": (
-            Optional[List[str]],
-            Field(default=None, description="Earnings per share"),
-        ),
-        "properties": (
-            Optional[List[str]],
-            Field(default=None, description="General properties"),
-        ),
-    }
-    yearly_fields = {
-        name: _get_type(name, info)
-        for name, info in YearlyFundamentalAnalysis.model_fields.items()
-        if info.annotation != Optional[bool]  # type: ignore
-    }
-    quarterly_fields = {
-        name: _get_type(name, info)
-        for name, info in QuarterlyFundamentalAnalysis.model_fields.items()
-        if info.annotation != Optional[bool]
-    }
-    for property in [
-        (boolean_fields, "Selection filter", "SelectionFilter"),
-        (yearly_fields, "Yearly properties", "YearlyFilter"),
-        (quarterly_fields, "Quarterly properties", "QuarterlyFilter"),
-    ]:
-        model_ = create_model(  # type: ignore
-            property[-1],
-            __config__=ConfigDict(populate_by_name=True),
-            **property[0],
-        )
-        model_._description = property[1]
-        models.append(model_)
-
-    return models
-
-
-def create_technical_analysis_models() -> List[Type[BaseModel]]:
-    models = []
-    for model in TechnicalAnalysisModels:
-        model_ = create_model(  # type: ignore
-            f"{model.__name__}Filter",  # type: ignore
-            __config__=ConfigDict(populate_by_name=True),
-            **{
-                name: _get_type(name, info) for name, info in model.model_fields.items()  # type: ignore
-            },
-        )
-
-        model_._description = model._description  # type: ignore
-        models.append(model_)
-    return models
-
-
-TechnicalAnalysisFilters = create_technical_analysis_models()
-FundamentalAnalysisFilters = _create_fundamental_analysis_models()
-
-
-class GeneralFilter(BaseModel):
-    country: Optional[List[str]] = None
-    order_by_asc: Optional[str] = None
-    order_by_desc: Optional[str] = None
-    industry: Optional[List[str]] = None
-    industry_group: Optional[List[str]] = None
-    sector: Optional[List[str]] = None
-    symbol: Optional[List[str]] = None
-    market_capitalization: Optional[List[float]] = Field(default=[5e8, 1e12])
-    price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])
-
-
-class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysisFilters):  # type: ignore
-
-    def valid(self) -> bool:
-        return any(
-            bool(v)
-            for _, v in self.model_dump(
-                exclude_defaults=True, exclude_unset=True
-            ).items()
-        )
-
-    def to_query(self) -> str:
-        parameters = self.model_dump(exclude_defaults=True, exclude_unset=True)
-        query = []
-        order_by_desc = ""
-        order_by_asc = ""
-        for parameter, value in parameters.items():
-            if not value:
-                continue
-
-            if (
-                isinstance(value, list)
-                and all(isinstance(item, str) for item in value)
-                and parameter not in GeneralFilter.model_fields
-            ):
-                query.append(" AND ".join([f"{v}=1" for v in value]))
-            elif (
-                isinstance(value, str) and bool(value) and parameter == "order_by_desc"
-            ):
-                order_by_desc = f"ORDER BY {value} DESC"
-            elif isinstance(value, str) and bool(value) and parameter == "order_by_asc":
-                order_by_asc = f"ORDER BY {value} ASC"
-            elif (
-                isinstance(value, list)
-                and len(value) == SIZE_RANGE
-                and all(isinstance(item, (int, float)) for item in value)
-            ):
-                query.append(f"{parameter} BETWEEN {value[0]} AND {value[1]}")
-            elif (
-                isinstance(value, list)
-                and len(value) == SIZE_RANGE
-                and all(isinstance(item, date) for item in value)
-            ):
-                query.append(f"{parameter} BETWEEN '{value[0]}' AND '{value[1]}'")
-            elif (
-                isinstance(value, list)
-                and all(isinstance(item, str) for item in value)
-                and parameter in GeneralFilter.model_fields
-            ):
-                general_filters = [f"'{v}'" for v in value]
-                query.append(f"{parameter} IN ({', '.join(general_filters)})")
-            else:
-                raise NotImplementedError
-        query_ = " AND ".join(query)
-        return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
-
-
-class FilterQueryStored(FilterQuery): ...
-
-
-class FilterUpdate(BaseModel):
-    window_size: SeriesLength = Field("5d")
-    data_age_in_days: int = 1
-    update_financials: bool = False
-    update_analysis_only: bool = False
-
-
-class FilteredResults(BaseModel):
-    name: str
-    filter_query: FilterQueryStored
-    symbols: list[str] = Field(
-        default_factory=list, description="List of filtered tickers."
-    )
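For reference, a minimal sketch of how downstream code can now pull the shared literals from the new constants module (this mirrors the import added at line 17 of the new filter.py shown below; the printed counts depend on the installed version):

from typing import get_args

from bullish.analysis.constants import Country, Industry, IndustryGroup, Sector

# Enumerate the allowed values of the Literal types that now live in constants.py.
print(len(get_args(Country)), len(get_args(Industry)), len(get_args(IndustryGroup)), len(get_args(Sector)))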
bullishpy-0.14.0/bullish/analysis/filter.py
ADDED

@@ -0,0 +1,247 @@
+import datetime
+from datetime import date
+from typing import get_args, Any, Optional, List, Tuple, Type, Dict
+
+from bearish.types import SeriesLength  # type: ignore
+from pydantic import BaseModel, Field, ConfigDict
+from pydantic import create_model
+from pydantic.fields import FieldInfo
+
+from bullish.analysis.analysis import (
+    YearlyFundamentalAnalysis,
+    QuarterlyFundamentalAnalysis,
+    TechnicalAnalysisModels,
+    TechnicalAnalysis,
+    AnalysisView,
+)
+from bullish.analysis.constants import Industry, IndustryGroup, Sector, Country
+
+SIZE_RANGE = 2
+
+
+def _get_type(name: str, info: FieldInfo) -> Tuple[Any, Any]:
+    alias = info.alias or " ".join(name.capitalize().split("_")).strip()
+    if info.annotation == Optional[float]:  # type: ignore
+        ge = next((item.ge for item in info.metadata if hasattr(item, "ge")), 0)
+        le = next((item.le for item in info.metadata if hasattr(item, "le")), 100)
+        default = [ge, le]
+        return (
+            Optional[List[float]],
+            Field(default=default, alias=alias, description=info.description),
+        )
+    elif info.annotation == Optional[date]:  # type: ignore
+        le = date.today()
+        ge = le - datetime.timedelta(days=30 * 2)  # 30 days * 12 months
+        return (
+            List[date],
+            Field(default=[ge, le], alias=alias, description=info.description),
+        )
+    else:
+        raise NotImplementedError
+
+
+FUNDAMENTAL_ANALYSIS_GROUP = ["income", "cash_flow", "eps"]
+
+
+def _get_fundamental_analysis_boolean_fields() -> List[str]:
+    return [
+        name
+        for name, info in {
+            **YearlyFundamentalAnalysis.model_fields,
+            **QuarterlyFundamentalAnalysis.model_fields,
+        }.items()
+        if info.annotation == Optional[bool]
+    ]
+
+
+def _get_technical_analysis_float_fields() -> List[str]:
+    return [
+        name
+        for name, info in (
+            TechnicalAnalysis.model_fields | AnalysisView.model_fields
+        ).items()
+        if info.annotation == Optional[float]
+    ]
+
+
+def get_boolean_field_group(group: str) -> List[str]:
+    groups = FUNDAMENTAL_ANALYSIS_GROUP.copy()
+    groups.remove(group)
+    return [
+        name
+        for name in _get_fundamental_analysis_boolean_fields()
+        if group in name and not any(g in name for g in groups)
+    ]
+
+
+INCOME_GROUP = get_boolean_field_group("income")
+CASH_FLOW_GROUP = get_boolean_field_group("cash_flow")
+EPS_GROUP = get_boolean_field_group("eps")
+PROPERTIES_GROUP = list(
+    set(_get_fundamental_analysis_boolean_fields()).difference(
+        {*INCOME_GROUP, *CASH_FLOW_GROUP, *EPS_GROUP}
+    )
+)
+
+GROUP_MAPPING: Dict[str, List[str]] = {
+    "income": INCOME_GROUP,
+    "cash_flow": CASH_FLOW_GROUP,
+    "eps": EPS_GROUP,
+    "properties": PROPERTIES_GROUP,
+    "country": list(get_args(Country)),
+    "industry": list(get_args(Industry)),
+    "industry_group": list(get_args(IndustryGroup)),
+    "sector": list(get_args(Sector)),
+    "symbol": [],
+    "order_by_asc": _get_technical_analysis_float_fields(),
+    "order_by_desc": _get_technical_analysis_float_fields(),
+}
+
+
+def _create_fundamental_analysis_models() -> List[Type[BaseModel]]:
+    models = []
+    boolean_fields = {
+        "income": (Optional[List[str]], Field(default=None, description="Income")),
+        "cash_flow": (
+            Optional[List[str]],
+            Field(default=None, description="Cash flow"),
+        ),
+        "eps": (
+            Optional[List[str]],
+            Field(default=None, description="Earnings per share"),
+        ),
+        "properties": (
+            Optional[List[str]],
+            Field(default=None, description="General properties"),
+        ),
+    }
+    yearly_fields = {
+        name: _get_type(name, info)
+        for name, info in YearlyFundamentalAnalysis.model_fields.items()
+        if info.annotation != Optional[bool]  # type: ignore
+    }
+    quarterly_fields = {
+        name: _get_type(name, info)
+        for name, info in QuarterlyFundamentalAnalysis.model_fields.items()
+        if info.annotation != Optional[bool]
+    }
+    for property in [
+        (boolean_fields, "Selection filter", "SelectionFilter"),
+        (yearly_fields, "Yearly properties", "YearlyFilter"),
+        (quarterly_fields, "Quarterly properties", "QuarterlyFilter"),
+    ]:
+        model_ = create_model(  # type: ignore
+            property[-1],
+            __config__=ConfigDict(populate_by_name=True),
+            **property[0],
+        )
+        model_._description = property[1]
+        models.append(model_)
+
+    return models
+
+
+def create_technical_analysis_models() -> List[Type[BaseModel]]:
+    models = []
+    for model in TechnicalAnalysisModels:
+        model_ = create_model(  # type: ignore
+            f"{model.__name__}Filter",  # type: ignore
+            __config__=ConfigDict(populate_by_name=True),
+            **{
+                name: _get_type(name, info) for name, info in model.model_fields.items()  # type: ignore
+            },
+        )
+
+        model_._description = model._description  # type: ignore
+        models.append(model_)
+    return models
+
+
+TechnicalAnalysisFilters = create_technical_analysis_models()
+FundamentalAnalysisFilters = _create_fundamental_analysis_models()
+
+
+class GeneralFilter(BaseModel):
+    country: Optional[List[str]] = None
+    order_by_asc: Optional[str] = None
+    order_by_desc: Optional[str] = None
+    industry: Optional[List[str]] = None
+    industry_group: Optional[List[str]] = None
+    sector: Optional[List[str]] = None
+    symbol: Optional[List[str]] = None
+    market_capitalization: Optional[List[float]] = Field(default=[5e8, 1e12])
+    price_per_earning_ratio: Optional[List[float]] = Field(default=[0.0, 1000.0])
+
+
+class FilterQuery(GeneralFilter, *TechnicalAnalysisFilters, *FundamentalAnalysisFilters):  # type: ignore
+
+    def valid(self) -> bool:
+        return any(
+            bool(v)
+            for _, v in self.model_dump(
+                exclude_defaults=True, exclude_unset=True
+            ).items()
+        )
+
+    def to_query(self) -> str:
+        parameters = self.model_dump(exclude_defaults=True, exclude_unset=True)
+        query = []
+        order_by_desc = ""
+        order_by_asc = ""
+        for parameter, value in parameters.items():
+            if not value:
+                continue
+
+            if (
+                isinstance(value, list)
+                and all(isinstance(item, str) for item in value)
+                and parameter not in GeneralFilter.model_fields
+            ):
+                query.append(" AND ".join([f"{v}=1" for v in value]))
+            elif (
+                isinstance(value, str) and bool(value) and parameter == "order_by_desc"
+            ):
+                order_by_desc = f"ORDER BY {value} DESC"
+            elif isinstance(value, str) and bool(value) and parameter == "order_by_asc":
+                order_by_asc = f"ORDER BY {value} ASC"
+            elif (
+                isinstance(value, list)
+                and len(value) == SIZE_RANGE
+                and all(isinstance(item, (int, float)) for item in value)
+            ):
+                query.append(f"{parameter} BETWEEN {value[0]} AND {value[1]}")
+            elif (
+                isinstance(value, list)
+                and len(value) == SIZE_RANGE
+                and all(isinstance(item, date) for item in value)
+            ):
+                query.append(f"{parameter} BETWEEN '{value[0]}' AND '{value[1]}'")
+            elif (
+                isinstance(value, list)
+                and all(isinstance(item, str) for item in value)
+                and parameter in GeneralFilter.model_fields
+            ):
+                general_filters = [f"'{v}'" for v in value]
+                query.append(f"{parameter} IN ({', '.join(general_filters)})")
+            else:
+                raise NotImplementedError
+        query_ = " AND ".join(query)
+        return f"{query_} {order_by_desc.strip()} {order_by_asc.strip()}".strip()
+
+
+class FilterQueryStored(FilterQuery): ...
+
+
+class FilterUpdate(BaseModel):
+    window_size: SeriesLength = Field("5d")
+    data_age_in_days: int = 1
+    update_financials: bool = False
+    update_analysis_only: bool = False
+
+
+class FilteredResults(BaseModel):
+    name: str
+    filter_query: FilterQueryStored
+    symbols: list[str] = Field(
+        default_factory=list, description="List of filtered tickers."
+    )
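A minimal usage sketch of the relocated filter module, assuming the package is installed and that the dynamically built FilterQuery exposes the country and market_capitalization fields shown above (clause order in the emitted query may vary):

from bullish.analysis.filter import FilterQuery

# Filter on two countries and a market-cap range; both fields come from
# GeneralFilter in the diff above.
fq = FilterQuery(country=["France", "Germany"], market_capitalization=[1e9, 1e11])

print(fq.valid())     # True: at least one non-default field is set
print(fq.to_query())  # roughly: country IN ('France', 'Germany') AND market_capitalization BETWEEN 1000000000.0 AND 100000000000.0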
{bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/functions.py

@@ -1,4 +1,3 @@
-import datetime
 import logging
 from datetime import date
 from typing import Optional, Callable, cast
@@ -282,24 +281,6 @@ def compute_price(data: pd.DataFrame) -> pd.DataFrame:
     return results


-def compute_percentile_return_after_rsi_crossover(
-    data: pd.DataFrame, rsi_threshold: int = 45, period: int = 90
-) -> float:
-    data_ = cross_value_series(data.RSI, rsi_threshold)
-    values = []
-    for crossing_date in data_[data_ == 1].index:
-        data_crossed = data[
-            (data.index >= crossing_date)
-            & (data.index <= crossing_date + datetime.timedelta(days=period))
-        ]
-        v = (
-            data_crossed.CLOSE.pct_change(periods=len(data_crossed.CLOSE) - 1).iloc[-1]
-            * 100
-        )
-        values.append(v)
-    return float(np.percentile(values, 30))
-
-
 def find_last_true_run_start(series: pd.Series) -> Optional[date]:
     if not series.iloc[-1]:
         return None
@@ -311,6 +292,16 @@ def find_last_true_run_start(series: pd.Series) -> Optional[date]:
     return series.index[last_true_run_start].date()  # type: ignore


+def sma_50_above_sma_200(data: pd.DataFrame) -> Optional[date]:
+    date_1 = find_last_true_run_start(data.SMA_50 > data.SMA_200)
+    return date_1
+
+
+def price_above_sma50(data: pd.DataFrame) -> Optional[date]:
+    date_1 = find_last_true_run_start(data.SMA_50 < data.CLOSE)
+    return date_1
+
+
 def momentum(data: pd.DataFrame) -> Optional[date]:
     date_1 = find_last_true_run_start(data.SMA_50 < data.CLOSE)
     date_2 = find_last_true_run_start(data.SMA_200 < data.SMA_50)
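A small, illustrative way to exercise the two new helpers (it assumes only that the indicator pipeline hands them a date-indexed DataFrame with CLOSE, SMA_50 and SMA_200 columns, as the diff suggests):

import pandas as pd

from bullish.analysis.functions import price_above_sma50, sma_50_above_sma_200

# Five days where the close sits above SMA 50 and SMA 50 sits above SMA 200.
index = pd.date_range("2024-01-01", periods=5, freq="D")
data = pd.DataFrame(
    {
        "CLOSE": [10.0, 11.0, 12.0, 13.0, 14.0],
        "SMA_50": [9.0, 9.5, 10.0, 10.5, 11.0],
        "SMA_200": [8.0, 8.2, 8.4, 8.6, 8.8],
    },
    index=index,
)

# Each helper returns the start date of the latest unbroken run where its
# condition holds (here 2024-01-01), or None if the condition fails on the last row.
print(sma_50_above_sma_200(data))
print(price_above_sma50(data))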
{bullishpy-0.13.0 → bullishpy-0.14.0}/bullish/analysis/indicators.py

@@ -19,8 +19,9 @@ from bullish.analysis.functions import (
     SMA,
     ADOSC,
     PRICE,
-    compute_percentile_return_after_rsi_crossover,
     momentum,
+    sma_50_above_sma_200,
+    price_above_sma50,
 )

 logger = logging.getLogger(__name__)
@@ -206,13 +207,6 @@ def indicators_factory() -> List[Indicator]:
                         (d.RSI < 60) & (d.RSI > 40)
                     ].last_valid_index(),
                 ),
-                Signal(
-                    name="RETURN_AFTER_RSI_CROSSOVER_45_PERIOD_90",
-                    description="Percentile 30 return after RSI crossover 45 in the next 90 days",
-                    type_info="Long",
-                    type=Optional[float],
-                    function=lambda d: compute_percentile_return_after_rsi_crossover(d),
-                ),
             ],
         ),
         Indicator(
@@ -290,6 +284,20 @@ def indicators_factory() -> List[Indicator]:
                     type=Optional[date],
                    function=lambda d: momentum(d),
                 ),
+                Signal(
+                    name="SMA_50_ABOVE_SMA_200",
+                    description="SMA 50 is above SMA 200",
+                    type_info="Overbought",
+                    type=Optional[date],
+                    function=lambda d: sma_50_above_sma_200(d),
+                ),
+                Signal(
+                    name="PRICE_ABOVE_SMA_50",
+                    description="Price is above SMA 50",
+                    type_info="Overbought",
+                    type=Optional[date],
+                    function=lambda d: price_above_sma50(d),
+                ),
             ],
         ),
         Indicator(