wbfdm 2.2.1__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of wbfdm might be problematic. Click here for more details.
- wbfdm/__init__.py +2 -0
- wbfdm/admin/__init__.py +42 -0
- wbfdm/admin/classifications.py +39 -0
- wbfdm/admin/esg.py +23 -0
- wbfdm/admin/exchanges.py +53 -0
- wbfdm/admin/instrument_lists.py +23 -0
- wbfdm/admin/instrument_prices.py +62 -0
- wbfdm/admin/instrument_requests.py +33 -0
- wbfdm/admin/instruments.py +117 -0
- wbfdm/admin/instruments_relationships.py +25 -0
- wbfdm/admin/options.py +101 -0
- wbfdm/analysis/__init__.py +2 -0
- wbfdm/analysis/esg/__init__.py +0 -0
- wbfdm/analysis/esg/enums.py +82 -0
- wbfdm/analysis/esg/esg_analysis.py +217 -0
- wbfdm/analysis/esg/utils.py +13 -0
- wbfdm/analysis/financial_analysis/__init__.py +1 -0
- wbfdm/analysis/financial_analysis/financial_metric_analysis.py +88 -0
- wbfdm/analysis/financial_analysis/financial_ratio_analysis.py +125 -0
- wbfdm/analysis/financial_analysis/financial_statistics_analysis.py +271 -0
- wbfdm/analysis/financial_analysis/statement_with_estimates.py +558 -0
- wbfdm/analysis/financial_analysis/utils.py +316 -0
- wbfdm/analysis/technical_analysis/__init__.py +1 -0
- wbfdm/analysis/technical_analysis/technical_analysis.py +138 -0
- wbfdm/analysis/technical_analysis/traces.py +165 -0
- wbfdm/analysis/utils.py +32 -0
- wbfdm/apps.py +14 -0
- wbfdm/contrib/__init__.py +0 -0
- wbfdm/contrib/dsws/__init__.py +0 -0
- wbfdm/contrib/dsws/client.py +285 -0
- wbfdm/contrib/internal/__init__.py +0 -0
- wbfdm/contrib/internal/dataloaders/__init__.py +0 -0
- wbfdm/contrib/internal/dataloaders/market_data.py +87 -0
- wbfdm/contrib/metric/__init__.py +0 -0
- wbfdm/contrib/metric/admin/__init__.py +2 -0
- wbfdm/contrib/metric/admin/instruments.py +12 -0
- wbfdm/contrib/metric/admin/metrics.py +43 -0
- wbfdm/contrib/metric/apps.py +10 -0
- wbfdm/contrib/metric/backends/__init__.py +2 -0
- wbfdm/contrib/metric/backends/base.py +159 -0
- wbfdm/contrib/metric/backends/performances.py +265 -0
- wbfdm/contrib/metric/backends/statistics.py +182 -0
- wbfdm/contrib/metric/decorators.py +14 -0
- wbfdm/contrib/metric/dispatch.py +23 -0
- wbfdm/contrib/metric/dto.py +88 -0
- wbfdm/contrib/metric/exceptions.py +6 -0
- wbfdm/contrib/metric/factories.py +33 -0
- wbfdm/contrib/metric/filters.py +28 -0
- wbfdm/contrib/metric/migrations/0001_initial.py +88 -0
- wbfdm/contrib/metric/migrations/0002_remove_instrumentmetric_unique_instrument_metric_and_more.py +26 -0
- wbfdm/contrib/metric/migrations/__init__.py +0 -0
- wbfdm/contrib/metric/models.py +180 -0
- wbfdm/contrib/metric/orchestrators.py +94 -0
- wbfdm/contrib/metric/registry.py +80 -0
- wbfdm/contrib/metric/serializers.py +44 -0
- wbfdm/contrib/metric/tasks.py +27 -0
- wbfdm/contrib/metric/tests/__init__.py +0 -0
- wbfdm/contrib/metric/tests/backends/__init__.py +0 -0
- wbfdm/contrib/metric/tests/backends/test_performances.py +152 -0
- wbfdm/contrib/metric/tests/backends/test_statistics.py +48 -0
- wbfdm/contrib/metric/tests/conftest.py +92 -0
- wbfdm/contrib/metric/tests/test_dto.py +73 -0
- wbfdm/contrib/metric/tests/test_models.py +72 -0
- wbfdm/contrib/metric/tests/test_tasks.py +24 -0
- wbfdm/contrib/metric/tests/test_viewsets.py +79 -0
- wbfdm/contrib/metric/urls.py +19 -0
- wbfdm/contrib/metric/viewsets/__init__.py +1 -0
- wbfdm/contrib/metric/viewsets/configs/__init__.py +1 -0
- wbfdm/contrib/metric/viewsets/configs/display.py +92 -0
- wbfdm/contrib/metric/viewsets/configs/menus.py +11 -0
- wbfdm/contrib/metric/viewsets/configs/utils.py +137 -0
- wbfdm/contrib/metric/viewsets/mixins.py +245 -0
- wbfdm/contrib/metric/viewsets/viewsets.py +40 -0
- wbfdm/contrib/msci/__init__.py +0 -0
- wbfdm/contrib/msci/client.py +92 -0
- wbfdm/contrib/msci/dataloaders/__init__.py +0 -0
- wbfdm/contrib/msci/dataloaders/esg.py +87 -0
- wbfdm/contrib/msci/dataloaders/esg_controversies.py +81 -0
- wbfdm/contrib/msci/sync.py +58 -0
- wbfdm/contrib/msci/tests/__init__.py +0 -0
- wbfdm/contrib/msci/tests/conftest.py +1 -0
- wbfdm/contrib/msci/tests/test_client.py +70 -0
- wbfdm/contrib/qa/__init__.py +0 -0
- wbfdm/contrib/qa/apps.py +22 -0
- wbfdm/contrib/qa/database_routers.py +25 -0
- wbfdm/contrib/qa/dataloaders/__init__.py +0 -0
- wbfdm/contrib/qa/dataloaders/adjustments.py +56 -0
- wbfdm/contrib/qa/dataloaders/corporate_actions.py +59 -0
- wbfdm/contrib/qa/dataloaders/financials.py +83 -0
- wbfdm/contrib/qa/dataloaders/market_data.py +117 -0
- wbfdm/contrib/qa/dataloaders/officers.py +59 -0
- wbfdm/contrib/qa/dataloaders/reporting_dates.py +67 -0
- wbfdm/contrib/qa/dataloaders/statements.py +267 -0
- wbfdm/contrib/qa/tasks.py +0 -0
- wbfdm/dataloaders/__init__.py +0 -0
- wbfdm/dataloaders/cache.py +129 -0
- wbfdm/dataloaders/protocols.py +112 -0
- wbfdm/dataloaders/proxies.py +201 -0
- wbfdm/dataloaders/types.py +209 -0
- wbfdm/dynamic_preferences_registry.py +45 -0
- wbfdm/enums.py +657 -0
- wbfdm/factories/__init__.py +13 -0
- wbfdm/factories/classifications.py +56 -0
- wbfdm/factories/controversies.py +27 -0
- wbfdm/factories/exchanges.py +21 -0
- wbfdm/factories/instrument_list.py +22 -0
- wbfdm/factories/instrument_prices.py +79 -0
- wbfdm/factories/instruments.py +63 -0
- wbfdm/factories/instruments_relationships.py +31 -0
- wbfdm/factories/options.py +66 -0
- wbfdm/figures/__init__.py +1 -0
- wbfdm/figures/financials/__init__.py +1 -0
- wbfdm/figures/financials/financial_analysis_charts.py +469 -0
- wbfdm/figures/financials/financials_charts.py +711 -0
- wbfdm/filters/__init__.py +31 -0
- wbfdm/filters/classifications.py +100 -0
- wbfdm/filters/exchanges.py +22 -0
- wbfdm/filters/financials.py +95 -0
- wbfdm/filters/financials_analysis.py +119 -0
- wbfdm/filters/instrument_prices.py +112 -0
- wbfdm/filters/instruments.py +198 -0
- wbfdm/filters/utils.py +44 -0
- wbfdm/import_export/__init__.py +0 -0
- wbfdm/import_export/backends/__init__.py +0 -0
- wbfdm/import_export/backends/cbinsights/__init__.py +2 -0
- wbfdm/import_export/backends/cbinsights/deals.py +44 -0
- wbfdm/import_export/backends/cbinsights/equities.py +41 -0
- wbfdm/import_export/backends/cbinsights/mixin.py +15 -0
- wbfdm/import_export/backends/cbinsights/utils/__init__.py +0 -0
- wbfdm/import_export/backends/cbinsights/utils/classifications.py +4150 -0
- wbfdm/import_export/backends/cbinsights/utils/client.py +217 -0
- wbfdm/import_export/backends/refinitiv/__init__.py +5 -0
- wbfdm/import_export/backends/refinitiv/daily_fundamental.py +36 -0
- wbfdm/import_export/backends/refinitiv/fiscal_period.py +63 -0
- wbfdm/import_export/backends/refinitiv/forecast.py +178 -0
- wbfdm/import_export/backends/refinitiv/fundamental.py +103 -0
- wbfdm/import_export/backends/refinitiv/geographic_segment.py +32 -0
- wbfdm/import_export/backends/refinitiv/instrument.py +55 -0
- wbfdm/import_export/backends/refinitiv/instrument_price.py +77 -0
- wbfdm/import_export/backends/refinitiv/mixin.py +29 -0
- wbfdm/import_export/backends/refinitiv/utils/__init__.py +1 -0
- wbfdm/import_export/backends/refinitiv/utils/controller.py +182 -0
- wbfdm/import_export/handlers/__init__.py +0 -0
- wbfdm/import_export/handlers/instrument.py +253 -0
- wbfdm/import_export/handlers/instrument_list.py +101 -0
- wbfdm/import_export/handlers/instrument_price.py +71 -0
- wbfdm/import_export/handlers/option.py +54 -0
- wbfdm/import_export/handlers/private_equities.py +49 -0
- wbfdm/import_export/parsers/__init__.py +0 -0
- wbfdm/import_export/parsers/cbinsights/__init__.py +0 -0
- wbfdm/import_export/parsers/cbinsights/deals.py +39 -0
- wbfdm/import_export/parsers/cbinsights/equities.py +56 -0
- wbfdm/import_export/parsers/cbinsights/fundamentals.py +45 -0
- wbfdm/import_export/parsers/refinitiv/__init__.py +0 -0
- wbfdm/import_export/parsers/refinitiv/daily_fundamental.py +7 -0
- wbfdm/import_export/parsers/refinitiv/forecast.py +7 -0
- wbfdm/import_export/parsers/refinitiv/fundamental.py +9 -0
- wbfdm/import_export/parsers/refinitiv/geographic_segment.py +7 -0
- wbfdm/import_export/parsers/refinitiv/instrument.py +75 -0
- wbfdm/import_export/parsers/refinitiv/instrument_price.py +26 -0
- wbfdm/import_export/parsers/refinitiv/utils.py +96 -0
- wbfdm/import_export/resources/__init__.py +0 -0
- wbfdm/import_export/resources/classification.py +23 -0
- wbfdm/import_export/resources/instrument_prices.py +33 -0
- wbfdm/import_export/resources/instruments.py +176 -0
- wbfdm/jinja2.py +7 -0
- wbfdm/management/__init__.py +30 -0
- wbfdm/menu.py +11 -0
- wbfdm/migrations/0001_initial.py +71 -0
- wbfdm/migrations/0002_rename_statements_instrumentlookup_financials_and_more.py +144 -0
- wbfdm/migrations/0003_instrument_estimate_backend_and_more.py +34 -0
- wbfdm/migrations/0004_rename_financials_instrumentlookup_statements_and_more.py +86 -0
- wbfdm/migrations/0005_instrument_corporate_action_backend.py +29 -0
- wbfdm/migrations/0006_instrument_officer_backend.py +29 -0
- wbfdm/migrations/0007_instrument_country_instrument_currency_and_more.py +117 -0
- wbfdm/migrations/0008_controversy.py +75 -0
- wbfdm/migrations/0009_alter_controversy_flag_alter_controversy_initiated_and_more.py +85 -0
- wbfdm/migrations/0010_classification_classificationgroup_deal_exchange_and_more.py +1299 -0
- wbfdm/migrations/0011_delete_instrumentlookup_instrument_corporate_actions_and_more.py +169 -0
- wbfdm/migrations/0012_instrumentprice_created_instrumentprice_modified.py +564 -0
- wbfdm/migrations/0013_instrument_is_investable_universe_and_more.py +199 -0
- wbfdm/migrations/0014_alter_controversy_instrument.py +22 -0
- wbfdm/migrations/0015_instrument_instrument_investible_index.py +16 -0
- wbfdm/migrations/0016_instrumenttype_name_repr.py +18 -0
- wbfdm/migrations/0017_instrument_instrument_security_index.py +16 -0
- wbfdm/migrations/0018_instrument_instrument_level_index.py +20 -0
- wbfdm/migrations/0019_alter_controversy_source.py +17 -0
- wbfdm/migrations/0020_optionaggregate_option_and_more.py +249 -0
- wbfdm/migrations/0021_delete_instrumentdailystatistics.py +15 -0
- wbfdm/migrations/0022_instrument_cusip_option_open_interest_20d_and_more.py +91 -0
- wbfdm/migrations/0023_instrument_unique_ric_instrument_unique_rmc_and_more.py +53 -0
- wbfdm/migrations/0024_option_open_interest_10d_option_volume_10d_and_more.py +36 -0
- wbfdm/migrations/0025_instrument_is_primary_and_more.py +29 -0
- wbfdm/migrations/0026_instrument_is_cash_equivalent.py +30 -0
- wbfdm/migrations/0027_remove_instrument_unique_ric_and_more.py +100 -0
- wbfdm/migrations/__init__.py +0 -0
- wbfdm/models/__init__.py +4 -0
- wbfdm/models/esg/__init__.py +1 -0
- wbfdm/models/esg/controversies.py +81 -0
- wbfdm/models/exchanges/__init__.py +1 -0
- wbfdm/models/exchanges/exchanges.py +223 -0
- wbfdm/models/fields.py +117 -0
- wbfdm/models/fk_fields.py +403 -0
- wbfdm/models/indicators.py +0 -0
- wbfdm/models/instruments/__init__.py +19 -0
- wbfdm/models/instruments/classifications.py +265 -0
- wbfdm/models/instruments/instrument_lists.py +120 -0
- wbfdm/models/instruments/instrument_prices.py +540 -0
- wbfdm/models/instruments/instrument_relationships.py +251 -0
- wbfdm/models/instruments/instrument_requests.py +196 -0
- wbfdm/models/instruments/instruments.py +991 -0
- wbfdm/models/instruments/llm/__init__.py +1 -0
- wbfdm/models/instruments/llm/create_instrument_news_relationships.py +78 -0
- wbfdm/models/instruments/mixin/__init__.py +0 -0
- wbfdm/models/instruments/mixin/financials_computed.py +804 -0
- wbfdm/models/instruments/mixin/financials_serializer_fields.py +1407 -0
- wbfdm/models/instruments/mixin/instruments.py +294 -0
- wbfdm/models/instruments/options.py +225 -0
- wbfdm/models/instruments/private_equities.py +59 -0
- wbfdm/models/instruments/querysets.py +73 -0
- wbfdm/models/instruments/utils.py +41 -0
- wbfdm/preferences.py +21 -0
- wbfdm/serializers/__init__.py +4 -0
- wbfdm/serializers/esg.py +36 -0
- wbfdm/serializers/exchanges.py +39 -0
- wbfdm/serializers/instruments/__init__.py +37 -0
- wbfdm/serializers/instruments/classifications.py +139 -0
- wbfdm/serializers/instruments/instrument_lists.py +61 -0
- wbfdm/serializers/instruments/instrument_prices.py +73 -0
- wbfdm/serializers/instruments/instrument_relationships.py +170 -0
- wbfdm/serializers/instruments/instrument_requests.py +61 -0
- wbfdm/serializers/instruments/instruments.py +274 -0
- wbfdm/serializers/instruments/mixins.py +104 -0
- wbfdm/serializers/officers.py +20 -0
- wbfdm/signals.py +7 -0
- wbfdm/sync/__init__.py +0 -0
- wbfdm/sync/abstract.py +31 -0
- wbfdm/sync/runner.py +22 -0
- wbfdm/tasks.py +69 -0
- wbfdm/tests/__init__.py +0 -0
- wbfdm/tests/analysis/__init__.py +0 -0
- wbfdm/tests/analysis/financial_analysis/__init__.py +0 -0
- wbfdm/tests/analysis/financial_analysis/test_statement_with_estimates.py +392 -0
- wbfdm/tests/analysis/financial_analysis/test_utils.py +322 -0
- wbfdm/tests/analysis/test_esg.py +159 -0
- wbfdm/tests/conftest.py +92 -0
- wbfdm/tests/dataloaders/__init__.py +0 -0
- wbfdm/tests/dataloaders/test_cache.py +73 -0
- wbfdm/tests/models/__init__.py +0 -0
- wbfdm/tests/models/test_classifications.py +99 -0
- wbfdm/tests/models/test_exchanges.py +7 -0
- wbfdm/tests/models/test_instrument_list.py +117 -0
- wbfdm/tests/models/test_instrument_prices.py +306 -0
- wbfdm/tests/models/test_instruments.py +202 -0
- wbfdm/tests/models/test_merge.py +99 -0
- wbfdm/tests/models/test_options.py +69 -0
- wbfdm/tests/test_tasks.py +6 -0
- wbfdm/tests/tests.py +10 -0
- wbfdm/urls.py +222 -0
- wbfdm/utils.py +54 -0
- wbfdm/viewsets/__init__.py +10 -0
- wbfdm/viewsets/configs/__init__.py +5 -0
- wbfdm/viewsets/configs/buttons/__init__.py +8 -0
- wbfdm/viewsets/configs/buttons/classifications.py +23 -0
- wbfdm/viewsets/configs/buttons/exchanges.py +9 -0
- wbfdm/viewsets/configs/buttons/instrument_prices.py +49 -0
- wbfdm/viewsets/configs/buttons/instruments.py +283 -0
- wbfdm/viewsets/configs/display/__init__.py +22 -0
- wbfdm/viewsets/configs/display/classifications.py +138 -0
- wbfdm/viewsets/configs/display/esg.py +75 -0
- wbfdm/viewsets/configs/display/exchanges.py +42 -0
- wbfdm/viewsets/configs/display/instrument_lists.py +137 -0
- wbfdm/viewsets/configs/display/instrument_prices.py +199 -0
- wbfdm/viewsets/configs/display/instrument_requests.py +116 -0
- wbfdm/viewsets/configs/display/instruments.py +618 -0
- wbfdm/viewsets/configs/display/instruments_relationships.py +65 -0
- wbfdm/viewsets/configs/display/monthly_performances.py +72 -0
- wbfdm/viewsets/configs/display/officers.py +16 -0
- wbfdm/viewsets/configs/display/prices.py +21 -0
- wbfdm/viewsets/configs/display/statement_with_estimates.py +101 -0
- wbfdm/viewsets/configs/display/statements.py +48 -0
- wbfdm/viewsets/configs/endpoints/__init__.py +41 -0
- wbfdm/viewsets/configs/endpoints/classifications.py +87 -0
- wbfdm/viewsets/configs/endpoints/esg.py +20 -0
- wbfdm/viewsets/configs/endpoints/exchanges.py +6 -0
- wbfdm/viewsets/configs/endpoints/financials_analysis.py +65 -0
- wbfdm/viewsets/configs/endpoints/instrument_lists.py +38 -0
- wbfdm/viewsets/configs/endpoints/instrument_prices.py +51 -0
- wbfdm/viewsets/configs/endpoints/instrument_requests.py +20 -0
- wbfdm/viewsets/configs/endpoints/instruments.py +13 -0
- wbfdm/viewsets/configs/endpoints/instruments_relationships.py +31 -0
- wbfdm/viewsets/configs/endpoints/statements.py +6 -0
- wbfdm/viewsets/configs/menus/__init__.py +9 -0
- wbfdm/viewsets/configs/menus/classifications.py +19 -0
- wbfdm/viewsets/configs/menus/exchanges.py +10 -0
- wbfdm/viewsets/configs/menus/instrument_lists.py +10 -0
- wbfdm/viewsets/configs/menus/instruments.py +20 -0
- wbfdm/viewsets/configs/menus/instruments_relationships.py +33 -0
- wbfdm/viewsets/configs/titles/__init__.py +42 -0
- wbfdm/viewsets/configs/titles/classifications.py +79 -0
- wbfdm/viewsets/configs/titles/esg.py +11 -0
- wbfdm/viewsets/configs/titles/exchanges.py +12 -0
- wbfdm/viewsets/configs/titles/financial_ratio_analysis.py +6 -0
- wbfdm/viewsets/configs/titles/financials_analysis.py +50 -0
- wbfdm/viewsets/configs/titles/instrument_prices.py +50 -0
- wbfdm/viewsets/configs/titles/instrument_requests.py +16 -0
- wbfdm/viewsets/configs/titles/instruments.py +31 -0
- wbfdm/viewsets/configs/titles/instruments_relationships.py +21 -0
- wbfdm/viewsets/configs/titles/market_data.py +13 -0
- wbfdm/viewsets/configs/titles/prices.py +15 -0
- wbfdm/viewsets/configs/titles/statement_with_estimates.py +10 -0
- wbfdm/viewsets/esg.py +72 -0
- wbfdm/viewsets/exchanges.py +63 -0
- wbfdm/viewsets/financial_analysis/__init__.py +3 -0
- wbfdm/viewsets/financial_analysis/financial_metric_analysis.py +85 -0
- wbfdm/viewsets/financial_analysis/financial_ratio_analysis.py +85 -0
- wbfdm/viewsets/financial_analysis/statement_with_estimates.py +145 -0
- wbfdm/viewsets/instruments/__init__.py +80 -0
- wbfdm/viewsets/instruments/classifications.py +279 -0
- wbfdm/viewsets/instruments/financials_analysis.py +614 -0
- wbfdm/viewsets/instruments/instrument_lists.py +77 -0
- wbfdm/viewsets/instruments/instrument_prices.py +542 -0
- wbfdm/viewsets/instruments/instrument_requests.py +51 -0
- wbfdm/viewsets/instruments/instruments.py +106 -0
- wbfdm/viewsets/instruments/instruments_relationships.py +235 -0
- wbfdm/viewsets/instruments/utils.py +27 -0
- wbfdm/viewsets/market_data.py +172 -0
- wbfdm/viewsets/mixins.py +9 -0
- wbfdm/viewsets/officers.py +27 -0
- wbfdm/viewsets/prices.py +62 -0
- wbfdm/viewsets/statements/__init__.py +1 -0
- wbfdm/viewsets/statements/statements.py +100 -0
- wbfdm/viewsets/technical_analysis/__init__.py +1 -0
- wbfdm/viewsets/technical_analysis/monthly_performances.py +93 -0
- wbfdm-2.2.1.dist-info/METADATA +15 -0
- wbfdm-2.2.1.dist-info/RECORD +337 -0
- wbfdm-2.2.1.dist-info/WHEEL +5 -0
|
@@ -0,0 +1,316 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
from contextlib import suppress
|
|
3
|
+
from datetime import date
|
|
4
|
+
|
|
5
|
+
import pandas as pd
|
|
6
|
+
from wbcore.pandas.utils import (
|
|
7
|
+
override_number_to_percent,
|
|
8
|
+
override_number_to_x,
|
|
9
|
+
override_number_with_currency,
|
|
10
|
+
)
|
|
11
|
+
from wbfdm.enums import CalendarType, Financial, MarketData, PeriodType
|
|
12
|
+
from wbfdm.models import Instrument
|
|
13
|
+
from wbfdm.utils import rename_period_index_level_to_repr
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Loader:
    """
    Utility class to load financial data into a pandas DataFrame with (year, interim) as index.

    The raw rows come from the instrument's ``financials`` dataloader; optional market-data
    and statement series can be merged in.  Problems encountered while loading are collected
    in :attr:`errors` (category -> list of messages) instead of raising, so callers can
    surface them to the user.
    """

    def __init__(
        self,
        instrument: Instrument,
        values: list[Financial],
        calendar_type: CalendarType = CalendarType.FISCAL,
        market_data_values: list[MarketData] | None = None,
        statement_values: list[Financial] | None = None,
    ):
        """
        Args:
            instrument: The instrument whose financial data is loaded.
            values: The financial series to load.
            calendar_type: Calendar used to bucket periods (fiscal by default).
            market_data_values: Optional extra market data to merge into the dataframe.
            statement_values: Optional extra statement series to merge into the dataframe.
        """
        self.instrument = instrument
        self.calendar_type = calendar_type
        self.values = values
        self.market_data_values = (
            market_data_values  # specify if any extra market data needs to be merged into the dataframe
        )
        self.statement_values = (
            statement_values  # specify if any extra statement needs to be merged into the dataframe
        )
        # error category -> list of human readable messages, filled while loading
        self.errors: dict[str, list[str]] = defaultdict(list)

    def load(self) -> pd.DataFrame:
        """
        Entry function of the class: loads the data into the dataframe and normalizes it.

        Returns:
            The normalized DataFrame with human readable period labels as index columns.
        """
        return self._normalize_df(*self._get_base_df())

    def _get_base_df(self) -> tuple[pd.DataFrame, pd.Series]:
        """
        Load data into a dataframe and return the resulting data with index
        [year, interim, period_type, estimate, period_end_date] and "financial" as columns.
        The per-period source Series is returned as second element.

        Returns:
            A tuple of (pivoted dataframe, source Series keyed by (year, interim, period_type)).
        """
        # Get the base dataframe from a dataloader
        df = pd.DataFrame(
            Instrument.objects.filter(id=self.instrument.id).dl.financials(
                values=self.values,
                period_type=PeriodType.ALL,
                from_year=date.today().year - 5,  # dataloader window: last five years
                calendar_type=self.calendar_type,
            )
        )
        if df.empty:
            # Record the problem and fall back to an empty frame with the expected
            # columns so the pivot/groupby below still work.
            self.errors["missing_data"].append("Missing financial data")
            df = pd.DataFrame(
                columns=[
                    "year",
                    "interim",
                    "period_type",
                    "estimate",
                    "period_end_date",
                    "source",
                    "financial",
                    "value",
                ]
            )

        df["period_end_date"] = pd.to_datetime(df["period_end_date"])
        # One source per (year, interim, period_type); .source yields a Series.
        source_df = (
            df[["year", "interim", "period_type", "source"]].groupby(["year", "interim", "period_type"]).first().source
        )
        # Pivot the data around
        df = df.pivot_table(
            index=["year", "interim", "period_type", "estimate", "period_end_date"],
            columns="financial",
            values="value",
        )
        allowed_columns: list[str] = [v.value for v in self.values]
        if self.statement_values and not df.empty:
            df = self._annotate_statement_data(df, self.statement_values)
            allowed_columns.extend([v.value for v in self.statement_values])
        # If market data should be included here, we need to annotate it and afterwards rename the column axis
        if self.market_data_values and not df.empty:
            df = self._annotate_market_data(df, self.market_data_values)
            allowed_columns.extend([v.value for v in self.market_data_values])
        # Keep only requested columns, in request order, dropping the missing ones.
        df = df[[value for value in allowed_columns if value in df.columns]]
        return df, source_df

    # UTILS METHODS

    def _annotate_market_data(self, df: pd.DataFrame, market_data_values: list[MarketData]) -> pd.DataFrame:
        """
        Annotate the given market data into the given dataframe.

        Args:
            df: A pandas dataframe to annotate extra data into.
            market_data_values: A list of MarketData enum members to merge.

        Returns:
            The extended dataframe with market data, column axis renamed to "financial".
        """
        # We need to reset the indexes as we merge based on columns
        market_data_df = pd.DataFrame(
            Instrument.objects.filter(id=self.instrument.id).dl.market_data(
                from_date=df.index.get_level_values("period_end_date").min() if not df.empty else None,
                target_currency=self.instrument.currency.key,
            )
        )
        values = [mdv.value for mdv in market_data_values]
        # We convert the period_end_date column to a native datetime object to allow for merges based on backwards data
        if not market_data_df.empty:
            market_data_df["period_end_date"] = pd.to_datetime(market_data_df["valuation_date"])
            market_data_df = market_data_df[["period_end_date", *values]].sort_values("period_end_date")
            # As-of merge: each period picks the latest market data at or before its end date.
            df = pd.merge_asof(
                left=df.reset_index().sort_values(by="period_end_date"),
                right=market_data_df,
                on="period_end_date",
                direction="backward",
            ).set_index(["year", "interim", "period_type", "estimate", "period_end_date"])
            if df[values].dropna().empty:
                self.errors["missing_data"].append(
                    "We could not find any market data covering the financial statement period"
                )
        return df.rename_axis("financial", axis="columns")

    def _annotate_statement_data(self, df: pd.DataFrame, statement_values: list[Financial]) -> pd.DataFrame:
        """
        Annotate the given statement data into the given dataframe.

        Args:
            df: A pandas dataframe to annotate extra data into.
            statement_values: A list of Financial enum members to merge.

        Returns:
            The extended dataframe with statement data, column axis renamed to "financial".
        """
        statement_df = pd.DataFrame(
            Instrument.objects.filter(id=self.instrument.id).dl.statements(
                financials=statement_values, from_year=date.today().year - 5
            ),
        )

        if not statement_df.empty:
            statement_df["period_end_date"] = pd.to_datetime(statement_df["period_end_date"])
            statement_df = statement_df.pivot_table(
                index=["year", "interim", "period_end_date"], columns="financial", values="value"
            )
            # Forward-fill so periods without a fresh statement inherit the previous one.
            statement_df = statement_df.ffill()
            df = pd.merge(
                how="left",
                left=df.reset_index().sort_values(by="period_end_date"),
                right=statement_df,
                on=["year", "interim"],
            ).set_index(["year", "interim", "period_type", "estimate", "period_end_date"])
        else:
            self.errors["missing_data"].append("No statement data")

        return df.rename_axis("financial", axis="columns")

    def _normalize_df(self, df: pd.DataFrame, source_df: pd.Series) -> pd.DataFrame:
        """
        Take a dataframe and its related source Series (one source per period index),
        reset the index down to (year, interim, period_type) and detect any duplicated row.

        If a duplicate is detected, the attribute `errors` is appended with the duplicated
        index and the first occurrence is kept.

        Finally, the index is renamed into a more human readable format and a yearly row
        is guaranteed to be present for every year of data.

        Args:
            df: The DataFrame to normalize.
            source_df: The Series holding the source info for every period of the DataFrame.

        Returns:
            A normalized DataFrame.
        """

        def _ensure_yearly_row_exists(row):
            # Inject a synthetic yearly row (interim == 0) when a year only has interims.
            row["interim"] = row["interim"].astype(int)
            if row[row["interim"] == 0].empty:
                row = pd.concat(
                    [
                        pd.DataFrame(
                            [
                                {
                                    "year": row.name,  # group key: the year
                                    "interim": 0,
                                    "period_type": "Y",
                                    "estimate": True,
                                    "period_end_date": row["period_end_date"].max(),
                                }
                            ]
                        ),
                        row,
                    ],
                    axis=0,
                )
            return row

        # NOTE(review): new_index is a DataFrame of index columns; reindexing with a
        # DataFrame relies on pandas coercing it to labels — consider
        # pd.MultiIndex.from_frame(new_index) for an explicit conversion. TODO confirm.
        new_index = (
            df.index.to_frame(index=False)
            .groupby(["year"], group_keys=False, as_index=False)
            .apply(lambda row: _ensure_yearly_row_exists(row), include_groups=True)
            .reset_index(drop=True)
        )
        df = df.reindex(new_index)

        # Move estimate/period_end_date out of the index; keep (year, interim, period_type).
        df = df.sort_index().reset_index(
            level=[3, 4], names=["year", "interim", "period_type", "estimate", "period_end_date"]
        )
        # detect duplicates, gracefully handle it by taking the first but log the error for further usage
        index_duplicated = df.index.duplicated()
        if index_duplicated.any():
            for year, interim, period_type in df.index[index_duplicated]:
                interim_info = f"{year} Interim {period_type}{interim}"
                with suppress(KeyError):
                    # source_df is a Series keyed by (year, interim, period_type).
                    # BUGFIX: the previous extra positional key `"source"` was treated
                    # as an additional index level, so the lookup always raised and
                    # was silently suppressed — the source tag never appeared.
                    if source := source_df.loc[(year, interim, period_type)]:
                        interim_info += f" [{source.upper()}]"
                self.errors["duplicated_interims"].append(interim_info)

            # remove duplicated index
            df = df[~index_duplicated]

        df = rename_period_index_level_to_repr(df)

        return df
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
class FinancialAnalysisResult:
    """
    Wrapper class to help present a multi index pivoted dataframe with (year, interim)
    as index and financials as columns into a transposed DataFrame for a tree view.

    This transposed dataframe is available under the attribute `formatted_df`; the
    per-period estimate flags are available under `estimated_mapping`.
    """

    # Verbose label for every known market-data and financial column.
    FINANCIAL_MAP = {**MarketData.name_mapping(), **Financial.name_mapping()}

    def __init__(
        self,
        df,
        ordering: list[str] | None = None,
        ignore_group_keys: list[Financial] | None = None,
        override_number_with_currency: str | None = None,
        override_number_with_currency_financials: list[str] | None = None,
        override_number_to_x_financials: list[str] | None = None,
        override_number_to_percent_financials: list[str] | None = None,
        errors: dict[str, list[str]] | None = None,
    ):
        """
        Args:
            df: Pivoted frame with (year, interim, ...) index, one column per financial
                plus the bookkeeping columns "estimate" and "period_end_date".
            ordering: Optional explicit column order; columns not listed and not known
                to FINANCIAL_MAP are dropped.
            ignore_group_keys: Financials that must not act as tree group keys.
            override_number_with_currency: Currency used by the currency decorator.
            override_number_with_currency_financials: Rows decorated with the currency.
            override_number_to_x_financials: Rows decorated as multiples ("x").
            override_number_to_percent_financials: Rows decorated as percentages.
            errors: Loading errors carried over from the Loader (see Loader.errors).
        """
        self.df = df
        self.columns = list(self.df.drop(columns=["estimate", "period_end_date"]).columns)

        if ordering:
            # BUGFIX: build a new list instead of extend() so the caller's `ordering`
            # list is not mutated in place (it may be a shared/module-level constant).
            ordering = [*ordering, "estimate", "period_end_date"]
            allowed_columns = list(self.FINANCIAL_MAP.keys())
            self.df = self.df[[col for col in ordering if col in self.df.columns]]
            allowed_columns.extend(ordering)
            self.df = self.df.drop(columns=self.df.columns.difference(allowed_columns))

        if not ignore_group_keys:
            ignore_group_keys = []
        self.ignore_group_keys = ignore_group_keys
        # NOTE: the parameter shadows the imported helper of the same name within
        # __init__ only; _get_formatted_df still resolves the module-level function.
        self.override_number_with_currency = override_number_with_currency
        self.override_number_with_currency_financials = override_number_with_currency_financials
        self.override_number_to_x_financials = override_number_to_x_financials
        self.override_number_to_percent_financials = override_number_to_percent_financials
        self.errors = errors

        self.formatted_df, self.estimated_mapping = self._get_formatted_df()

    def _get_formatted_df(self) -> tuple[pd.DataFrame, dict[str, bool]]:
        """
        Transpose the wrapped dataframe for tree display.

        Returns:
            A tuple of (transposed frame with "id", "financial" and "_group_key"
            columns, mapping of "year-interim" label to its estimate flag).
        """
        # Transpose and reset the index twice to create an artificial index-column
        df = self.df.copy()

        # Flatten and Rename index into year-interim format
        df.index = df.index.map(lambda index: f"{index[0]}-{index[1]}")

        # store the estimate per index
        estimated_mapping = df["estimate"].to_dict()

        df = df.drop(columns=["estimate", "period_end_date"])

        # get the group keys minus the one ignored
        # NOTE(review): `col in Financial` relies on enum membership testing with raw
        # strings (Python 3.12 semantics or a custom __contains__) — confirm target runtime.
        group_keys = [col if col not in self.ignore_group_keys and col in Financial else None for col in df.columns]

        # Transpose table
        df = df.T.reset_index().reset_index()

        df["_group_key"] = group_keys

        if "financial" in df.columns:
            # set the _overriding columns to define extra decorator for the frontend
            if self.override_number_with_currency and self.override_number_with_currency_financials:
                override_number_with_currency(
                    df,
                    self.override_number_with_currency,
                    *list(map(lambda x: df["financial"] == x, self.override_number_with_currency_financials)),
                )
            if self.override_number_to_x_financials:
                override_number_to_x(
                    df, *list(map(lambda x: df["financial"] == x, self.override_number_to_x_financials))
                )
            if self.override_number_to_percent_financials:
                override_number_to_percent(
                    df, *list(map(lambda x: df["financial"] == x, self.override_number_to_percent_financials))
                )
            # Rename Financials into their verbose representation
            df.financial = df.financial.map(self.FINANCIAL_MAP)

        return df.rename(columns={"index": "id"}), estimated_mapping
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .technical_analysis import TechnicalAnalysis
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
from datetime import date, timedelta
|
|
2
|
+
from typing import TYPE_CHECKING, Literal
|
|
3
|
+
|
|
4
|
+
import pandas as pd
|
|
5
|
+
from pandas.tseries.offsets import BDay
|
|
6
|
+
from stockstats import StockDataFrame
|
|
7
|
+
from wbfdm.analysis.technical_analysis.traces import TechnicalAnalysisTraceFactory
|
|
8
|
+
from wbfdm.enums import MarketData
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from wbfdm.models import Instrument
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TechnicalAnalysis:
    """Technical-analysis helper wrapping a stockstats ``StockDataFrame``.

    Indicator columns (SMA, MACD, Bollinger, ...) are materialized lazily by
    stockstats: accessing a well-known column name on the underlying frame
    triggers the computation, which is why the ``add_*`` helpers below consist
    of bare subscript expressions.
    """

    def __init__(self, instrument: "Instrument", sdf: StockDataFrame):
        self.instrument = instrument
        self._sdf = sdf

    def trace_factory(self) -> TechnicalAnalysisTraceFactory:
        """Return a plotly trace factory bound to this analysis."""
        return TechnicalAnalysisTraceFactory(self)

    @property
    def instrument_name(self):
        # Convenience accessor (e.g. for chart labelling).
        return self.instrument.name

    @classmethod
    def init_from_dataloader(cls, instrument: "Instrument", dataloader):
        """Build an analysis from an iterable of market-data rows.

        Rows are indexed by their (renamed) ``date`` column, sorted and
        back-filled. When the rows lack the expected columns (``KeyError``),
        an empty frame with the canonical columns is used instead.
        """
        try:
            return cls(
                instrument,
                StockDataFrame.retype(
                    pd.DataFrame(dataloader)
                    .rename(columns={"valuation_date": "date"})
                    .set_index("date")
                    .sort_index()
                    .bfill()
                ),
            )
        except KeyError:
            return cls(instrument, StockDataFrame(columns=["date", "close", "volume", "calculated"]))

    @classmethod
    def init_full_from_instrument(
        cls, instrument: "Instrument", from_date: date | None = None, to_date: date | None = None
    ):
        """Load every market-data field for *instrument* over the given window."""
        return cls.init_from_dataloader(
            instrument,
            instrument.__class__.objects.filter(id=instrument.id).dl.market_data(from_date=from_date, to_date=to_date),
        )

    @classmethod
    def init_close_from_instrument(
        cls, instrument: "Instrument", from_date: date | None = None, to_date: date | None = None
    ):
        """Load only closing prices for *instrument* over the given window."""
        return cls.init_from_dataloader(
            instrument,
            instrument.__class__.objects.filter(id=instrument.id).dl.market_data(
                values=[MarketData.CLOSE], from_date=from_date, to_date=to_date
            ),
        )

    def add_sma(self, window, field="close"):
        """Materialize the simple-moving-average column (stockstats lazy access)."""
        self._sdf[[f"{field}_{window}_sma"]]

    def add_roc(self, window, field="close"):
        """Materialize the rate-of-change column."""
        self._sdf[[f"{field}_{window}_roc"]]

    def add_shift(self, window, field="close"):
        """Materialize the shifted-by-*window* column."""
        self._sdf[[f"{field}_{window}_s"]]

    def add_delta(self, window, field="close"):
        """Materialize the delta-over-*window* column."""
        self._sdf[[f"{field}_{window}_d"]]

    def add_drawdown(self, field="close"):
        """Add a ``drawdown`` column: distance of *field* from its running maximum."""
        self._sdf["drawdown"] = self._sdf[field] - self._sdf[field].cummax()

    def add_macd(self):
        """Materialize MACD line, histogram and signal columns."""
        self._sdf[["macd", "macdh", "macds"]]

    def add_bollinger(self, window=14):
        """Materialize Bollinger middle/upper/lower band columns."""
        self._sdf[[f"boll_{window}", f"boll_ub_{window}", f"boll_lb_{window}"]]
        # NOTE(review): bfill() is not in-place and its result is discarded, so
        # this line is currently a no-op — confirm whether
        # self._sdf.bfill(inplace=True) was intended before changing stored data.
        self._sdf.bfill()

    def add_energy_index(self):
        """Materialize the CR energy index and its moving averages."""
        self._sdf[["cr", "cr-ma1", "cr-ma2", "cr-ma3"]]

    def add_williams_index(self, window=14):
        """Materialize the Williams %R column."""
        self._sdf[[f"wr_{window}"]]

    def add_volume_variation_index(self, window=26):
        """Materialize the volume variation ratio column."""
        self._sdf[[f"vr_{window}"]]

    def add_return(self, return_type: Literal["log"] | Literal["normal"] = "normal"):
        """Add simple (``ret``) or logarithmic (``log-ret``) period returns."""
        if return_type == "log":
            self._sdf[["log-ret"]]
        elif return_type == "normal":
            self._sdf["ret"] = (self._sdf["close"] - self._sdf["close_-1_s"]) / self._sdf["close_-1_s"]

    def add_cumulative_return(self, return_type: Literal["log"] | Literal["normal"] = "normal"):
        """Add a compounded cumulative return column for the chosen return type."""
        if return_type == "log":
            self._sdf["cum-log-ret"] = (1 + self._sdf[["log-ret"]]).cumprod() - 1
        elif return_type == "normal":
            self.add_return("normal")
            self._sdf["cum-ret"] = (1 + self._sdf[["ret"]]).cumprod() - 1

    def get_performance_between_dates(self, from_date: date, to_date: date | None = None) -> float:
        """Return close-to-close performance between *from_date* and *to_date*.

        ``from_date`` is shifted back one business day so the move on
        ``from_date`` itself is included. ``to_date`` defaults to today.
        Returns 0 when no close prices fall inside the window.
        """
        if to_date is None:
            to_date = date.today()

        close = self.df[(self.df.index >= (pd.Timestamp(from_date) - BDay(1)).date()) & (self.df.index <= to_date)][
            "close"
        ]
        if close.empty:
            return 0
        # Positional .iloc access: label-based close[-1]/close[0] relied on the
        # positional fallback deprecated and removed in pandas 2.x.
        return (close.iloc[-1] - close.iloc[0]) / close.iloc[0]

    def get_performance_year_to_date(self) -> float:
        """Performance from January 1st of the current year until today."""
        return self.get_performance_between_dates(from_date=date.today().replace(month=1, day=1))

    def get_performance_months(self, months: int) -> float:
        """Performance over the last *months* months (approximated as 30 days each)."""
        return self.get_performance_between_dates(from_date=date.today() - timedelta(30 * months))

    def get_performances_dataframe(self, freq: Literal["Y"] | Literal["ME"]) -> pd.DataFrame:
        """Return last close and period-over-period performance resampled at *freq*."""
        # Work on a copy so the datetime index conversion below does not mutate
        # the shared underlying StockDataFrame exposed by the ``df`` property.
        df = self.df.copy()
        df.index = pd.to_datetime(df.index)
        df = df.groupby(pd.Grouper(freq=freq)).last()
        df["performance"] = df["close"].pct_change()
        return df[["close", "performance"]]

    def get_annual_performances_dataframe(self):
        """Yearly last close and performance."""
        return self.get_performances_dataframe("Y")

    def get_monthly_performances_dataframe(self):
        """Month-end last close and performance."""
        return self.get_performances_dataframe("ME")

    @property
    def df(self):
        # Expose the underlying StockDataFrame.
        return self._sdf
|
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
from contextlib import suppress
|
|
2
|
+
from typing import TYPE_CHECKING, Iterable, TypeVar
|
|
3
|
+
|
|
4
|
+
import pandas as pd
|
|
5
|
+
from plotly import graph_objects as go
|
|
6
|
+
|
|
7
|
+
from ..utils import normalize_series
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from wbfdm.analysis.technical_analysis.technical_analysis import TechnicalAnalysis
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
T = TypeVar("T", bound=go.Candlestick | go.Ohlc)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class TechnicalAnalysisTraceFactory:
    """Build plotly traces (lines, OHLC, bars) from a ``TechnicalAnalysis``.

    Most public methods are generators yielding one or more traces; callers
    are expected to iterate them (e.g. ``fig.add_traces(factory.close_trace())``).
    """

    def __init__(self, ta: "TechnicalAnalysis"):
        self.ta = ta

    def _line_trace(
        self,
        key: str,
        prefix: str,
        base_series: pd.Series | None = None,
        percent_format: bool = False,
        line_options: dict | None = None,
        show_estimates: bool = False,
    ) -> Iterable[go.Scatter]:
        """Yield up to two scatter traces for column *key*.

        The first trace covers "real" data (from the first non-calculated
        point onwards); a second, dash-dot trace covers any earlier
        estimated/backtesting points. Yields nothing when the (normalized)
        series is empty.
        """
        # Parametrization
        series = self.ta.df[key]

        # Get the calculated time series. By default all values are considered non-estimated.
        calculated = pd.Series(False, index=self.ta.df.index)
        with suppress(KeyError, UserWarning):
            if show_estimates:
                calculated = self.ta.df["calculated"]

        if not (series := normalize_series(series, base_series=base_series)).empty:
            # Inception date: first index whose value is not flagged as calculated
            inception_date = series.loc[~calculated].index.min()
            real_series = series.loc[series.index >= inception_date]

            # Prepare chart options
            name = str(self.ta.instrument)
            line_options = line_options if line_options else {}
            text = []

            format_template = "{y:.2%}" if percent_format else "{y:.2f}"
            hovertemplate = "<b>" + prefix + "</b>: %" + format_template
            # If a base series is present, we add a line to the hover template showing the difference
            if base_series is not None and not base_series.empty:
                hovertemplate += "<br>∆ : %{text}"
                text = [format_template.format(y=y) for y in (real_series - base_series).dropna().values]

            yield go.Scatter(
                x=real_series.index,
                y=real_series,
                line=line_options,
                name=name,
                legendgroup=name,
                hovertemplate=hovertemplate,
                text=text,
            )
            # If data happens before the inception date, we consider it estimated/backtesting data
            backtesting_series = series.loc[series.index < inception_date]
            if not backtesting_series.empty:
                yield go.Scatter(
                    x=backtesting_series.index,
                    y=backtesting_series,
                    line={**line_options, "dash": "dashdot"},
                    name="Estimated " + name,
                    legendgroup=name,
                )

    def _ohlc_trace(self, graph_object: type[T]) -> Iterable[T]:
        """Yield one candlestick/OHLC trace built from the open/high/low/close columns."""
        df = self.ta.df
        name = str(self.ta.instrument)
        yield graph_object(
            x=df["close"].index,
            close=df["close"].values,
            open=df["open"].values,
            high=df["high"].values,
            low=df["low"].values,
            name=name,
            legendgroup=name,
        )

    def performance_summary_trace(self, bar_options: dict | None = None, **kwargs) -> Iterable[go.Bar]:
        """Yield a bar trace summarizing performance over standard lookback windows."""
        summaries = {
            "36 Months": self.ta.get_performance_months(months=36),
            "24 Months": self.ta.get_performance_months(months=24),
            "12 Months": self.ta.get_performance_months(months=12),
            "YTD": self.ta.get_performance_year_to_date(),
            "6 Months": self.ta.get_performance_months(months=6),
            "3 Months": self.ta.get_performance_months(months=3),
            "1 Months": self.ta.get_performance_months(months=1),
        }

        # Reversed so the shortest window ends up first on the axis
        yield go.Bar(
            x=list(reversed(summaries.keys())),
            y=list(reversed(summaries.values())),
            text=list(map(lambda x: f"{x:.2%}", reversed(summaries.values()))),
            textposition="auto",
            marker=bar_options,
        )

    def volume_trace(self) -> go.Bar:
        """Return a volume bar trace on the secondary y-axis (green/red by sign)."""
        df = self.ta.df
        df["color_volume"] = "green"
        df.loc[df.volume < 0, "color_volume"] = "red"
        # Managed instruments report flows rather than traded volume
        bar_chart_name = "Inflow/Outflow" if self.ta.instrument.is_managed else "Volume"
        return go.Bar(
            x=df.index,
            y=df["volume"],
            name=bar_chart_name,
            hovertemplate="<b>" + bar_chart_name + "</b><br>%{x}<br>%{y:.4s}<extra></extra>",
            marker={"color": df.color_volume, "opacity": 0.4},
            yaxis="y2",
        )

    def close_trace(
        self,
        base_series: pd.Series | None = None,
        line_options: dict | None = None,
        show_estimates: bool = False,
        **kwargs,
    ) -> Iterable[go.Scatter]:
        """Yield line trace(s) for the close price."""
        yield from self._line_trace(
            "close", "Close", base_series=base_series, line_options=line_options, show_estimates=show_estimates
        )

    def sma_trace(
        self, window: int, base_series: pd.Series | None = None, line_options: dict | None = None, **kwargs
    ) -> Iterable[go.Scatter]:
        """Yield line trace(s) for the *window*-period simple moving average."""
        self.ta.add_sma(window)
        yield from self._line_trace(
            f"close_{window}_sma", f"SMA {window}", base_series=base_series, line_options=line_options
        )

    def return_trace(
        self, base_series: pd.Series | None = None, line_options: dict | None = None, **kwargs
    ) -> Iterable[go.Scatter]:
        """Yield line trace(s) for the cumulative simple return (percent formatted)."""
        self.ta.add_cumulative_return()
        yield from self._line_trace(
            "cum-ret", "Return", percent_format=True, base_series=base_series, line_options=line_options
        )

    def log_return_trace(
        self, base_series: pd.Series | None = None, line_options: dict | None = None, **kwargs
    ) -> Iterable[go.Scatter]:
        """Yield line trace(s) for the cumulative log return (percent formatted)."""
        self.ta.add_cumulative_return("log")
        yield from self._line_trace(
            "cum-log-ret", "Log-Return", percent_format=True, base_series=base_series, line_options=line_options
        )

    def drawdown_trace(
        self, base_series: pd.Series | None = None, line_options: dict | None = None, **kwargs
    ) -> Iterable[go.Scatter]:
        """Yield line trace(s) for the drawdown from the running maximum."""
        self.ta.add_drawdown()
        yield from self._line_trace("drawdown", "Drawdown", base_series=base_series, line_options=line_options)

    def candlestick_trace(self, **kwargs) -> Iterable[go.Candlestick]:
        """Yield a candlestick trace."""
        yield from self._ohlc_trace(go.Candlestick)

    def ohlc_trace(self, **kwargs) -> Iterable[go.Ohlc]:
        """Yield an OHLC trace."""
        yield from self._ohlc_trace(go.Ohlc)
|
wbfdm/analysis/utils.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from contextlib import suppress
|
|
2
|
+
|
|
3
|
+
import pandas as pd
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def normalize_series(series: pd.Series, base_series: pd.Series | None = None) -> pd.Series:
|
|
7
|
+
"""
|
|
8
|
+
Normalize the given time series by aligning and adjusting its values based on a base series.
|
|
9
|
+
|
|
10
|
+
Args:
|
|
11
|
+
series (pd.Series): The time series to be normalized.
|
|
12
|
+
base_series (pd.Series | None): The base time series for alignment and normalization.
|
|
13
|
+
If None, no alignment or normalization is performed.
|
|
14
|
+
|
|
15
|
+
Returns:
|
|
16
|
+
pd.Series: The normalized time series.
|
|
17
|
+
|
|
18
|
+
Note:
|
|
19
|
+
- If base_series is provided, the function aligns the series with the base_series and normalizes
|
|
20
|
+
the values based on the first value of both series.
|
|
21
|
+
- If the series index is older than the base series index, the function trims the series accordingly.
|
|
22
|
+
- If the first value of the series is zero or if the base series first value is zero, no normalization
|
|
23
|
+
is performed.
|
|
24
|
+
"""
|
|
25
|
+
if base_series is not None and not base_series.empty:
|
|
26
|
+
with suppress(KeyError, IndexError):
|
|
27
|
+
# Ensure the series is not older than the the base serie
|
|
28
|
+
series = series.loc[series.index >= base_series.index.min()]
|
|
29
|
+
# if a base serie is provided and normalization is possible, we align the timeseries
|
|
30
|
+
if series.iloc[0] != 0 and (normalize_factor := base_series.iloc[0]):
|
|
31
|
+
series = series / series.iloc[0] * normalize_factor
|
|
32
|
+
return series
|
wbfdm/apps.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from django.apps import AppConfig
|
|
2
|
+
from django.db.models.signals import post_migrate
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class WBFDMAppConfig(AppConfig):
    """Django application configuration for the ``wbfdm`` package."""

    name = "wbfdm"

    def ready(self):
        # Local import: deferred until the Django app registry is ready.
        from wbfdm.management import initialize_task

        # dispatch_uid prevents the handler from being connected twice if
        # ready() runs more than once in the same process.
        post_migrate.connect(
            initialize_task,
            dispatch_uid="wbfdm.initialize_task",
        )
|
|
File without changes
|
|
File without changes
|