aponyx 0.1.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aponyx/__init__.py +14 -0
- aponyx/backtest/__init__.py +31 -0
- aponyx/backtest/adapters.py +77 -0
- aponyx/backtest/config.py +84 -0
- aponyx/backtest/engine.py +560 -0
- aponyx/backtest/protocols.py +101 -0
- aponyx/backtest/registry.py +334 -0
- aponyx/backtest/strategy_catalog.json +50 -0
- aponyx/cli/__init__.py +5 -0
- aponyx/cli/commands/__init__.py +8 -0
- aponyx/cli/commands/clean.py +349 -0
- aponyx/cli/commands/list.py +302 -0
- aponyx/cli/commands/report.py +167 -0
- aponyx/cli/commands/run.py +377 -0
- aponyx/cli/main.py +125 -0
- aponyx/config/__init__.py +82 -0
- aponyx/data/__init__.py +99 -0
- aponyx/data/bloomberg_config.py +306 -0
- aponyx/data/bloomberg_instruments.json +26 -0
- aponyx/data/bloomberg_securities.json +42 -0
- aponyx/data/cache.py +294 -0
- aponyx/data/fetch.py +659 -0
- aponyx/data/fetch_registry.py +135 -0
- aponyx/data/loaders.py +205 -0
- aponyx/data/providers/__init__.py +13 -0
- aponyx/data/providers/bloomberg.py +383 -0
- aponyx/data/providers/file.py +111 -0
- aponyx/data/registry.py +500 -0
- aponyx/data/requirements.py +96 -0
- aponyx/data/sample_data.py +415 -0
- aponyx/data/schemas.py +60 -0
- aponyx/data/sources.py +171 -0
- aponyx/data/synthetic_params.json +46 -0
- aponyx/data/transforms.py +336 -0
- aponyx/data/validation.py +308 -0
- aponyx/docs/__init__.py +24 -0
- aponyx/docs/adding_data_providers.md +682 -0
- aponyx/docs/cdx_knowledge_base.md +455 -0
- aponyx/docs/cdx_overlay_strategy.md +135 -0
- aponyx/docs/cli_guide.md +607 -0
- aponyx/docs/governance_design.md +551 -0
- aponyx/docs/logging_design.md +251 -0
- aponyx/docs/performance_evaluation_design.md +265 -0
- aponyx/docs/python_guidelines.md +786 -0
- aponyx/docs/signal_registry_usage.md +369 -0
- aponyx/docs/signal_suitability_design.md +558 -0
- aponyx/docs/visualization_design.md +277 -0
- aponyx/evaluation/__init__.py +11 -0
- aponyx/evaluation/performance/__init__.py +24 -0
- aponyx/evaluation/performance/adapters.py +109 -0
- aponyx/evaluation/performance/analyzer.py +384 -0
- aponyx/evaluation/performance/config.py +320 -0
- aponyx/evaluation/performance/decomposition.py +304 -0
- aponyx/evaluation/performance/metrics.py +761 -0
- aponyx/evaluation/performance/registry.py +327 -0
- aponyx/evaluation/performance/report.py +541 -0
- aponyx/evaluation/suitability/__init__.py +67 -0
- aponyx/evaluation/suitability/config.py +143 -0
- aponyx/evaluation/suitability/evaluator.py +389 -0
- aponyx/evaluation/suitability/registry.py +328 -0
- aponyx/evaluation/suitability/report.py +398 -0
- aponyx/evaluation/suitability/scoring.py +367 -0
- aponyx/evaluation/suitability/tests.py +303 -0
- aponyx/examples/01_generate_synthetic_data.py +53 -0
- aponyx/examples/02_fetch_data_file.py +82 -0
- aponyx/examples/03_fetch_data_bloomberg.py +104 -0
- aponyx/examples/04_compute_signal.py +164 -0
- aponyx/examples/05_evaluate_suitability.py +224 -0
- aponyx/examples/06_run_backtest.py +242 -0
- aponyx/examples/07_analyze_performance.py +214 -0
- aponyx/examples/08_visualize_results.py +272 -0
- aponyx/main.py +7 -0
- aponyx/models/__init__.py +45 -0
- aponyx/models/config.py +83 -0
- aponyx/models/indicator_transformation.json +52 -0
- aponyx/models/indicators.py +292 -0
- aponyx/models/metadata.py +447 -0
- aponyx/models/orchestrator.py +213 -0
- aponyx/models/registry.py +860 -0
- aponyx/models/score_transformation.json +42 -0
- aponyx/models/signal_catalog.json +29 -0
- aponyx/models/signal_composer.py +513 -0
- aponyx/models/signal_transformation.json +29 -0
- aponyx/persistence/__init__.py +16 -0
- aponyx/persistence/json_io.py +132 -0
- aponyx/persistence/parquet_io.py +378 -0
- aponyx/py.typed +0 -0
- aponyx/reporting/__init__.py +10 -0
- aponyx/reporting/generator.py +517 -0
- aponyx/visualization/__init__.py +20 -0
- aponyx/visualization/app.py +37 -0
- aponyx/visualization/plots.py +309 -0
- aponyx/visualization/visualizer.py +242 -0
- aponyx/workflows/__init__.py +18 -0
- aponyx/workflows/concrete_steps.py +720 -0
- aponyx/workflows/config.py +122 -0
- aponyx/workflows/engine.py +279 -0
- aponyx/workflows/registry.py +116 -0
- aponyx/workflows/steps.py +180 -0
- aponyx-0.1.18.dist-info/METADATA +552 -0
- aponyx-0.1.18.dist-info/RECORD +104 -0
- aponyx-0.1.18.dist-info/WHEEL +4 -0
- aponyx-0.1.18.dist-info/entry_points.txt +2 -0
- aponyx-0.1.18.dist-info/licenses/LICENSE +21 -0
aponyx/models/indicators.py

@@ -0,0 +1,292 @@
"""
Indicator computation functions for market metrics.

Indicators compute economically interpretable market metrics (spread differences,
ratios, momentum) in their natural units (basis points, ratios, percentages)
WITHOUT signal-level normalization (z-scores, percentile ranks).

This separation enables:
- Reusable indicators across multiple signals
- Independent testing and validation
- Clear governance boundaries
"""

import logging
from typing import Any

import pandas as pd

from ..config import INDICATOR_CACHE_DIR
from ..persistence.parquet_io import (
    generate_indicator_cache_key,
    load_indicator_from_cache,
    save_indicator_to_cache,
)

logger = logging.getLogger(__name__)


def compute_cdx_etf_spread_diff(
    cdx_df: pd.DataFrame,
    etf_df: pd.DataFrame,
    parameters: dict[str, Any],
) -> pd.Series:
    """
    Compute CDX spread minus ETF spread in basis points.

    This is the raw basis between CDX index spreads and ETF-implied spreads
    without normalization. Useful for identifying flow-driven mispricing.

    Parameters
    ----------
    cdx_df : pd.DataFrame
        CDX spread data with 'spread' column.
    etf_df : pd.DataFrame
        ETF spread data with 'spread' column.
    parameters : dict[str, Any]
        Indicator parameters (unused for this simple indicator).

    Returns
    -------
    pd.Series
        Spread difference in basis points.

    Notes
    -----
    Output units: basis_points
    Positive values: CDX spreads wider than ETF spreads (CDX expensive vs ETF)
    Negative values: CDX spreads tighter than ETF spreads (CDX cheap vs ETF)
    Economic interpretation: Measures relative value between CDX and ETF markets
    """
    logger.info(
        "Computing CDX-ETF spread difference: cdx_rows=%d, etf_rows=%d",
        len(cdx_df),
        len(etf_df),
    )

    # Align data to common dates
    cdx_spread = cdx_df["spread"]
    etf_spread = etf_df["spread"].reindex(cdx_df.index, method="ffill")

    # Compute raw difference (no normalization)
    spread_diff = cdx_spread - etf_spread

    valid_count = spread_diff.notna().sum()
    logger.debug("Generated %d valid spread difference values", valid_count)

    return spread_diff
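# --- Usage sketch (illustrative, not part of aponyx/models/indicators.py) ---
# A minimal, hypothetical call of compute_cdx_etf_spread_diff: the DataFrames
# below are made up but follow the docstring contract of a 'spread' column
# indexed by date. The ETF series is forward-filled onto the CDX index before
# the raw basis-point difference is taken.
import pandas as pd

dates = pd.date_range("2024-01-02", periods=5, freq="B")
cdx_df = pd.DataFrame({"spread": [352.0, 355.0, 350.0, 358.0, 356.0]}, index=dates)
etf_df = pd.DataFrame({"spread": [348.0, 351.0, 349.0, 352.0, 353.0]}, index=dates)

basis = compute_cdx_etf_spread_diff(cdx_df, etf_df, parameters={})
# Positive values mean CDX spreads are wider than ETF-implied spreads.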
def compute_spread_momentum(
    cdx_df: pd.DataFrame,
    parameters: dict[str, Any],
) -> pd.Series:
    """
    Compute short-term spread change in basis points.

    Captures spread momentum over the specified lookback period without
    volatility adjustment or normalization.

    Parameters
    ----------
    cdx_df : pd.DataFrame
        CDX spread data with 'spread' column.
    parameters : dict[str, Any]
        Must contain 'lookback': Number of periods for momentum calculation.

    Returns
    -------
    pd.Series
        Spread change in basis points.

    Raises
    ------
    KeyError
        If 'lookback' is not present in parameters. Catalog must define this.

    Notes
    -----
    Output units: basis_points
    Positive values: Spreads widening (credit deteriorating)
    Negative values: Spreads tightening (credit improving)
    Economic interpretation: Rate of spread change over lookback period
    Sign convention: Negative change (tightening) is favorable for credit
    """
    lookback = parameters["lookback"]

    logger.info(
        "Computing spread momentum: cdx_rows=%d, lookback=%d",
        len(cdx_df),
        lookback,
    )

    spread = cdx_df["spread"]

    # Compute spread change over lookback period
    spread_change = spread.diff(lookback)

    valid_count = spread_change.notna().sum()
    logger.debug("Generated %d valid momentum values", valid_count)

    return spread_change
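# --- Usage sketch (illustrative, not part of aponyx/models/indicators.py) ---
# Hypothetical spread momentum over a 5-period lookback. The 'lookback' key is
# required; omitting it raises KeyError, as documented above.
import numpy as np
import pandas as pd

dates = pd.date_range("2024-01-02", periods=30, freq="B")
cdx_df = pd.DataFrame({"spread": 350.0 + np.arange(30.0)}, index=dates)

momentum = compute_spread_momentum(cdx_df, parameters={"lookback": 5})
# The first 5 values are NaN; each later value is the 5-period change in bps.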
def compute_cdx_vix_deviation_gap(
    cdx_df: pd.DataFrame,
    vix_df: pd.DataFrame,
    parameters: dict[str, Any],
) -> pd.Series:
    """
    Compute gap between CDX and VIX deviations from their means.

    Identifies cross-asset risk sentiment divergence by comparing how far
    each asset is from its recent average level.

    Parameters
    ----------
    cdx_df : pd.DataFrame
        CDX spreads with 'spread' column.
    vix_df : pd.DataFrame
        VIX levels with 'level' column.
    parameters : dict[str, Any]
        Must contain 'lookback': Window for computing mean deviations.

    Returns
    -------
    pd.Series
        Deviation gap in basis points.

    Raises
    ------
    KeyError
        If 'lookback' is not present in parameters. Catalog must define this.

    Notes
    -----
    Output units: basis_points (approximate, combining CDX bps and VIX points)
    Positive values: Credit stress > equity stress (CDX elevated relative to VIX)
    Negative values: Equity stress > credit stress (VIX elevated relative to CDX)
    Economic interpretation: Cross-asset risk sentiment divergence
    """
    lookback = parameters["lookback"]

    logger.info(
        "Computing CDX-VIX deviation gap: cdx_rows=%d, vix_rows=%d, lookback=%d",
        len(cdx_df),
        len(vix_df),
        lookback,
    )

    # Align data to common dates
    cdx = cdx_df["spread"]
    vix = vix_df["level"].reindex(cdx_df.index, method="ffill")

    # Compute deviations from rolling means
    cdx_mean = cdx.rolling(window=lookback, min_periods=lookback // 2).mean()
    cdx_deviation = cdx - cdx_mean

    vix_mean = vix.rolling(window=lookback, min_periods=lookback // 2).mean()
    vix_deviation = vix - vix_mean

    # Raw gap: CDX stress minus VIX stress
    gap = cdx_deviation - vix_deviation

    valid_count = gap.notna().sum()
    logger.debug("Generated %d valid deviation gap values", valid_count)

    return gap
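# --- Usage sketch (illustrative, not part of aponyx/models/indicators.py) ---
# Hypothetical deviation gap with a 20-period lookback. VIX levels are
# forward-filled onto the CDX index, and each rolling mean needs at least
# lookback // 2 observations before the gap becomes non-NaN.
import numpy as np
import pandas as pd

dates = pd.date_range("2024-01-02", periods=60, freq="B")
rng = np.random.default_rng(0)
cdx_df = pd.DataFrame({"spread": 350.0 + rng.normal(0.0, 5.0, 60).cumsum()}, index=dates)
vix_df = pd.DataFrame({"level": 15.0 + rng.normal(0.0, 1.0, 60).cumsum()}, index=dates)

gap = compute_cdx_vix_deviation_gap(cdx_df, vix_df, parameters={"lookback": 20})
# Positive gap: credit stress running ahead of equity stress, per the Notes above.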
def compute_indicator(
    indicator_name: str,
    market_data: dict[str, pd.DataFrame],
    indicator_metadata: Any,
    use_cache: bool = True,
) -> pd.Series:
    """
    Orchestration function for indicator computation with caching.

    Parameters
    ----------
    indicator_name : str
        Name of the indicator to compute.
    market_data : dict[str, pd.DataFrame]
        Market data required for indicator computation.
        Keys should match data_requirements in indicator metadata.
    indicator_metadata : IndicatorMetadata
        Metadata containing compute function, parameters, and requirements.
    use_cache : bool, default True
        Whether to use cached values if available.

    Returns
    -------
    pd.Series
        Computed indicator time series.

    Raises
    ------
    ValueError
        If required market data is missing or compute function not found.

    Examples
    --------
    >>> from aponyx.models.registry import IndicatorRegistry
    >>> from aponyx.config import INDICATOR_CATALOG_PATH
    >>> registry = IndicatorRegistry(INDICATOR_CATALOG_PATH)
    >>> metadata = registry.get_metadata("cdx_etf_spread_diff")
    >>> market_data = {"cdx": cdx_df, "etf": etf_df}
    >>> indicator = compute_indicator(
    ...     "cdx_etf_spread_diff",
    ...     market_data,
    ...     metadata,
    ...     use_cache=True
    ... )
    """
    logger.info("Computing indicator: name=%s, use_cache=%s", indicator_name, use_cache)

    # Check cache if enabled
    if use_cache:
        cache_key = generate_indicator_cache_key(
            indicator_name,
            indicator_metadata.parameters,
            market_data,
        )
        cached_result = load_indicator_from_cache(cache_key, INDICATOR_CACHE_DIR)
        if cached_result is not None:
            logger.info("Using cached indicator: name=%s", indicator_name)
            return cached_result

    # Get compute function from this module's namespace
    import sys

    current_module = sys.modules[__name__]
    compute_fn_name = indicator_metadata.compute_function_name
    if not hasattr(current_module, compute_fn_name):
        raise ValueError(
            f"Compute function '{compute_fn_name}' not found in indicators module"
        )

    compute_fn = getattr(current_module, compute_fn_name)

    # Prepare arguments based on data requirements
    args = []
    for data_key in sorted(indicator_metadata.data_requirements.keys()):
        if data_key not in market_data:
            raise ValueError(
                f"Missing required market data '{data_key}' for indicator '{indicator_name}'"
            )
        args.append(market_data[data_key])

    # Add parameters as last argument
    args.append(indicator_metadata.parameters)

    # Compute indicator
    logger.debug("Calling compute function: %s", compute_fn_name)
    result: pd.Series = compute_fn(*args)

    # Cache result if enabled
    if use_cache:
        save_indicator_to_cache(result, cache_key, INDICATOR_CACHE_DIR)

    logger.info("Computed indicator: name=%s, values=%d", indicator_name, len(result))
    return result
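The orchestration path above resolves the compute function by name, checks the supplied market data against the catalog's data_requirements, and optionally round-trips results through the Parquet cache. The sketch below exercises that path end to end; it is a hypothetical illustration, not part of the wheel. The stub metadata class only mirrors the three attributes compute_indicator actually reads (compute_function_name, parameters, data_requirements), whereas the real object is the IndicatorMetadata returned by the registry in the docstring's Examples.

from dataclasses import dataclass, field
from typing import Any

import pandas as pd


@dataclass
class StubIndicatorMetadata:
    # Hypothetical stand-in exposing only the attributes compute_indicator reads.
    compute_function_name: str
    parameters: dict[str, Any]
    data_requirements: dict[str, Any] = field(default_factory=dict)


dates = pd.date_range("2024-01-02", periods=10, freq="B")
market_data = {
    "cdx": pd.DataFrame({"spread": [350.0 + i for i in range(10)]}, index=dates),
    "etf": pd.DataFrame({"spread": [347.0 + i for i in range(10)]}, index=dates),
}
metadata = StubIndicatorMetadata(
    compute_function_name="compute_cdx_etf_spread_diff",
    parameters={},
    data_requirements={"cdx": {}, "etf": {}},
)

# use_cache=False keeps the sketch independent of INDICATOR_CACHE_DIR on disk;
# arguments are passed in sorted data_requirements order ("cdx", then "etf").
series = compute_indicator("cdx_etf_spread_diff", market_data, metadata, use_cache=False)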