aponyx 0.1.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. aponyx/__init__.py +14 -0
  2. aponyx/backtest/__init__.py +31 -0
  3. aponyx/backtest/adapters.py +77 -0
  4. aponyx/backtest/config.py +84 -0
  5. aponyx/backtest/engine.py +560 -0
  6. aponyx/backtest/protocols.py +101 -0
  7. aponyx/backtest/registry.py +334 -0
  8. aponyx/backtest/strategy_catalog.json +50 -0
  9. aponyx/cli/__init__.py +5 -0
  10. aponyx/cli/commands/__init__.py +8 -0
  11. aponyx/cli/commands/clean.py +349 -0
  12. aponyx/cli/commands/list.py +302 -0
  13. aponyx/cli/commands/report.py +167 -0
  14. aponyx/cli/commands/run.py +377 -0
  15. aponyx/cli/main.py +125 -0
  16. aponyx/config/__init__.py +82 -0
  17. aponyx/data/__init__.py +99 -0
  18. aponyx/data/bloomberg_config.py +306 -0
  19. aponyx/data/bloomberg_instruments.json +26 -0
  20. aponyx/data/bloomberg_securities.json +42 -0
  21. aponyx/data/cache.py +294 -0
  22. aponyx/data/fetch.py +659 -0
  23. aponyx/data/fetch_registry.py +135 -0
  24. aponyx/data/loaders.py +205 -0
  25. aponyx/data/providers/__init__.py +13 -0
  26. aponyx/data/providers/bloomberg.py +383 -0
  27. aponyx/data/providers/file.py +111 -0
  28. aponyx/data/registry.py +500 -0
  29. aponyx/data/requirements.py +96 -0
  30. aponyx/data/sample_data.py +415 -0
  31. aponyx/data/schemas.py +60 -0
  32. aponyx/data/sources.py +171 -0
  33. aponyx/data/synthetic_params.json +46 -0
  34. aponyx/data/transforms.py +336 -0
  35. aponyx/data/validation.py +308 -0
  36. aponyx/docs/__init__.py +24 -0
  37. aponyx/docs/adding_data_providers.md +682 -0
  38. aponyx/docs/cdx_knowledge_base.md +455 -0
  39. aponyx/docs/cdx_overlay_strategy.md +135 -0
  40. aponyx/docs/cli_guide.md +607 -0
  41. aponyx/docs/governance_design.md +551 -0
  42. aponyx/docs/logging_design.md +251 -0
  43. aponyx/docs/performance_evaluation_design.md +265 -0
  44. aponyx/docs/python_guidelines.md +786 -0
  45. aponyx/docs/signal_registry_usage.md +369 -0
  46. aponyx/docs/signal_suitability_design.md +558 -0
  47. aponyx/docs/visualization_design.md +277 -0
  48. aponyx/evaluation/__init__.py +11 -0
  49. aponyx/evaluation/performance/__init__.py +24 -0
  50. aponyx/evaluation/performance/adapters.py +109 -0
  51. aponyx/evaluation/performance/analyzer.py +384 -0
  52. aponyx/evaluation/performance/config.py +320 -0
  53. aponyx/evaluation/performance/decomposition.py +304 -0
  54. aponyx/evaluation/performance/metrics.py +761 -0
  55. aponyx/evaluation/performance/registry.py +327 -0
  56. aponyx/evaluation/performance/report.py +541 -0
  57. aponyx/evaluation/suitability/__init__.py +67 -0
  58. aponyx/evaluation/suitability/config.py +143 -0
  59. aponyx/evaluation/suitability/evaluator.py +389 -0
  60. aponyx/evaluation/suitability/registry.py +328 -0
  61. aponyx/evaluation/suitability/report.py +398 -0
  62. aponyx/evaluation/suitability/scoring.py +367 -0
  63. aponyx/evaluation/suitability/tests.py +303 -0
  64. aponyx/examples/01_generate_synthetic_data.py +53 -0
  65. aponyx/examples/02_fetch_data_file.py +82 -0
  66. aponyx/examples/03_fetch_data_bloomberg.py +104 -0
  67. aponyx/examples/04_compute_signal.py +164 -0
  68. aponyx/examples/05_evaluate_suitability.py +224 -0
  69. aponyx/examples/06_run_backtest.py +242 -0
  70. aponyx/examples/07_analyze_performance.py +214 -0
  71. aponyx/examples/08_visualize_results.py +272 -0
  72. aponyx/main.py +7 -0
  73. aponyx/models/__init__.py +45 -0
  74. aponyx/models/config.py +83 -0
  75. aponyx/models/indicator_transformation.json +52 -0
  76. aponyx/models/indicators.py +292 -0
  77. aponyx/models/metadata.py +447 -0
  78. aponyx/models/orchestrator.py +213 -0
  79. aponyx/models/registry.py +860 -0
  80. aponyx/models/score_transformation.json +42 -0
  81. aponyx/models/signal_catalog.json +29 -0
  82. aponyx/models/signal_composer.py +513 -0
  83. aponyx/models/signal_transformation.json +29 -0
  84. aponyx/persistence/__init__.py +16 -0
  85. aponyx/persistence/json_io.py +132 -0
  86. aponyx/persistence/parquet_io.py +378 -0
  87. aponyx/py.typed +0 -0
  88. aponyx/reporting/__init__.py +10 -0
  89. aponyx/reporting/generator.py +517 -0
  90. aponyx/visualization/__init__.py +20 -0
  91. aponyx/visualization/app.py +37 -0
  92. aponyx/visualization/plots.py +309 -0
  93. aponyx/visualization/visualizer.py +242 -0
  94. aponyx/workflows/__init__.py +18 -0
  95. aponyx/workflows/concrete_steps.py +720 -0
  96. aponyx/workflows/config.py +122 -0
  97. aponyx/workflows/engine.py +279 -0
  98. aponyx/workflows/registry.py +116 -0
  99. aponyx/workflows/steps.py +180 -0
  100. aponyx-0.1.18.dist-info/METADATA +552 -0
  101. aponyx-0.1.18.dist-info/RECORD +104 -0
  102. aponyx-0.1.18.dist-info/WHEEL +4 -0
  103. aponyx-0.1.18.dist-info/entry_points.txt +2 -0
  104. aponyx-0.1.18.dist-info/licenses/LICENSE +21 -0
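
The diff below shows the two CLI command modules in full. For orientation, here is a minimal sketch of how such commands are typically registered on a click group. Note that aponyx/cli/main.py is not included in this diff, so the group name and wiring below are assumptions for illustration only, not the package's actual entry point:

import click

# Hypothetical wiring -- assumes a top-level click group lives in
# aponyx/cli/main.py; that module's contents are not shown in this diff.
from aponyx.cli.commands.clean import clean
from aponyx.cli.commands.list import list_items


@click.group()
def cli() -> None:
    """aponyx command-line interface (sketch)."""


cli.add_command(clean)       # -> aponyx clean ...
cli.add_command(list_items)  # -> aponyx list ...

if __name__ == "__main__":
    cli()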
aponyx/cli/commands/clean.py
@@ -0,0 +1,349 @@
+ """
+ Clean cached results command.
+
+ Removes processed outputs to force fresh computation.
+ """
+
+ import json
+ import logging
+ from datetime import datetime, timedelta
+ from pathlib import Path
+
+ import click
+
+ from aponyx.config import DATA_WORKFLOWS_DIR, INDICATOR_CACHE_DIR
+
+ logger = logging.getLogger(__name__)
+
+
+ def _parse_days(older_than: str) -> int:
+     """
+     Parse days from string format like '30d', '7d', '90d'.
+
+     Parameters
+     ----------
+     older_than : str
+         String in format '<number>d'.
+
+     Returns
+     -------
+     int
+         Number of days.
+
+     Raises
+     ------
+     click.ClickException
+         If format is invalid.
+     """
+     if not older_than.endswith("d"):
+         raise click.ClickException(
+             f"Invalid format '{older_than}'. Expected format: '<number>d' (e.g., '30d', '7d')"
+         )
+
+     try:
+         days = int(older_than[:-1])
+         if days <= 0:
+             raise ValueError
+         return days
+     except ValueError:
+         raise click.ClickException(
+             f"Invalid number in '{older_than}'. Must be a positive integer."
+         )
+
+
+ def _collect_targets(base_path: Path) -> list[Path]:
+     """
+     Recursively collect all files and directories to delete.
+
+     Parameters
+     ----------
+     base_path : Path
+         Root directory to collect from.
+
+     Returns
+     -------
+     list[Path]
+         List of all files and directories, depth-first order.
+     """
+     targets = []
+
+     if not base_path.exists():
+         return targets
+
+     if base_path.is_file():
+         targets.append(base_path)
+     elif base_path.is_dir():
+         # Collect recursively; reverse sort puts children before their parents
+         for item in sorted(base_path.rglob("*"), reverse=True):
+             targets.append(item)
+         # Add the directory itself last
+         targets.append(base_path)
+
+     return targets
+
+
+ @click.command(name="clean")
+ @click.option(
+     "--signal",
+     type=str,
+     help="Filter workflows by signal name (use with --workflows)",
+ )
+ @click.option(
+     "--all",
+     "clean_all",
+     is_flag=True,
+     help="Clean all workflow results (ignore age filter)",
+ )
+ @click.option(
+     "--workflows",
+     is_flag=True,
+     help="Clean workflow results",
+ )
+ @click.option(
+     "--older-than",
+     type=str,
+     help="Delete workflows older than specified days (format: '30d', '7d', '90d'). Use with --workflows.",
+ )
+ @click.option(
+     "--indicators",
+     is_flag=True,
+     help="Clean indicator cache",
+ )
+ @click.option(
+     "--dry-run",
+     is_flag=True,
+     help="Show what would be deleted without deleting",
+ )
+ def clean(
+     signal: str | None,
+     clean_all: bool,
+     workflows: bool,
+     older_than: str | None,
+     indicators: bool,
+     dry_run: bool,
+ ) -> None:
+     """
+     Clear cached workflow results and indicator cache.
+
+     \b
+     Examples:
+         # Clean all workflow results
+         aponyx clean --workflows --all
+
+         # Clean workflows older than 30 days
+         aponyx clean --workflows --older-than 30d
+
+         # Clean old workflows for a specific signal
+         aponyx clean --workflows --signal spread_momentum --older-than 30d
+
+         # Clean indicator cache
+         aponyx clean --indicators
+
+         # Preview changes without deleting
+         aponyx clean --workflows --older-than 30d --dry-run
+     """
+     # Handle indicator cache cleaning
+     if indicators:
+         _clean_indicator_cache(dry_run)
+         if not signal and not clean_all and not workflows:
+             # If only --indicators flag, we're done
+             return
+
+     # Validate options
+     if older_than and not workflows:
+         click.echo("Error: --older-than requires --workflows flag", err=True)
+         raise click.Abort()
+
+     # Handle workflow cleaning
+     if workflows or clean_all:
+         _clean_workflows(
+             signal_filter=signal,
+             clean_all=clean_all,
+             older_than=older_than,
+             dry_run=dry_run,
+         )
+         return
+
+     # If no workflow/indicator flags, show error
+     if not indicators:
+         click.echo("Must specify --workflows, --indicators, or --all", err=True)
+         raise click.Abort()
+
+
+ def _clean_workflows(
+     signal_filter: str | None,
+     clean_all: bool,
+     older_than: str | None,
+     dry_run: bool,
+ ) -> None:
+     """
+     Clean workflow directories based on filters.
+
+     Parameters
+     ----------
+     signal_filter : str | None
+         Filter by signal name.
+     clean_all : bool
+         Clean all workflows (ignore age filter).
+     older_than : str | None
+         Delete workflows older than specified days (format: '30d').
+     dry_run : bool
+         Preview without deleting.
+     """
+     workflows_dir = DATA_WORKFLOWS_DIR
+
+     if not workflows_dir.exists():
+         click.echo("No workflows found")
+         return
+
+     # Parse age threshold if provided
+     age_threshold = None
+     if older_than:
+         days = _parse_days(older_than)
+         age_threshold = datetime.now() - timedelta(days=days)
+
+     # Collect workflow directories to delete
+     workflow_dirs_to_delete = []
+
+     for workflow_dir in workflows_dir.iterdir():
+         if not workflow_dir.is_dir():
+             continue
+
+         # Load metadata for filtering
+         metadata_path = workflow_dir / "metadata.json"
+         if not metadata_path.exists():
+             # Include directories without metadata if --all specified
+             if clean_all:
+                 workflow_dirs_to_delete.append(workflow_dir)
+             continue
+
+         try:
+             with open(metadata_path, "r", encoding="utf-8") as f:
+                 metadata = json.load(f)
+         except Exception as e:
+             logger.debug("Failed to load metadata from %s: %s", workflow_dir, e)
+             if clean_all:
+                 workflow_dirs_to_delete.append(workflow_dir)
+             continue
+
+         # Apply signal filter
+         if signal_filter:
+             if metadata.get("signal") != signal_filter:
+                 continue
+
+         # Apply age filter (unless --all specified)
+         if not clean_all and age_threshold:
+             timestamp_str = metadata.get("timestamp")
+             if timestamp_str:
+                 try:
+                     timestamp = datetime.fromisoformat(timestamp_str)
+                     if timestamp >= age_threshold:
+                         # Workflow is newer than threshold, skip
+                         continue
+                 except Exception as e:
+                     logger.debug(
+                         "Failed to parse timestamp from %s: %s", workflow_dir, e
+                     )
+                     continue
+
+         # Add to deletion list
+         workflow_dirs_to_delete.append(workflow_dir)
+
+     if not workflow_dirs_to_delete:
+         if signal_filter:
+             click.echo(f"No workflows found matching signal '{signal_filter}'")
+         elif older_than:
+             click.echo(f"No workflows found older than {older_than}")
+         else:
+             click.echo("No workflows found")
+         return
+
+     # Collect all files and directories from matched workflows
+     targets = []
+     for workflow_dir in workflow_dirs_to_delete:
+         targets.extend(_collect_targets(workflow_dir))
+
+     # Display summary
+     if dry_run:
+         click.echo(
+             f"Would delete {len(workflow_dirs_to_delete)} workflow(s) ({len(targets)} items):\n"
+         )
+
+     deleted_count = 0
+     for target in targets:
+         # Display path relative to workflows dir for clarity
+         rel_path = target.relative_to(workflows_dir.parent)
+
+         if dry_run:
+             click.echo(f"  {rel_path}")
+         else:
+             # Show workflow directory names being deleted
+             if target.parent == workflows_dir and target.is_dir():
+                 click.echo(f"Deleting workflow: {target.name}")
+             logger.debug("Deleting %s", target)
+             try:
+                 if target.is_dir():
+                     target.rmdir()
+                 else:
+                     target.unlink()
+                 deleted_count += 1
+             except Exception as e:
+                 logger.warning("Failed to delete %s: %s", target, e)
+                 click.echo(f"  Failed: {e}", err=True)
+
+     # Summary
+     if dry_run:
+         click.echo(
+             f"\nDry run complete: {len(workflow_dirs_to_delete)} workflow(s) would be deleted"
+         )
+     else:
+         click.echo(
+             f"\nCleaned {deleted_count}/{len(targets)} item(s) from {len(workflow_dirs_to_delete)} workflow(s)"
+         )
+
+
+ def _clean_indicator_cache(dry_run: bool) -> None:
+     """
+     Clean all cached indicator values.
+
+     Parameters
+     ----------
+     dry_run : bool
+         If True, only show what would be deleted.
+     """
+     if not INDICATOR_CACHE_DIR.exists():
+         click.echo("No indicator cache found")
+         return
+
+     # Collect all cache files
+     cache_files = list(INDICATOR_CACHE_DIR.glob("*.parquet"))
+
+     if not cache_files:
+         click.echo("No cached indicators found")
+         return
+
+     if dry_run:
+         click.echo(f"\nWould delete {len(cache_files)} cached indicator(s):")
+         for cache_file in sorted(cache_files):
+             click.echo(f"  {cache_file.name}")
+         click.echo(
+             f"\nDry run complete: {len(cache_files)} indicator(s) would be deleted"
+         )
+     else:
+         click.echo(f"Cleaning {len(cache_files)} cached indicator(s)...")
+         deleted_count = 0
+
+         for cache_file in cache_files:
+             try:
+                 # Cache key format: {name}_{params_hash}_{data_hash}.parquet; rsplit keeps underscored names intact
+                 indicator_name = cache_file.stem.rsplit("_", 2)[0]
+                 click.echo(f"Deleting cached indicator: {indicator_name}")
+                 cache_file.unlink()
+                 deleted_count += 1
+             except Exception as e:
+                 logger.warning("Failed to delete %s: %s", cache_file, e)
+                 click.echo(f"  Failed: {e}", err=True)
+
+         click.echo(
+             f"\nCleaned {deleted_count}/{len(cache_files)} indicator cache file(s)"
+         )
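
A note on the deletion strategy above: _collect_targets relies on Path.rmdir, which only removes empty directories, so ordering matters. Reverse-sorting the rglob results guarantees every child path sorts after (and is therefore deleted before) its parent directory. A self-contained sketch demonstrating the invariant, using hypothetical temp paths rather than anything from the package:

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    root = Path(tmp) / "workflow_abc"
    (root / "reports").mkdir(parents=True)
    (root / "reports" / "summary.txt").write_text("ok")

    # Same ordering as _collect_targets: children first, root directory last.
    targets = sorted(root.rglob("*"), reverse=True) + [root]
    for target in targets:
        if target.is_dir():
            target.rmdir()  # safe: contents were already removed
        else:
            target.unlink()

    assert not root.exists()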
aponyx/cli/commands/list.py
@@ -0,0 +1,302 @@
+ """
+ List catalog items command.
+
+ Displays available signals, products, indicators, transformations, securities,
+ datasets, strategies, workflow steps, and workflow results.
+ """
+
+ import json
+ import logging
+
+ import click
+
+ from aponyx.models.registry import (
+     SignalRegistry,
+     IndicatorTransformationRegistry,
+     ScoreTransformationRegistry,
+     SignalTransformationRegistry,
+ )
+ from aponyx.backtest.registry import StrategyRegistry
+ from aponyx.data.registry import DataRegistry
+ from aponyx.workflows.registry import StepRegistry
+ from aponyx.config import (
+     SIGNAL_CATALOG_PATH,
+     INDICATOR_TRANSFORMATION_PATH,
+     SCORE_TRANSFORMATION_PATH,
+     SIGNAL_TRANSFORMATION_PATH,
+     STRATEGY_CATALOG_PATH,
+     BLOOMBERG_SECURITIES_PATH,
+     REGISTRY_PATH,
+     DATA_DIR,
+     DATA_WORKFLOWS_DIR,
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ @click.command(name="list")
+ @click.argument(
+     "item_type",
+     type=click.Choice(
+         [
+             "signals",
+             "products",
+             "indicators",
+             "score-transformations",
+             "signal-transformations",
+             "securities",
+             "datasets",
+             "strategies",
+             "steps",
+             "workflows",
+         ],
+         case_sensitive=False,
+     ),
+ )
+ @click.option(
+     "--signal",
+     type=str,
+     help="Filter workflows by signal name (workflows only)",
+ )
+ @click.option(
+     "--product",
+     type=str,
+     help="Filter workflows by product (workflows only)",
+ )
+ @click.option(
+     "--strategy",
+     type=str,
+     help="Filter workflows by strategy name (workflows only)",
+ )
+ def list_items(
+     item_type: str,
+     signal: str | None,
+     product: str | None,
+     strategy: str | None,
+ ) -> None:
+     """
+     List available catalog items or workflow results.
+
+     ITEM_TYPE can be: signals, products, indicators, score-transformations,
+     signal-transformations, securities, datasets, strategies, steps, or workflows.
+
+     \b
+     Examples:
+         aponyx list signals
+         aponyx list indicators
+         aponyx list score-transformations
+         aponyx list signal-transformations
+         aponyx list products
+         aponyx list workflows
+         aponyx list workflows --signal spread_momentum
+         aponyx list workflows --product cdx_ig_5y --strategy balanced
+     """
+     # Validate that filters only apply to workflows
+     if item_type != "workflows" and (signal or product or strategy):
+         click.echo(
+             "Error: --signal, --product, and --strategy filters only apply to 'workflows'",
+             err=True,
+         )
+         raise click.Abort()
+
+     if item_type == "signals":
+         registry = SignalRegistry(SIGNAL_CATALOG_PATH)
+         signals = registry.list_all()
+
+         for signal_name, metadata in signals.items():
+             click.echo(f"{signal_name:<25} {metadata.description}")
+
+     elif item_type == "products":
+         # Products are the tradeable instruments (securities with CDX instrument type)
+         with open(BLOOMBERG_SECURITIES_PATH, "r", encoding="utf-8") as f:
+             securities = json.load(f)
+
+         products = {
+             name: info
+             for name, info in securities.items()
+             if info.get("instrument_type") == "cdx"
+         }
+
+         for product_name, info in products.items():
+             desc = info.get("description", "No description")
+             click.echo(f"{product_name:<20} {desc}")
+
+     elif item_type == "indicators":
+         registry = IndicatorTransformationRegistry(INDICATOR_TRANSFORMATION_PATH)
+         indicators = registry.list_all()
+
+         for indicator_name, metadata in indicators.items():
+             click.echo(f"{indicator_name:<30} {metadata.description}")
+
+     elif item_type == "score-transformations":
+         registry = ScoreTransformationRegistry(SCORE_TRANSFORMATION_PATH)
+         transformations = registry.list_all()
+
+         for transform_name, metadata in transformations.items():
+             click.echo(f"{transform_name:<25} {metadata.description}")
+
+     elif item_type == "signal-transformations":
+         registry = SignalTransformationRegistry(SIGNAL_TRANSFORMATION_PATH)
+         transformations = registry.list_all()
+
+         for transform_name, metadata in transformations.items():
+             click.echo(f"{transform_name:<25} {metadata.description}")
+
+     elif item_type == "securities":
+         # All securities (CDX, ETF, VIX, etc.)
+         with open(BLOOMBERG_SECURITIES_PATH, "r", encoding="utf-8") as f:
+             securities = json.load(f)
+
+         for security_name, info in securities.items():
+             desc = info.get("description", "No description")
+             instrument_type = info.get("instrument_type", "unknown")
+             click.echo(f"{security_name:<20} {instrument_type:<10} {desc}")
+
+     elif item_type == "datasets":
+         registry = DataRegistry(REGISTRY_PATH, DATA_DIR)
+         datasets = registry.list_datasets()
+
+         for dataset in datasets:
+             info = registry.get_dataset_info(dataset)
+             # Try to get security from params, fall back to instrument type
+             params = info.get("metadata", {}).get("params", {})
+             instrument = params.get("security") or info.get("instrument", "unknown")
+             # Extract source from metadata
+             source = info.get("metadata", {}).get("provider", "unknown")
+             click.echo(f"{dataset:<40} {instrument:<20} {source}")
+
+     elif item_type == "strategies":
+         registry = StrategyRegistry(STRATEGY_CATALOG_PATH)
+         strategies = registry.list_all()
+
+         for strategy_name, metadata in strategies.items():
+             click.echo(f"{strategy_name:<20} {metadata.description}")
+
+     elif item_type == "steps":
+         # Display canonical workflow step order with descriptions
+         step_registry = StepRegistry()
+         steps = step_registry.get_canonical_order()
+
+         # One-line descriptions for the canonical steps
+         descriptions = {
+             "data": "Load/fetch market data from registry or sources",
+             "signal": "Compute signal values from market data",
+             "suitability": "Evaluate signal-product suitability",
+             "backtest": "Run strategy backtest with risk tracking",
+             "performance": "Compute extended performance metrics",
+             "visualization": "Generate interactive charts",
+         }
+         click.echo("Workflow steps (canonical order):\n")
+         for i, step_name in enumerate(steps, 1):
+             desc = descriptions.get(step_name, "No description available")
+             click.echo(f"{i}. {step_name:<15} {desc}")
+
+     elif item_type == "workflows":
+         from datetime import datetime
+
+         if not DATA_WORKFLOWS_DIR.exists():
+             click.echo("No workflows found")
+             return
+
+         # Collect all workflow metadata
+         workflows = []
+         for workflow_dir in DATA_WORKFLOWS_DIR.iterdir():
+             if not workflow_dir.is_dir():
+                 continue
+
+             metadata_path = workflow_dir / "metadata.json"
+             if not metadata_path.exists():
+                 continue
+
+             try:
+                 with open(metadata_path, "r", encoding="utf-8") as f:
+                     metadata = json.load(f)
+
+                 # Skip workflows without label (old format)
+                 if "label" not in metadata:
+                     continue
+
+                 workflows.append(
+                     {
+                         "dir": workflow_dir,
+                         "label": metadata.get("label", "unknown"),
+                         "signal": metadata.get("signal", "unknown"),
+                         "strategy": metadata.get("strategy", "unknown"),
+                         "product": metadata.get("product", "unknown"),
+                         "status": metadata.get("status", "unknown"),
+                         "timestamp": metadata.get("timestamp", ""),
+                     }
+                 )
+             except Exception as e:
+                 logger.debug("Failed to load metadata from %s: %s", workflow_dir, e)
+                 continue
+
+         if not workflows:
+             click.echo("No workflows found")
+             return
+
+         # Apply filters
+         if signal:
+             workflows = [w for w in workflows if w["signal"] == signal]
+         if product:
+             workflows = [w for w in workflows if w["product"] == product]
+         if strategy:
+             workflows = [w for w in workflows if w["strategy"] == strategy]
+
+         if not workflows:
+             click.echo("No workflows match the specified filters")
+             return
+
+         # Sort by timestamp descending (newest first)
+         workflows.sort(key=lambda w: w["timestamp"], reverse=True)
+
+         # Apply limit only if no filters active
+         has_filters = bool(signal or product or strategy)
+         if not has_filters and len(workflows) > 50:
+             workflows_to_show = workflows[:50]
+             click.echo(
+                 f"Showing 50 most recent workflows (of {len(workflows)} total). Use filters to narrow results.\n"
+             )
+         else:
+             workflows_to_show = workflows
+
+         # Display header
+         click.echo(
+             f"{'IDX':<5} {'LABEL':<25} {'SIGNAL':<20} {'STRATEGY':<15} {'PRODUCT':<15} {'STATUS':<10} {'TIMESTAMP':<20}"
+         )
+         click.echo("-" * 115)
+
+         # Display workflows
+         for idx, workflow in enumerate(workflows_to_show):
+             # Parse timestamp for display
+             try:
+                 ts = datetime.fromisoformat(workflow["timestamp"])
+                 ts_str = ts.strftime("%Y-%m-%d %H:%M:%S")
+             except Exception:
+                 ts_str = (
+                     workflow["timestamp"][:19] if workflow["timestamp"] else "unknown"
+                 )
+
+             click.echo(
+                 f"{idx:<5} "
+                 f"{workflow['label'][:24]:<25} "
+                 f"{workflow['signal'][:19]:<20} "
+                 f"{workflow['strategy'][:14]:<15} "
+                 f"{workflow['product'][:14]:<15} "
+                 f"{workflow['status']:<10} "
+                 f"{ts_str}"
+             )
+
+         # Show summary
+         if has_filters:
+             click.echo(
+                 f"\nShowing {len(workflows_to_show)} workflow(s) matching filters"
+             )
+         else:
+             click.echo(f"\nShowing {len(workflows_to_show)} workflow(s)")
+
+         # Note about indices
+         click.echo(
+             "\nNote: Indices are ephemeral and change as new workflows are added."
+         )
+         click.echo("Use workflow label for stable references in report command.")
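
Since both commands are plain click commands, they can be exercised without installing the console script. A hedged sketch using click's standard test runner; the invocations mirror the documented examples above, and actual output depends on the local catalogs and workflow directories:

from click.testing import CliRunner

from aponyx.cli.commands.list import list_items

runner = CliRunner()

# Equivalent to: aponyx list workflows --signal spread_momentum
result = runner.invoke(list_items, ["workflows", "--signal", "spread_momentum"])
print(result.output)

# Filters on non-workflow item types abort with a non-zero exit code.
result = runner.invoke(list_items, ["signals", "--signal", "spread_momentum"])
assert result.exit_code != 0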