odibi 2.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. odibi/__init__.py +32 -0
  2. odibi/__main__.py +8 -0
  3. odibi/catalog.py +3011 -0
  4. odibi/cli/__init__.py +11 -0
  5. odibi/cli/__main__.py +6 -0
  6. odibi/cli/catalog.py +553 -0
  7. odibi/cli/deploy.py +69 -0
  8. odibi/cli/doctor.py +161 -0
  9. odibi/cli/export.py +66 -0
  10. odibi/cli/graph.py +150 -0
  11. odibi/cli/init_pipeline.py +242 -0
  12. odibi/cli/lineage.py +259 -0
  13. odibi/cli/main.py +215 -0
  14. odibi/cli/run.py +98 -0
  15. odibi/cli/schema.py +208 -0
  16. odibi/cli/secrets.py +232 -0
  17. odibi/cli/story.py +379 -0
  18. odibi/cli/system.py +132 -0
  19. odibi/cli/test.py +286 -0
  20. odibi/cli/ui.py +31 -0
  21. odibi/cli/validate.py +39 -0
  22. odibi/config.py +3541 -0
  23. odibi/connections/__init__.py +9 -0
  24. odibi/connections/azure_adls.py +499 -0
  25. odibi/connections/azure_sql.py +709 -0
  26. odibi/connections/base.py +28 -0
  27. odibi/connections/factory.py +322 -0
  28. odibi/connections/http.py +78 -0
  29. odibi/connections/local.py +119 -0
  30. odibi/connections/local_dbfs.py +61 -0
  31. odibi/constants.py +17 -0
  32. odibi/context.py +528 -0
  33. odibi/diagnostics/__init__.py +12 -0
  34. odibi/diagnostics/delta.py +520 -0
  35. odibi/diagnostics/diff.py +169 -0
  36. odibi/diagnostics/manager.py +171 -0
  37. odibi/engine/__init__.py +20 -0
  38. odibi/engine/base.py +334 -0
  39. odibi/engine/pandas_engine.py +2178 -0
  40. odibi/engine/polars_engine.py +1114 -0
  41. odibi/engine/registry.py +54 -0
  42. odibi/engine/spark_engine.py +2362 -0
  43. odibi/enums.py +7 -0
  44. odibi/exceptions.py +297 -0
  45. odibi/graph.py +426 -0
  46. odibi/introspect.py +1214 -0
  47. odibi/lineage.py +511 -0
  48. odibi/node.py +3341 -0
  49. odibi/orchestration/__init__.py +0 -0
  50. odibi/orchestration/airflow.py +90 -0
  51. odibi/orchestration/dagster.py +77 -0
  52. odibi/patterns/__init__.py +24 -0
  53. odibi/patterns/aggregation.py +599 -0
  54. odibi/patterns/base.py +94 -0
  55. odibi/patterns/date_dimension.py +423 -0
  56. odibi/patterns/dimension.py +696 -0
  57. odibi/patterns/fact.py +748 -0
  58. odibi/patterns/merge.py +128 -0
  59. odibi/patterns/scd2.py +148 -0
  60. odibi/pipeline.py +2382 -0
  61. odibi/plugins.py +80 -0
  62. odibi/project.py +581 -0
  63. odibi/references.py +151 -0
  64. odibi/registry.py +246 -0
  65. odibi/semantics/__init__.py +71 -0
  66. odibi/semantics/materialize.py +392 -0
  67. odibi/semantics/metrics.py +361 -0
  68. odibi/semantics/query.py +743 -0
  69. odibi/semantics/runner.py +430 -0
  70. odibi/semantics/story.py +507 -0
  71. odibi/semantics/views.py +432 -0
  72. odibi/state/__init__.py +1203 -0
  73. odibi/story/__init__.py +55 -0
  74. odibi/story/doc_story.py +554 -0
  75. odibi/story/generator.py +1431 -0
  76. odibi/story/lineage.py +1043 -0
  77. odibi/story/lineage_utils.py +324 -0
  78. odibi/story/metadata.py +608 -0
  79. odibi/story/renderers.py +453 -0
  80. odibi/story/templates/run_story.html +2520 -0
  81. odibi/story/themes.py +216 -0
  82. odibi/testing/__init__.py +13 -0
  83. odibi/testing/assertions.py +75 -0
  84. odibi/testing/fixtures.py +85 -0
  85. odibi/testing/source_pool.py +277 -0
  86. odibi/transformers/__init__.py +122 -0
  87. odibi/transformers/advanced.py +1472 -0
  88. odibi/transformers/delete_detection.py +610 -0
  89. odibi/transformers/manufacturing.py +1029 -0
  90. odibi/transformers/merge_transformer.py +778 -0
  91. odibi/transformers/relational.py +675 -0
  92. odibi/transformers/scd.py +579 -0
  93. odibi/transformers/sql_core.py +1356 -0
  94. odibi/transformers/validation.py +165 -0
  95. odibi/ui/__init__.py +0 -0
  96. odibi/ui/app.py +195 -0
  97. odibi/utils/__init__.py +66 -0
  98. odibi/utils/alerting.py +667 -0
  99. odibi/utils/config_loader.py +343 -0
  100. odibi/utils/console.py +231 -0
  101. odibi/utils/content_hash.py +202 -0
  102. odibi/utils/duration.py +43 -0
  103. odibi/utils/encoding.py +102 -0
  104. odibi/utils/extensions.py +28 -0
  105. odibi/utils/hashing.py +61 -0
  106. odibi/utils/logging.py +203 -0
  107. odibi/utils/logging_context.py +740 -0
  108. odibi/utils/progress.py +429 -0
  109. odibi/utils/setup_helpers.py +302 -0
  110. odibi/utils/telemetry.py +140 -0
  111. odibi/validation/__init__.py +62 -0
  112. odibi/validation/engine.py +765 -0
  113. odibi/validation/explanation_linter.py +155 -0
  114. odibi/validation/fk.py +547 -0
  115. odibi/validation/gate.py +252 -0
  116. odibi/validation/quarantine.py +605 -0
  117. odibi/writers/__init__.py +15 -0
  118. odibi/writers/sql_server_writer.py +2081 -0
  119. odibi-2.5.0.dist-info/METADATA +255 -0
  120. odibi-2.5.0.dist-info/RECORD +124 -0
  121. odibi-2.5.0.dist-info/WHEEL +5 -0
  122. odibi-2.5.0.dist-info/entry_points.txt +2 -0
  123. odibi-2.5.0.dist-info/licenses/LICENSE +190 -0
  124. odibi-2.5.0.dist-info/top_level.txt +1 -0
odibi/cli/lineage.py ADDED
@@ -0,0 +1,259 @@
1
+ """CLI commands for cross-pipeline lineage tracking."""
2
+
3
+ import json
4
+ from typing import Dict, List, Optional
5
+
6
+ from odibi.config import load_config_from_file
7
+
8
+
9
def add_lineage_parser(subparsers) -> None:
    """Register the `lineage` command group and its subcommands on the CLI."""
    lineage_parser = subparsers.add_parser("lineage", help="Cross-pipeline lineage commands")
    lineage_subparsers = lineage_parser.add_subparsers(dest="lineage_command")

    # odibi lineage upstream <table> / odibi lineage downstream <table>
    # Both tracing commands share the same option set; only the help text differs.
    trace_specs = [
        ("upstream", "Trace upstream sources of a table", "Table path (e.g., gold/customer_360)"),
        (
            "downstream",
            "Trace downstream consumers of a table",
            "Table path (e.g., bronze/customers_raw)",
        ),
    ]
    for command_name, command_help, table_help in trace_specs:
        trace_parser = lineage_subparsers.add_parser(command_name, help=command_help)
        trace_parser.add_argument("table", help=table_help)
        trace_parser.add_argument("--config", help="Path to YAML config file")
        trace_parser.add_argument(
            "--depth", type=int, default=3, help="Maximum depth to traverse (default: 3)"
        )
        trace_parser.add_argument(
            "--format",
            choices=["tree", "json"],
            default="tree",
            help="Output format (default: tree)",
        )

    # odibi lineage impact <table>
    impact_parser = lineage_subparsers.add_parser(
        "impact", help="Impact analysis for schema changes"
    )
    impact_parser.add_argument("table", help="Table path to analyze impact for")
    impact_parser.add_argument("--config", help="Path to YAML config file")
    impact_parser.add_argument(
        "--depth", type=int, default=3, help="Maximum depth to traverse (default: 3)"
    )
55
+
56
+
57
def lineage_command(args) -> int:
    """Dispatch a `lineage` subcommand to its handler; return the exit code."""
    command = getattr(args, "lineage_command", None)
    if not command:
        # No subcommand given: show usage and fail.
        print("Usage: odibi lineage <command> [options]")
        print("Commands: upstream, downstream, impact")
        return 1

    if command == "upstream":
        return _lineage_upstream(args)
    if command == "downstream":
        return _lineage_downstream(args)
    if command == "impact":
        return _lineage_impact(args)

    print(f"Unknown lineage command: {command}")
    return 1
73
+
74
+
75
+ def _get_catalog_manager(config_path: Optional[str]):
76
+ """Get CatalogManager instance from config."""
77
+ if not config_path:
78
+ print("Error: --config is required")
79
+ return None
80
+
81
+ try:
82
+ project_config = load_config_from_file(config_path)
83
+
84
+ from odibi.catalog import CatalogManager
85
+ from odibi.engine import get_engine
86
+
87
+ engine = get_engine(project_config.engine)
88
+ system_conn = project_config.connections.get(project_config.system.connection)
89
+
90
+ if hasattr(system_conn, "base_path"):
91
+ base_path = f"{system_conn.base_path.rstrip('/')}/{project_config.system.path}"
92
+ else:
93
+ base_path = project_config.system.path
94
+
95
+ catalog = CatalogManager(
96
+ spark=None,
97
+ config=project_config.system,
98
+ base_path=base_path,
99
+ engine=engine,
100
+ connection=system_conn,
101
+ )
102
+
103
+ return catalog
104
+
105
+ except FileNotFoundError:
106
+ print(f"Error: Config file not found: {config_path}")
107
+ return None
108
+ except Exception as e:
109
+ print(f"Error loading config: {e}")
110
+ return None
111
+
112
+
113
+ def _build_tree(records: List[Dict], root: str, direction: str = "upstream") -> Dict:
114
+ """Build a tree structure from lineage records."""
115
+ tree = {"name": root, "children": []}
116
+
117
+ by_depth = {}
118
+ for record in records:
119
+ depth = record.get("depth", 0)
120
+ if depth not in by_depth:
121
+ by_depth[depth] = []
122
+ by_depth[depth].append(record)
123
+
124
+ if direction == "upstream":
125
+ depth_0_records = by_depth.get(0, [])
126
+ for record in depth_0_records:
127
+ source = record.get("source_table")
128
+ node_info = ""
129
+ if record.get("source_pipeline") and record.get("source_node"):
130
+ node_info = f" ({record['source_pipeline']}.{record['source_node']})"
131
+ child = {"name": f"{source}{node_info}", "children": []}
132
+ tree["children"].append(child)
133
+ else:
134
+ depth_0_records = by_depth.get(0, [])
135
+ for record in depth_0_records:
136
+ target = record.get("target_table")
137
+ node_info = ""
138
+ if record.get("target_pipeline") and record.get("target_node"):
139
+ node_info = f" ({record['target_pipeline']}.{record['target_node']})"
140
+ child = {"name": f"{target}{node_info}", "children": []}
141
+ tree["children"].append(child)
142
+
143
+ return tree
144
+
145
+
146
+ def _print_tree(node: Dict, prefix: str = "", is_last: bool = True, depth: int = 0) -> None:
147
+ """Print a tree structure in ASCII format."""
148
+ connector = "└── " if is_last else "├── "
149
+ if depth == 0:
150
+ print(node["name"])
151
+ else:
152
+ print(f"{prefix}{connector}{node['name']}")
153
+
154
+ children = node.get("children", [])
155
+ child_prefix = prefix + (" " if is_last else "│ ")
156
+ for i, child in enumerate(children):
157
+ is_child_last = i == len(children) - 1
158
+ _print_tree(child, child_prefix, is_child_last, depth + 1)
159
+
160
+
161
def _lineage_upstream(args) -> int:
    """Trace upstream lineage for a table and print it as a tree or JSON."""
    catalog = _get_catalog_manager(args.config)
    if not catalog:
        return 1

    records = catalog.get_upstream(args.table, depth=args.depth)
    if not records:
        print(f"No upstream lineage found for: {args.table}")
        return 0

    if args.format == "json":
        # default=str keeps non-JSON-native values (e.g. timestamps) printable.
        print(json.dumps(records, indent=2, default=str))
    else:
        print(f"\nUpstream Lineage: {args.table}")
        print("=" * 60)
        _print_tree(_build_tree(records, args.table, direction="upstream"))
        print()
    return 0
185
+
186
+
187
def _lineage_downstream(args) -> int:
    """Trace downstream lineage for a table and print it as a tree or JSON."""
    catalog = _get_catalog_manager(args.config)
    if not catalog:
        return 1

    records = catalog.get_downstream(args.table, depth=args.depth)
    if not records:
        print(f"No downstream lineage found for: {args.table}")
        return 0

    if args.format == "json":
        # default=str keeps non-JSON-native values (e.g. timestamps) printable.
        print(json.dumps(records, indent=2, default=str))
    else:
        print(f"\nDownstream Lineage: {args.table}")
        print("=" * 60)
        _print_tree(_build_tree(records, args.table, direction="downstream"))
        print()
    return 0
211
+
212
+
213
def _lineage_impact(args) -> int:
    """Perform impact analysis for a table.

    Lists every downstream table (with its owning pipeline, when known)
    that would be affected by a change to ``args.table``, plus a summary
    count of tables and pipelines.

    Returns:
        0 on success (including "no dependencies"), 1 when the catalog
        cannot be loaded.

    Fix: the per-table pipeline lookup was a nested rescan of all
    downstream records (O(n^2)); it is now a single first-wins pass that
    preserves the original "first matching record decides" behavior.
    """
    catalog = _get_catalog_manager(args.config)
    if not catalog:
        return 1

    downstream = catalog.get_downstream(args.table, depth=args.depth)

    if not downstream:
        print(f"No downstream dependencies found for: {args.table}")
        return 0

    affected_pipelines = set()
    # Maps affected table -> pipeline shown next to it. setdefault keeps the
    # first record seen per table, matching the old break-on-first-match scan.
    table_pipelines: Dict[str, Optional[str]] = {}

    for record in downstream:
        target = record.get("target_table")
        if target:
            table_pipelines.setdefault(target, record.get("target_pipeline"))
        pipeline = record.get("target_pipeline")
        if pipeline:
            affected_pipelines.add(pipeline)

    print(f"\n⚠️ Impact Analysis: {args.table}")
    print("=" * 60)
    print(f"\nChanges to {args.table} would affect:")
    print()

    if table_pipelines:
        print("  Affected Tables:")
        for table in sorted(table_pipelines):
            pipeline = table_pipelines[table]
            pipeline_info = f" (pipeline: {pipeline})" if pipeline else ""
            print(f"    - {table}{pipeline_info}")

    print()
    print("  Summary:")
    print(
        f"    Total: {len(table_pipelines)} downstream table(s) in {len(affected_pipelines)} pipeline(s)"
    )
    print()

    return 0
odibi/cli/main.py ADDED
@@ -0,0 +1,215 @@
1
+ """Main CLI entry point."""
2
+
3
+ import argparse
4
+ import sys
5
+
6
+ from odibi.cli.catalog import add_catalog_parser, catalog_command
7
+ from odibi.cli.doctor import add_doctor_parser, doctor_command
8
+ from odibi.cli.export import add_export_parser, export_command
9
+ from odibi.cli.graph import graph_command
10
+ from odibi.cli.init_pipeline import add_init_parser, init_pipeline_command
11
+ from odibi.cli.lineage import add_lineage_parser, lineage_command
12
+ from odibi.cli.run import run_command
13
+ from odibi.cli.schema import add_schema_parser, schema_command
14
+ from odibi.cli.secrets import add_secrets_parser, secrets_command
15
+ from odibi.cli.story import add_story_parser, story_command
16
+ from odibi.cli.system import add_system_parser, system_command
17
+ from odibi.cli.test import test_command
18
+ from odibi.cli.ui import add_ui_parser, ui_command
19
+ from odibi.cli.validate import validate_command
20
+ from odibi.introspect import generate_docs
21
+ from odibi.utils.telemetry import setup_telemetry
22
+
23
+
24
def main():
    """Main CLI entry point.

    Parses command-line arguments, configures telemetry and logging, and
    dispatches to the matching subcommand handler. Returns the handler's
    exit code, or 1 (after printing help) when no command is given.
    """
    # Configure telemetry early
    setup_telemetry()

    parser = argparse.ArgumentParser(
        description="Odibi Data Pipeline Framework",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  odibi run config.yaml                 Run a pipeline
  odibi validate config.yaml            Validate configuration
  odibi graph config.yaml               Visualize dependencies
  odibi story generate config.yaml      Generate documentation
  odibi story diff run1.json run2.json  Compare two runs
  odibi story list                      List story files
""",
    )

    # Global arguments
    parser.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR"],
        default="INFO",
        help="Set logging verbosity (default: INFO)",
    )

    subparsers = parser.add_subparsers(dest="command", help="Available commands")

    # odibi run
    run_parser = subparsers.add_parser("run", help="Execute pipeline")
    run_parser.add_argument("config", help="Path to YAML config file")
    run_parser.add_argument(
        "--env", default=None, help="Environment to apply overrides (e.g., dev, qat, prod)"
    )
    run_parser.add_argument(
        "--dry-run", action="store_true", help="Simulate execution without running operations"
    )
    run_parser.add_argument(
        "--resume", action="store_true", help="Resume from last failure (skip successful nodes)"
    )
    run_parser.add_argument(
        "--parallel", action="store_true", help="Run independent nodes in parallel"
    )
    run_parser.add_argument(
        "--workers",
        type=int,
        default=4,
        help="Number of worker threads for parallel execution (default: 4)",
    )
    run_parser.add_argument(
        "--on-error",
        choices=["fail_fast", "fail_later", "ignore"],
        help="Override error handling strategy",
    )
    run_parser.add_argument(
        "--tag",
        help="Filter nodes by tag (e.g., --tag daily)",
    )
    # --pipeline/--node are stored under *_name to avoid clashing with other
    # attributes on the parsed namespace.
    run_parser.add_argument(
        "--pipeline",
        dest="pipeline_name",
        help="Run specific pipeline by name",
    )
    run_parser.add_argument(
        "--node",
        dest="node_name",
        help="Run specific node by name",
    )

    # odibi deploy
    deploy_parser = subparsers.add_parser("deploy", help="Deploy definitions to System Catalog")
    deploy_parser.add_argument("config", help="Path to YAML config file")
    deploy_parser.add_argument(
        "--env", default=None, help="Environment to apply overrides (e.g., dev, qat, prod)"
    )

    # odibi validate
    validate_parser = subparsers.add_parser("validate", help="Validate config")
    validate_parser.add_argument("config", help="Path to YAML config file")
    validate_parser.add_argument(
        "--env", default=None, help="Environment to apply overrides (e.g., dev, qat, prod)"
    )

    # odibi test
    test_parser = subparsers.add_parser("test", help="Run unit tests for transformations")
    test_parser.add_argument(
        "path", nargs="?", default="tests", help="Path to tests directory or file (default: tests)"
    )
    test_parser.add_argument("--snapshot", action="store_true", help="Update snapshots for tests")

    # odibi docs (no options)
    subparsers.add_parser("docs", help="Generate API documentation")

    # odibi graph
    graph_parser = subparsers.add_parser("graph", help="Visualize dependency graph")
    graph_parser.add_argument("config", help="Path to YAML config file")
    graph_parser.add_argument("--pipeline", help="Pipeline name (optional)")
    graph_parser.add_argument(
        "--env", default=None, help="Environment to apply overrides (e.g., dev, qat, prod)"
    )
    graph_parser.add_argument(
        "--format",
        choices=["ascii", "dot", "mermaid"],
        default="ascii",
        help="Output format (default: ascii)",
    )
    graph_parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output")

    # Remaining command groups register their own subparsers.
    # odibi story
    add_story_parser(subparsers)

    # odibi secrets
    add_secrets_parser(subparsers)

    # odibi init-pipeline (create/init)
    add_init_parser(subparsers)

    # odibi doctor
    add_doctor_parser(subparsers)

    # odibi ui
    add_ui_parser(subparsers)

    # odibi export
    add_export_parser(subparsers)

    # odibi catalog
    add_catalog_parser(subparsers)

    # odibi schema
    add_schema_parser(subparsers)

    # odibi lineage
    add_lineage_parser(subparsers)

    # odibi system
    add_system_parser(subparsers)

    args = parser.parse_args()

    # Configure logging (after parsing so --log-level is honored)
    import logging

    logging.basicConfig(
        level=getattr(logging, args.log_level),
        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )

    # Dispatch to the matching command handler; each returns an exit code.
    if args.command == "run":
        return run_command(args)
    elif args.command == "deploy":
        # Imported lazily: only the deploy path needs this module.
        from odibi.cli.deploy import deploy_command

        return deploy_command(args)
    elif args.command == "docs":
        generate_docs()
        return 0
    elif args.command == "validate":
        return validate_command(args)
    elif args.command == "test":
        return test_command(args)
    elif args.command == "graph":
        return graph_command(args)
    elif args.command == "story":
        return story_command(args)
    elif args.command == "secrets":
        return secrets_command(args)
    elif args.command in ["init-pipeline", "create", "init", "generate-project"]:
        return init_pipeline_command(args)
    elif args.command == "doctor":
        return doctor_command(args)
    elif args.command == "ui":
        return ui_command(args)
    elif args.command == "export":
        return export_command(args)
    elif args.command == "catalog":
        return catalog_command(args)
    elif args.command == "schema":
        return schema_command(args)
    elif args.command == "lineage":
        return lineage_command(args)
    elif args.command == "system":
        return system_command(args)
    else:
        # No (or unknown) command: show help and signal failure.
        parser.print_help()
        return 1
212
+
213
+
214
if __name__ == "__main__":
    # Propagate main()'s exit code to the shell.
    sys.exit(main())
odibi/cli/run.py ADDED
@@ -0,0 +1,98 @@
1
+ """Run command implementation."""
2
+
3
+ from pathlib import Path
4
+
5
+ from odibi.pipeline import PipelineManager
6
+ from odibi.utils.extensions import load_extensions
7
+ from odibi.utils.logging import logger
8
+
9
+
10
def _log_node_failures(result) -> None:
    """Log every failed node in *result*, surfacing any attached suggestions.

    Suggestions may live on the node's error object itself or on a wrapped
    ``original_error``; whichever is found first is printed.
    """
    logger.error(f"Pipeline '{result.pipeline_name}' failed")
    for node_name in result.failed:
        node_res = result.node_results.get(node_name)
        if not (node_res and node_res.error):
            continue
        logger.error(f"Node '{node_name}' error: {node_res.error}")

        # Unbury suggestions
        error_obj = node_res.error
        suggestions = getattr(error_obj, "suggestions", [])
        if not suggestions and hasattr(error_obj, "original_error"):
            suggestions = getattr(error_obj.original_error, "suggestions", [])

        if suggestions:
            logger.info("💡 Suggestions:")
            for suggestion in suggestions:
                logger.info(f"   - {suggestion}")


def run_command(args):
    """Execute pipeline(s) from a YAML config file.

    Resolves the config path, temporarily switches CWD to the config's
    directory so relative paths resolve consistently, runs the selected
    pipeline(s), and reports any failures.

    Returns:
        0 when every pipeline succeeded, 1 on any failure or error.

    Fix: failure reporting was duplicated between the multi-pipeline and
    single-pipeline branches with inconsistent output ("💡 Suggestions:" vs
    "Suggestions:") and the multi-pipeline branch stopped after the first
    failed node; both now share _log_node_failures, which reports every
    failed node identically.
    """
    try:
        config_path = Path(args.config).resolve()
        project_root = config_path.parent

        # Change CWD to config directory to resolve relative paths consistently
        import os

        original_cwd = os.getcwd()
        os.chdir(project_root)
        logger.debug(f"Changed working directory to: {project_root}")

        try:
            # Load extensions from config dir (which is now CWD)
            load_extensions(project_root)

            manager = PipelineManager.from_yaml(config_path.name, env=args.env)
            results = manager.run(
                pipelines=getattr(args, "pipeline_name", None),
                dry_run=args.dry_run,
                resume_from_failure=args.resume,
                parallel=args.parallel,
                max_workers=args.workers,
                on_error=args.on_error,
                tag=getattr(args, "tag", None),
                node=getattr(args, "node_name", None),
            )
        finally:
            # Restore CWD even when the run raises.
            os.chdir(original_cwd)

        # manager.run returns a dict for multiple pipelines and a single
        # result object otherwise; normalize to a list for uniform reporting.
        result_list = list(results.values()) if isinstance(results, dict) else [results]

        failed = False
        for result in result_list:
            if result.failed:
                failed = True
                _log_node_failures(result)

        if failed:
            logger.error("Pipeline execution failed")
            return 1
        logger.info("Pipeline completed successfully")
        return 0

    except Exception as e:
        logger.error(f"Pipeline failed: {e}")
        return 1