odibi-2.5.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (124)
  1. odibi/__init__.py +32 -0
  2. odibi/__main__.py +8 -0
  3. odibi/catalog.py +3011 -0
  4. odibi/cli/__init__.py +11 -0
  5. odibi/cli/__main__.py +6 -0
  6. odibi/cli/catalog.py +553 -0
  7. odibi/cli/deploy.py +69 -0
  8. odibi/cli/doctor.py +161 -0
  9. odibi/cli/export.py +66 -0
  10. odibi/cli/graph.py +150 -0
  11. odibi/cli/init_pipeline.py +242 -0
  12. odibi/cli/lineage.py +259 -0
  13. odibi/cli/main.py +215 -0
  14. odibi/cli/run.py +98 -0
  15. odibi/cli/schema.py +208 -0
  16. odibi/cli/secrets.py +232 -0
  17. odibi/cli/story.py +379 -0
  18. odibi/cli/system.py +132 -0
  19. odibi/cli/test.py +286 -0
  20. odibi/cli/ui.py +31 -0
  21. odibi/cli/validate.py +39 -0
  22. odibi/config.py +3541 -0
  23. odibi/connections/__init__.py +9 -0
  24. odibi/connections/azure_adls.py +499 -0
  25. odibi/connections/azure_sql.py +709 -0
  26. odibi/connections/base.py +28 -0
  27. odibi/connections/factory.py +322 -0
  28. odibi/connections/http.py +78 -0
  29. odibi/connections/local.py +119 -0
  30. odibi/connections/local_dbfs.py +61 -0
  31. odibi/constants.py +17 -0
  32. odibi/context.py +528 -0
  33. odibi/diagnostics/__init__.py +12 -0
  34. odibi/diagnostics/delta.py +520 -0
  35. odibi/diagnostics/diff.py +169 -0
  36. odibi/diagnostics/manager.py +171 -0
  37. odibi/engine/__init__.py +20 -0
  38. odibi/engine/base.py +334 -0
  39. odibi/engine/pandas_engine.py +2178 -0
  40. odibi/engine/polars_engine.py +1114 -0
  41. odibi/engine/registry.py +54 -0
  42. odibi/engine/spark_engine.py +2362 -0
  43. odibi/enums.py +7 -0
  44. odibi/exceptions.py +297 -0
  45. odibi/graph.py +426 -0
  46. odibi/introspect.py +1214 -0
  47. odibi/lineage.py +511 -0
  48. odibi/node.py +3341 -0
  49. odibi/orchestration/__init__.py +0 -0
  50. odibi/orchestration/airflow.py +90 -0
  51. odibi/orchestration/dagster.py +77 -0
  52. odibi/patterns/__init__.py +24 -0
  53. odibi/patterns/aggregation.py +599 -0
  54. odibi/patterns/base.py +94 -0
  55. odibi/patterns/date_dimension.py +423 -0
  56. odibi/patterns/dimension.py +696 -0
  57. odibi/patterns/fact.py +748 -0
  58. odibi/patterns/merge.py +128 -0
  59. odibi/patterns/scd2.py +148 -0
  60. odibi/pipeline.py +2382 -0
  61. odibi/plugins.py +80 -0
  62. odibi/project.py +581 -0
  63. odibi/references.py +151 -0
  64. odibi/registry.py +246 -0
  65. odibi/semantics/__init__.py +71 -0
  66. odibi/semantics/materialize.py +392 -0
  67. odibi/semantics/metrics.py +361 -0
  68. odibi/semantics/query.py +743 -0
  69. odibi/semantics/runner.py +430 -0
  70. odibi/semantics/story.py +507 -0
  71. odibi/semantics/views.py +432 -0
  72. odibi/state/__init__.py +1203 -0
  73. odibi/story/__init__.py +55 -0
  74. odibi/story/doc_story.py +554 -0
  75. odibi/story/generator.py +1431 -0
  76. odibi/story/lineage.py +1043 -0
  77. odibi/story/lineage_utils.py +324 -0
  78. odibi/story/metadata.py +608 -0
  79. odibi/story/renderers.py +453 -0
  80. odibi/story/templates/run_story.html +2520 -0
  81. odibi/story/themes.py +216 -0
  82. odibi/testing/__init__.py +13 -0
  83. odibi/testing/assertions.py +75 -0
  84. odibi/testing/fixtures.py +85 -0
  85. odibi/testing/source_pool.py +277 -0
  86. odibi/transformers/__init__.py +122 -0
  87. odibi/transformers/advanced.py +1472 -0
  88. odibi/transformers/delete_detection.py +610 -0
  89. odibi/transformers/manufacturing.py +1029 -0
  90. odibi/transformers/merge_transformer.py +778 -0
  91. odibi/transformers/relational.py +675 -0
  92. odibi/transformers/scd.py +579 -0
  93. odibi/transformers/sql_core.py +1356 -0
  94. odibi/transformers/validation.py +165 -0
  95. odibi/ui/__init__.py +0 -0
  96. odibi/ui/app.py +195 -0
  97. odibi/utils/__init__.py +66 -0
  98. odibi/utils/alerting.py +667 -0
  99. odibi/utils/config_loader.py +343 -0
  100. odibi/utils/console.py +231 -0
  101. odibi/utils/content_hash.py +202 -0
  102. odibi/utils/duration.py +43 -0
  103. odibi/utils/encoding.py +102 -0
  104. odibi/utils/extensions.py +28 -0
  105. odibi/utils/hashing.py +61 -0
  106. odibi/utils/logging.py +203 -0
  107. odibi/utils/logging_context.py +740 -0
  108. odibi/utils/progress.py +429 -0
  109. odibi/utils/setup_helpers.py +302 -0
  110. odibi/utils/telemetry.py +140 -0
  111. odibi/validation/__init__.py +62 -0
  112. odibi/validation/engine.py +765 -0
  113. odibi/validation/explanation_linter.py +155 -0
  114. odibi/validation/fk.py +547 -0
  115. odibi/validation/gate.py +252 -0
  116. odibi/validation/quarantine.py +605 -0
  117. odibi/writers/__init__.py +15 -0
  118. odibi/writers/sql_server_writer.py +2081 -0
  119. odibi-2.5.0.dist-info/METADATA +255 -0
  120. odibi-2.5.0.dist-info/RECORD +124 -0
  121. odibi-2.5.0.dist-info/WHEEL +5 -0
  122. odibi-2.5.0.dist-info/entry_points.txt +2 -0
  123. odibi-2.5.0.dist-info/licenses/LICENSE +190 -0
  124. odibi-2.5.0.dist-info/top_level.txt +1 -0
odibi/utils/config_loader.py ADDED
@@ -0,0 +1,343 @@
+ import os
+ import re
+ from typing import Any, Dict, Optional
+
+ import yaml
+
+ from odibi.utils.logging import logger
+
+ # Pattern to match ${VAR} or ${env:VAR}
+ # Captures the variable name in group 1
+ ENV_PATTERN = re.compile(r"\$\{(?:env:)?([A-Za-z0-9_]+)\}")
+
+
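For reference, a minimal sketch of what the substitution pattern accepts (the config strings below are illustrative):

    from odibi.utils.config_loader import ENV_PATTERN

    assert ENV_PATTERN.search("host: ${DB_HOST}").group(1) == "DB_HOST"      # plain form
    assert ENV_PATTERN.search("host: ${env:DB_HOST}").group(1) == "DB_HOST"  # optional env: prefix
    assert ENV_PATTERN.search("host: ${db-host}") is None                    # hyphens never match
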
+ def _normalize_pattern_to_transformer(data: Dict[str, Any]) -> None:
+     """Normalize 'pattern:' block to 'transformer:' + 'params:' fields.
+
+     The documentation uses `pattern: type: X` syntax, but the node executor
+     reads `transformer:` and `params:`. This function converts the user-friendly
+     syntax to the internal representation.
+
+     Example:
+         pattern:
+           type: dimension
+           params:
+             natural_key: customer_id
+
+     Becomes:
+         transformer: dimension
+         params:
+           natural_key: customer_id
+     """
+     pipelines = data.get("pipelines", [])
+     for pipeline in pipelines:
+         if isinstance(pipeline, dict):
+             nodes = pipeline.get("nodes", [])
+             for node in nodes:
+                 if isinstance(node, dict) and "pattern" in node:
+                     pattern_block = node.pop("pattern")
+                     if isinstance(pattern_block, dict):
+                         pattern_type = pattern_block.get("type")
+                         pattern_params = pattern_block.get("params", {})
+                         if pattern_type:
+                             node["transformer"] = pattern_type
+                         if pattern_params:
+                             existing_params = node.get("params", {})
+                             node["params"] = {**existing_params, **pattern_params}
+
+
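The normalization mutates the parsed config in place; a minimal sketch, with illustrative node fields:

    from odibi.utils.config_loader import _normalize_pattern_to_transformer

    data = {
        "pipelines": [
            {"nodes": [{"name": "dim_customer",
                        "pattern": {"type": "dimension",
                                    "params": {"natural_key": "customer_id"}}}]}
        ]
    }
    _normalize_pattern_to_transformer(data)

    node = data["pipelines"][0]["nodes"][0]
    assert node["transformer"] == "dimension"
    assert node["params"] == {"natural_key": "customer_id"}
    assert "pattern" not in node  # the user-facing block is consumed
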
+ def _tag_nodes_with_source(data: Dict[str, Any], source_path: str) -> None:
+     """Tag all nodes in pipelines with their source YAML file path.
+
+     This enables sql_file resolution to work correctly when pipelines are imported
+     from different directories.
+     """
+     pipelines = data.get("pipelines", [])
+     for pipeline in pipelines:
+         if isinstance(pipeline, dict):
+             nodes = pipeline.get("nodes", [])
+             for node in nodes:
+                 if isinstance(node, dict) and "_source_yaml" not in node:
+                     node["_source_yaml"] = source_path
+
+
+ def _merge_semantic_config(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
+     """Merge semantic layer configs by appending lists.
+
+     Semantic configs have list fields (metrics, dimensions, views, materializations)
+     that should be accumulated from multiple imports.
+     """
+     result = base.copy()
+     list_keys = ["metrics", "dimensions", "views", "materializations"]
+
+     for key, value in override.items():
+         if key in list_keys and isinstance(value, list):
+             if key in result and isinstance(result[key], list):
+                 logger.debug(
+                     "Appending semantic list",
+                     key=key,
+                     existing_count=len(result[key]),
+                     new_count=len(value),
+                 )
+                 result[key] = result[key] + value
+             else:
+                 result[key] = value
+         elif isinstance(value, dict) and key in result and isinstance(result[key], dict):
+             result[key] = _merge_semantic_config(result[key], value)
+         else:
+             result[key] = value
+
+     return result
+
+
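A worked sketch of the append-vs-overwrite behavior (the default_grain field is an illustrative assumption, not a documented odibi key):

    from odibi.utils.config_loader import _merge_semantic_config

    base = {"metrics": [{"name": "oee"}], "default_grain": "day"}
    override = {"metrics": [{"name": "yield"}], "default_grain": "shift"}

    merged = _merge_semantic_config(base, override)
    assert [m["name"] for m in merged["metrics"]] == ["oee", "yield"]  # list field: appended
    assert merged["default_grain"] == "shift"                          # scalar: overwritten
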
+ def _deep_merge(base: Dict[str, Any], override: Dict[str, Any]) -> Dict[str, Any]:
+     """Merge override dictionary into base dictionary.
+
+     Rules:
+         1. Dicts are merged recursively.
+         2. The 'pipelines' list is appended.
+         3. Semantic config lists (metrics, dimensions, views, materializations) are appended.
+         4. Other types (and other lists) are overwritten by the override.
+     """
+     result = base.copy()
+     for key, value in override.items():
+         if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+             if key == "semantic":
+                 logger.debug(
+                     "Merging semantic config",
+                     base_keys=list(result[key].keys()) if result[key] else [],
+                     override_keys=list(value.keys()),
+                 )
+                 result[key] = _merge_semantic_config(result[key], value)
+             else:
+                 logger.debug(
+                     "Deep merging nested dictionary",
+                     key=key,
+                     base_keys=list(result[key].keys()),
+                     override_keys=list(value.keys()),
+                 )
+                 result[key] = _deep_merge(result[key], value)
+         elif key == "pipelines" and isinstance(value, list) and isinstance(result.get(key), list):
+             logger.debug(
+                 "Appending pipelines list",
+                 existing_count=len(result[key]),
+                 new_count=len(value),
+             )
+             result[key] = result[key] + value
+         else:
+             if key in result:
+                 logger.debug("Overwriting key during merge", key=key)
+             result[key] = value
+     return result
+
+
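A worked sketch of the four rules (the project and tags keys are illustrative assumptions):

    from odibi.utils.config_loader import _deep_merge

    base = {
        "project": {"name": "demo", "owner": "data-team"},  # rule 1: dicts merge
        "pipelines": [{"pipeline": "bronze"}],              # rule 2: pipelines append
        "tags": ["a"],                                      # rule 4: other lists overwrite
    }
    override = {
        "project": {"owner": "platform"},
        "pipelines": [{"pipeline": "silver"}],
        "tags": ["b"],
    }

    merged = _deep_merge(base, override)
    assert merged["project"] == {"name": "demo", "owner": "platform"}
    assert [p["pipeline"] for p in merged["pipelines"]] == ["bronze", "silver"]
    assert merged["tags"] == ["b"]
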
+ def load_yaml_with_env(path: str, env: Optional[str] = None) -> Dict[str, Any]:
+     """Load a YAML file with environment variable substitution and imports.
+
+     Supports:
+         - ${VAR_NAME} substitution
+         - 'imports' list of relative paths
+         - 'environments' overrides based on the env param
+
+     Args:
+         path: Path to the YAML file.
+         env: Environment name (e.g., 'prod', 'dev') to apply overrides.
+
+     Returns:
+         Parsed dictionary (merged with imports and env overrides).
+
+     Raises:
+         FileNotFoundError: If the file does not exist.
+         ValueError: If an environment variable is missing.
+         yaml.YAMLError: If YAML parsing fails.
+     """
+     logger.debug("Loading YAML configuration", path=path, env=env)
+
+     if not os.path.exists(path):
+         logger.error("Configuration file not found", path=path)
+         raise FileNotFoundError(f"YAML file not found: {path}")
+
+     # Get the absolute path for relative import resolution
+     abs_path = os.path.abspath(path)
+     base_dir = os.path.dirname(abs_path)
+
+     logger.debug("Reading configuration file", absolute_path=abs_path)
+
+     with open(abs_path, "r", encoding="utf-8") as f:
+         content = f.read()
+
+     # Debug: log the first 100 chars to detect encoding/BOM issues
+     logger.debug(
+         "File content loaded",
+         file_size=len(content),
+         first_100_repr=repr(content[:100]),
+     )
+
+     env_vars_found = []
+
+     def replace_env(match):
+         var_name = match.group(1)
+         env_vars_found.append(var_name)
+         value = os.environ.get(var_name)
+         if value is None:
+             logger.error(
+                 "Missing required environment variable",
+                 variable=var_name,
+                 file=abs_path,
+             )
+             raise ValueError(f"Missing environment variable: {var_name}")
+         # Check for problematic characters that could break YAML.
+         # Colons are NOT checked because URLs (https://) are common and safe
+         # when the value is substituted into a quoted YAML string.
+         if any(c in value for c in ["\n", "\r", "#"]):
+             logger.warning(
+                 "Environment variable contains YAML-sensitive characters",
+                 variable=var_name,
+                 has_newline="\n" in value or "\r" in value,
+                 has_hash="#" in value,
+             )
+         logger.debug("Environment variable substituted", variable=var_name, length=len(value))
+         return value
+
+     # Substitute variables
+     substituted_content = ENV_PATTERN.sub(replace_env, content)
+
+     if env_vars_found:
+         logger.debug(
+             "Environment variable substitution complete",
+             variables_substituted=env_vars_found,
+             count=len(env_vars_found),
+         )
+
+     # Parse YAML
+     logger.debug("Parsing YAML content", path=abs_path)
+     try:
+         data = yaml.safe_load(substituted_content) or {}
+     except yaml.YAMLError as e:
+         logger.error("YAML parsing failed", path=abs_path, error=str(e))
+         raise
+
+     logger.debug("YAML parsed successfully", top_level_keys=list(data.keys()))
+
+     # Normalize pattern: blocks to transformer: + params: (user-friendly -> internal)
+     _normalize_pattern_to_transformer(data)
+
+     # Tag all nodes in this file with their source YAML path (for sql_file resolution)
+     _tag_nodes_with_source(data, abs_path)
+
+     # Handle imports
+     imports = data.pop("imports", [])
+     if imports:
+         if isinstance(imports, str):
+             imports = [imports]
+
+         logger.debug("Processing imports", import_count=len(imports), imports=imports)
+
+         # Start with the current data as the base
+         merged_data = data.copy()
+
+         for import_path in imports:
+             # Resolve relative to the current file
+             if not os.path.isabs(import_path):
+                 full_import_path = os.path.join(base_dir, import_path)
+             else:
+                 full_import_path = import_path
+
+             logger.debug(
+                 "Resolving import",
+                 import_path=import_path,
+                 resolved_path=full_import_path,
+             )
+
+             if not os.path.exists(full_import_path):
+                 logger.error(
+                     "Imported configuration file not found",
+                     import_path=import_path,
+                     resolved_path=full_import_path,
+                     parent_file=abs_path,
+                 )
+                 raise FileNotFoundError(f"Imported YAML file not found: {full_import_path}")
+
+             # Recursive load; env is passed down to imported files too
+             logger.debug("Loading imported configuration", path=full_import_path)
+             try:
+                 imported_data = load_yaml_with_env(full_import_path, env=env)
+             except Exception as e:
+                 logger.error(
+                     "Failed to load imported configuration",
+                     import_path=import_path,
+                     resolved_path=full_import_path,
+                     parent_file=abs_path,
+                     error=str(e),
+                 )
+                 raise ValueError(
+                     f"Failed to load import '{import_path}' (resolved: {full_import_path}): {e}"
+                 ) from e
+
+             # Merge imported data INTO the current data, so the main file acts
+             # as the "master" that accumulates imports
+             logger.debug(
+                 "Merging imported configuration",
+                 import_path=full_import_path,
+                 imported_keys=list(imported_data.keys()),
+             )
+             merged_data = _deep_merge(merged_data, imported_data)
+
+         data = merged_data
+         logger.debug("All imports processed and merged", import_count=len(imports))
+
+     # Apply environment overrides from the "environments" block in the main file
+     if env:
+         environments = data.get("environments", {})
+         if env in environments:
+             logger.debug(
+                 "Applying environment overrides from environments block",
+                 env=env,
+                 override_keys=list(environments[env].keys()),
+             )
+             override = environments[env]
+             data = _deep_merge(data, override)
+         else:
+             logger.debug(
+                 "No environment override found in environments block",
+                 env=env,
+                 available_environments=list(environments.keys()),
+             )
+
+     # Apply environment overrides from an external env.{env}.yaml file
+     if env:
+         env_file_name = f"env.{env}.yaml"
+         env_file_path = os.path.join(base_dir, env_file_name)
+         if os.path.exists(env_file_path):
+             logger.debug(
+                 "Loading external environment override file",
+                 env=env,
+                 env_file=env_file_path,
+             )
+             # Load the env file recursively so it can have imports of its own.
+             # Pass env=None so the override file cannot trigger another
+             # env.{env}.yaml lookup for the same environment.
+             env_data = load_yaml_with_env(env_file_path, env=None)
+             logger.debug(
+                 "Merging external environment overrides",
+                 env_file=env_file_path,
+                 override_keys=list(env_data.keys()),
+             )
+             data = _deep_merge(data, env_data)
+         else:
+             logger.debug(
+                 "No external environment override file found",
+                 env=env,
+                 expected_path=env_file_path,
+             )
+
+     logger.debug(
+         "Configuration loading complete",
+         path=path,
+         env=env,
+         final_keys=list(data.keys()),
+     )
+
+     return data
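Putting the pieces together, a minimal end-to-end sketch; the file names, keys, and the DB_HOST variable are illustrative assumptions, not taken from odibi's documentation:

    # Assumed files (illustrative):
    #
    # config/pipeline.yaml:
    #   imports:
    #     - connections.yaml
    #   pipelines:
    #     - pipeline: sales
    #       nodes:
    #         - name: dim_customer
    #           pattern: {type: dimension, params: {natural_key: customer_id}}
    #   environments:
    #     prod:
    #       log_level: WARNING
    #
    # config/connections.yaml:
    #   connections:
    #     warehouse: {host: "${env:DB_HOST}"}
    import os

    from odibi.utils.config_loader import load_yaml_with_env

    os.environ["DB_HOST"] = "db.example.com"
    config = load_yaml_with_env("config/pipeline.yaml", env="prod")

    assert config["connections"]["warehouse"]["host"] == "db.example.com"    # substituted
    assert config["pipelines"][0]["nodes"][0]["transformer"] == "dimension"  # normalized
    assert config["log_level"] == "WARNING"                                  # prod override applied
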
odibi/utils/console.py ADDED
@@ -0,0 +1,231 @@
+ """Rich console utilities for polished terminal/notebook output.
+
+ This module provides Rich-based console output with graceful fallback
+ when Rich is not installed. Works in both CLI and Jupyter/Databricks notebooks.
+ """
+
+ from typing import Any, Dict, List, Optional, Union
+
+ _RICH_AVAILABLE: Optional[bool] = None
+
+
+ def is_rich_available() -> bool:
+     """Check if the Rich library is available.
+
+     Returns:
+         True if Rich is installed and importable, False otherwise.
+     """
+     global _RICH_AVAILABLE
+     if _RICH_AVAILABLE is None:
+         try:
+             import importlib.util
+
+             _RICH_AVAILABLE = importlib.util.find_spec("rich") is not None
+         except (ImportError, ModuleNotFoundError):
+             _RICH_AVAILABLE = False
+     return _RICH_AVAILABLE
+
+
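The probe runs once per process and is cached in the module-level _RICH_AVAILABLE; a small sketch of what callers can rely on:

    from odibi.utils import console

    first = console.is_rich_available()   # probes importlib.util.find_spec("rich")
    second = console.is_rich_available()  # answered from the cached result
    assert first == second                # stable for the life of the process
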
+ def get_console() -> Optional[Any]:
+     """Get a Rich Console instance.
+
+     Returns:
+         Rich Console instance if available, None otherwise.
+     """
+     if not is_rich_available():
+         return None
+
+     from rich.console import Console
+
+     return Console()
+
+
+ def _is_notebook_environment() -> bool:
+     """Detect if running in a Jupyter/Databricks notebook."""
+     try:
+         from IPython import get_ipython
+
+         shell = get_ipython()
+         if shell is None:
+             return False
+         shell_class = shell.__class__.__name__
+         return shell_class in ("ZMQInteractiveShell", "DatabricksShell", "Shell")
+     except (ImportError, NameError):
+         return False
+
+
+ def success(message: str, prefix: str = "✓") -> None:
+     """Print a success message in green.
+
+     Args:
+         message: The message to display.
+         prefix: Prefix symbol (default: ✓).
+     """
+     if is_rich_available():
+         console = get_console()
+         console.print(f"[green]{prefix}[/green] {message}")
+     else:
+         print(f"{prefix} {message}")
+
+
+ def error(message: str, prefix: str = "✗") -> None:
+     """Print an error message in red.
+
+     Args:
+         message: The message to display.
+         prefix: Prefix symbol (default: ✗).
+     """
+     if is_rich_available():
+         console = get_console()
+         console.print(f"[red]{prefix}[/red] {message}")
+     else:
+         print(f"{prefix} {message}")
+
+
+ def warning(message: str, prefix: str = "⚠") -> None:
+     """Print a warning message in yellow.
+
+     Args:
+         message: The message to display.
+         prefix: Prefix symbol (default: ⚠).
+     """
+     if is_rich_available():
+         console = get_console()
+         console.print(f"[yellow]{prefix}[/yellow] {message}")
+     else:
+         print(f"{prefix} {message}")
+
+
+ def info(message: str, prefix: str = "ℹ") -> None:
+     """Print an info message in blue.
+
+     Args:
+         message: The message to display.
+         prefix: Prefix symbol (default: ℹ).
+     """
+     if is_rich_available():
+         console = get_console()
+         console.print(f"[blue]{prefix}[/blue] {message}")
+     else:
+         print(f"{prefix} {message}")
+
+
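All four status helpers share the same contract: Rich color markup when Rich is installed, a plain print otherwise. A usage sketch with illustrative messages:

    from odibi.utils.console import success, error, warning, info

    success("Pipeline 'sales' completed")      # ✓ in green (or plain "✓ ..." fallback)
    error("Node 'orders' failed validation")   # ✗ in red
    warning("3 rows quarantined")              # ⚠ in yellow
    info("Using engine: pandas", prefix="->")  # custom prefix is passed through
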
+ def print_table(
+     data: Union[List[Dict[str, Any]], Any],
+     title: Optional[str] = None,
+     columns: Optional[List[str]] = None,
+ ) -> None:
+     """Render data as a Rich table or a plain-text fallback.
+
+     Args:
+         data: List of dicts or a DataFrame to display.
+         title: Optional table title.
+         columns: Optional list of column names to include.
+     """
+     # Guard None explicitly: a bare truthiness check raises on pandas DataFrames
+     if data is None:
+         print("(empty)")
+         return
+
+     rows: List[Dict[str, Any]] = []
+     if hasattr(data, "to_dict"):
+         rows = data.to_dict("records")
+     elif isinstance(data, list):
+         rows = data
+     else:
+         print(str(data))
+         return
+
+     if not rows:
+         print("(empty)")
+         return
+
+     col_names = columns or list(rows[0].keys())
+
+     if is_rich_available():
+         from rich.table import Table
+
+         console = get_console()
+         table = Table(title=title, show_header=True, header_style="bold cyan")
+
+         for col in col_names:
+             table.add_column(col)
+
+         for row in rows:
+             table.add_row(*[str(row.get(col, "")) for col in col_names])
+
+         console.print(table)
+     else:
+         if title:
+             print(f"\n{title}")
+             print("-" * len(title))
+
+         max_widths = {col: len(col) for col in col_names}
+         for row in rows:
+             for col in col_names:
+                 max_widths[col] = max(max_widths[col], len(str(row.get(col, ""))))
+
+         header = " | ".join(col.ljust(max_widths[col]) for col in col_names)
+         print(header)
+         print("-" * len(header))
+
+         for row in rows:
+             line = " | ".join(str(row.get(col, "")).ljust(max_widths[col]) for col in col_names)
+             print(line)
+
+
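A usage sketch for the table renderer (row contents are illustrative); a pandas DataFrame would take the to_dict("records") path instead:

    from odibi.utils.console import print_table

    rows = [
        {"node": "orders", "status": "ok", "rows": 1200},
        {"node": "customers", "status": "ok", "rows": 87},
    ]
    # Rich installed: a titled table with a bold-cyan header row.
    # Fallback: left-justified columns separated by " | " under a dashed rule.
    print_table(rows, title="Run summary", columns=["node", "status"])
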
+ def print_panel(
+     content: str,
+     title: Optional[str] = None,
+     border_style: str = "blue",
+     padding: tuple = (0, 1),
+ ) -> None:
+     """Display content in a boxed panel.
+
+     Args:
+         content: The content to display.
+         title: Optional panel title.
+         border_style: Border color/style (default: blue).
+         padding: Tuple of (vertical, horizontal) padding.
+     """
+     if is_rich_available():
+         from rich.panel import Panel
+
+         console = get_console()
+         panel = Panel(content, title=title, border_style=border_style, padding=padding)
+         console.print(panel)
+     else:
+         width = max(len(line) for line in content.split("\n")) + 4
+         if title:
+             width = max(width, len(title) + 6)
+
+         border = "─" * width
+         print(f"┌{border}┐")
+         if title:
+             centered_title = f" {title} ".center(width)
+             print(f"│{centered_title}│")
+             print(f"├{border}┤")
+
+         for line in content.split("\n"):
+             padded = f" {line} ".ljust(width)
+             print(f"│{padded}│")
+
+         print(f"└{border}┘")
+
+
+ def print_rule(title: Optional[str] = None, style: str = "blue") -> None:
+     """Print a horizontal rule/divider.
+
+     Args:
+         title: Optional centered title in the rule.
+         style: Line color/style.
+     """
+     if is_rich_available():
+         from rich.rule import Rule
+
+         console = get_console()
+         console.print(Rule(title, style=style))
+     else:
+         if title:
+             print(f"\n{'─' * 10} {title} {'─' * 10}\n")
+         else:
+             print("─" * 40)
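A closing usage sketch for the panel and rule helpers (messages are illustrative); both degrade to box-drawing characters when Rich is absent:

    from odibi.utils.console import print_panel, print_rule

    print_rule("odibi doctor")
    print_panel("All checks passed", title="Result", border_style="green")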