churnkit-0.75.0a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (302)
  1. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/00_start_here.ipynb +647 -0
  2. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01_data_discovery.ipynb +1165 -0
  3. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01a_a_temporal_text_deep_dive.ipynb +961 -0
  4. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01a_temporal_deep_dive.ipynb +1690 -0
  5. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01b_temporal_quality.ipynb +679 -0
  6. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01c_temporal_patterns.ipynb +3305 -0
  7. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/01d_event_aggregation.ipynb +1463 -0
  8. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/02_column_deep_dive.ipynb +1430 -0
  9. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/02a_text_columns_deep_dive.ipynb +854 -0
  10. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/03_quality_assessment.ipynb +1639 -0
  11. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/04_relationship_analysis.ipynb +1890 -0
  12. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/05_multi_dataset.ipynb +1457 -0
  13. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/06_feature_opportunities.ipynb +1624 -0
  14. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/07_modeling_readiness.ipynb +780 -0
  15. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/08_baseline_experiments.ipynb +979 -0
  16. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/09_business_alignment.ipynb +572 -0
  17. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/10_spec_generation.ipynb +1179 -0
  18. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/11_scoring_validation.ipynb +1418 -0
  19. churnkit-0.75.0a1.data/data/share/churnkit/exploration_notebooks/12_view_documentation.ipynb +151 -0
  20. churnkit-0.75.0a1.dist-info/METADATA +229 -0
  21. churnkit-0.75.0a1.dist-info/RECORD +302 -0
  22. churnkit-0.75.0a1.dist-info/WHEEL +4 -0
  23. churnkit-0.75.0a1.dist-info/entry_points.txt +2 -0
  24. churnkit-0.75.0a1.dist-info/licenses/LICENSE +202 -0
  25. customer_retention/__init__.py +37 -0
  26. customer_retention/analysis/__init__.py +0 -0
  27. customer_retention/analysis/auto_explorer/__init__.py +62 -0
  28. customer_retention/analysis/auto_explorer/exploration_manager.py +470 -0
  29. customer_retention/analysis/auto_explorer/explorer.py +258 -0
  30. customer_retention/analysis/auto_explorer/findings.py +291 -0
  31. customer_retention/analysis/auto_explorer/layered_recommendations.py +485 -0
  32. customer_retention/analysis/auto_explorer/recommendation_builder.py +148 -0
  33. customer_retention/analysis/auto_explorer/recommendations.py +418 -0
  34. customer_retention/analysis/business/__init__.py +26 -0
  35. customer_retention/analysis/business/ab_test_designer.py +144 -0
  36. customer_retention/analysis/business/fairness_analyzer.py +166 -0
  37. customer_retention/analysis/business/intervention_matcher.py +121 -0
  38. customer_retention/analysis/business/report_generator.py +222 -0
  39. customer_retention/analysis/business/risk_profile.py +199 -0
  40. customer_retention/analysis/business/roi_analyzer.py +139 -0
  41. customer_retention/analysis/diagnostics/__init__.py +20 -0
  42. customer_retention/analysis/diagnostics/calibration_analyzer.py +133 -0
  43. customer_retention/analysis/diagnostics/cv_analyzer.py +144 -0
  44. customer_retention/analysis/diagnostics/error_analyzer.py +107 -0
  45. customer_retention/analysis/diagnostics/leakage_detector.py +394 -0
  46. customer_retention/analysis/diagnostics/noise_tester.py +140 -0
  47. customer_retention/analysis/diagnostics/overfitting_analyzer.py +190 -0
  48. customer_retention/analysis/diagnostics/segment_analyzer.py +122 -0
  49. customer_retention/analysis/discovery/__init__.py +8 -0
  50. customer_retention/analysis/discovery/config_generator.py +49 -0
  51. customer_retention/analysis/discovery/discovery_flow.py +19 -0
  52. customer_retention/analysis/discovery/type_inferencer.py +147 -0
  53. customer_retention/analysis/interpretability/__init__.py +13 -0
  54. customer_retention/analysis/interpretability/cohort_analyzer.py +185 -0
  55. customer_retention/analysis/interpretability/counterfactual.py +175 -0
  56. customer_retention/analysis/interpretability/individual_explainer.py +141 -0
  57. customer_retention/analysis/interpretability/pdp_generator.py +103 -0
  58. customer_retention/analysis/interpretability/shap_explainer.py +106 -0
  59. customer_retention/analysis/jupyter_save_hook.py +28 -0
  60. customer_retention/analysis/notebook_html_exporter.py +136 -0
  61. customer_retention/analysis/notebook_progress.py +60 -0
  62. customer_retention/analysis/plotly_preprocessor.py +154 -0
  63. customer_retention/analysis/recommendations/__init__.py +54 -0
  64. customer_retention/analysis/recommendations/base.py +158 -0
  65. customer_retention/analysis/recommendations/cleaning/__init__.py +11 -0
  66. customer_retention/analysis/recommendations/cleaning/consistency.py +107 -0
  67. customer_retention/analysis/recommendations/cleaning/deduplicate.py +94 -0
  68. customer_retention/analysis/recommendations/cleaning/impute.py +67 -0
  69. customer_retention/analysis/recommendations/cleaning/outlier.py +71 -0
  70. customer_retention/analysis/recommendations/datetime/__init__.py +3 -0
  71. customer_retention/analysis/recommendations/datetime/extract.py +149 -0
  72. customer_retention/analysis/recommendations/encoding/__init__.py +3 -0
  73. customer_retention/analysis/recommendations/encoding/categorical.py +114 -0
  74. customer_retention/analysis/recommendations/pipeline.py +74 -0
  75. customer_retention/analysis/recommendations/registry.py +76 -0
  76. customer_retention/analysis/recommendations/selection/__init__.py +3 -0
  77. customer_retention/analysis/recommendations/selection/drop_column.py +56 -0
  78. customer_retention/analysis/recommendations/transform/__init__.py +4 -0
  79. customer_retention/analysis/recommendations/transform/power.py +94 -0
  80. customer_retention/analysis/recommendations/transform/scale.py +112 -0
  81. customer_retention/analysis/visualization/__init__.py +15 -0
  82. customer_retention/analysis/visualization/chart_builder.py +2619 -0
  83. customer_retention/analysis/visualization/console.py +122 -0
  84. customer_retention/analysis/visualization/display.py +171 -0
  85. customer_retention/analysis/visualization/number_formatter.py +36 -0
  86. customer_retention/artifacts/__init__.py +3 -0
  87. customer_retention/artifacts/fit_artifact_registry.py +146 -0
  88. customer_retention/cli.py +93 -0
  89. customer_retention/core/__init__.py +0 -0
  90. customer_retention/core/compat/__init__.py +193 -0
  91. customer_retention/core/compat/detection.py +99 -0
  92. customer_retention/core/compat/ops.py +48 -0
  93. customer_retention/core/compat/pandas_backend.py +57 -0
  94. customer_retention/core/compat/spark_backend.py +75 -0
  95. customer_retention/core/components/__init__.py +11 -0
  96. customer_retention/core/components/base.py +79 -0
  97. customer_retention/core/components/components/__init__.py +13 -0
  98. customer_retention/core/components/components/deployer.py +26 -0
  99. customer_retention/core/components/components/explainer.py +26 -0
  100. customer_retention/core/components/components/feature_eng.py +33 -0
  101. customer_retention/core/components/components/ingester.py +34 -0
  102. customer_retention/core/components/components/profiler.py +34 -0
  103. customer_retention/core/components/components/trainer.py +38 -0
  104. customer_retention/core/components/components/transformer.py +36 -0
  105. customer_retention/core/components/components/validator.py +37 -0
  106. customer_retention/core/components/enums.py +33 -0
  107. customer_retention/core/components/orchestrator.py +94 -0
  108. customer_retention/core/components/registry.py +59 -0
  109. customer_retention/core/config/__init__.py +39 -0
  110. customer_retention/core/config/column_config.py +95 -0
  111. customer_retention/core/config/experiments.py +71 -0
  112. customer_retention/core/config/pipeline_config.py +117 -0
  113. customer_retention/core/config/source_config.py +83 -0
  114. customer_retention/core/utils/__init__.py +28 -0
  115. customer_retention/core/utils/leakage.py +85 -0
  116. customer_retention/core/utils/severity.py +53 -0
  117. customer_retention/core/utils/statistics.py +90 -0
  118. customer_retention/generators/__init__.py +0 -0
  119. customer_retention/generators/notebook_generator/__init__.py +167 -0
  120. customer_retention/generators/notebook_generator/base.py +55 -0
  121. customer_retention/generators/notebook_generator/cell_builder.py +49 -0
  122. customer_retention/generators/notebook_generator/config.py +47 -0
  123. customer_retention/generators/notebook_generator/databricks_generator.py +48 -0
  124. customer_retention/generators/notebook_generator/local_generator.py +48 -0
  125. customer_retention/generators/notebook_generator/project_init.py +174 -0
  126. customer_retention/generators/notebook_generator/runner.py +150 -0
  127. customer_retention/generators/notebook_generator/script_generator.py +110 -0
  128. customer_retention/generators/notebook_generator/stages/__init__.py +19 -0
  129. customer_retention/generators/notebook_generator/stages/base_stage.py +86 -0
  130. customer_retention/generators/notebook_generator/stages/s01_ingestion.py +100 -0
  131. customer_retention/generators/notebook_generator/stages/s02_profiling.py +95 -0
  132. customer_retention/generators/notebook_generator/stages/s03_cleaning.py +180 -0
  133. customer_retention/generators/notebook_generator/stages/s04_transformation.py +165 -0
  134. customer_retention/generators/notebook_generator/stages/s05_feature_engineering.py +115 -0
  135. customer_retention/generators/notebook_generator/stages/s06_feature_selection.py +97 -0
  136. customer_retention/generators/notebook_generator/stages/s07_model_training.py +176 -0
  137. customer_retention/generators/notebook_generator/stages/s08_deployment.py +81 -0
  138. customer_retention/generators/notebook_generator/stages/s09_monitoring.py +112 -0
  139. customer_retention/generators/notebook_generator/stages/s10_batch_inference.py +642 -0
  140. customer_retention/generators/notebook_generator/stages/s11_feature_store.py +348 -0
  141. customer_retention/generators/orchestration/__init__.py +23 -0
  142. customer_retention/generators/orchestration/code_generator.py +196 -0
  143. customer_retention/generators/orchestration/context.py +147 -0
  144. customer_retention/generators/orchestration/data_materializer.py +188 -0
  145. customer_retention/generators/orchestration/databricks_exporter.py +411 -0
  146. customer_retention/generators/orchestration/doc_generator.py +311 -0
  147. customer_retention/generators/pipeline_generator/__init__.py +26 -0
  148. customer_retention/generators/pipeline_generator/findings_parser.py +727 -0
  149. customer_retention/generators/pipeline_generator/generator.py +142 -0
  150. customer_retention/generators/pipeline_generator/models.py +166 -0
  151. customer_retention/generators/pipeline_generator/renderer.py +2125 -0
  152. customer_retention/generators/spec_generator/__init__.py +37 -0
  153. customer_retention/generators/spec_generator/databricks_generator.py +433 -0
  154. customer_retention/generators/spec_generator/generic_generator.py +373 -0
  155. customer_retention/generators/spec_generator/mlflow_pipeline_generator.py +685 -0
  156. customer_retention/generators/spec_generator/pipeline_spec.py +298 -0
  157. customer_retention/integrations/__init__.py +0 -0
  158. customer_retention/integrations/adapters/__init__.py +13 -0
  159. customer_retention/integrations/adapters/base.py +10 -0
  160. customer_retention/integrations/adapters/factory.py +25 -0
  161. customer_retention/integrations/adapters/feature_store/__init__.py +6 -0
  162. customer_retention/integrations/adapters/feature_store/base.py +57 -0
  163. customer_retention/integrations/adapters/feature_store/databricks.py +94 -0
  164. customer_retention/integrations/adapters/feature_store/feast_adapter.py +97 -0
  165. customer_retention/integrations/adapters/feature_store/local.py +75 -0
  166. customer_retention/integrations/adapters/mlflow/__init__.py +6 -0
  167. customer_retention/integrations/adapters/mlflow/base.py +32 -0
  168. customer_retention/integrations/adapters/mlflow/databricks.py +54 -0
  169. customer_retention/integrations/adapters/mlflow/experiment_tracker.py +161 -0
  170. customer_retention/integrations/adapters/mlflow/local.py +50 -0
  171. customer_retention/integrations/adapters/storage/__init__.py +5 -0
  172. customer_retention/integrations/adapters/storage/base.py +33 -0
  173. customer_retention/integrations/adapters/storage/databricks.py +76 -0
  174. customer_retention/integrations/adapters/storage/local.py +59 -0
  175. customer_retention/integrations/feature_store/__init__.py +47 -0
  176. customer_retention/integrations/feature_store/definitions.py +215 -0
  177. customer_retention/integrations/feature_store/manager.py +744 -0
  178. customer_retention/integrations/feature_store/registry.py +412 -0
  179. customer_retention/integrations/iteration/__init__.py +28 -0
  180. customer_retention/integrations/iteration/context.py +212 -0
  181. customer_retention/integrations/iteration/feedback_collector.py +184 -0
  182. customer_retention/integrations/iteration/orchestrator.py +168 -0
  183. customer_retention/integrations/iteration/recommendation_tracker.py +341 -0
  184. customer_retention/integrations/iteration/signals.py +212 -0
  185. customer_retention/integrations/llm_context/__init__.py +4 -0
  186. customer_retention/integrations/llm_context/context_builder.py +201 -0
  187. customer_retention/integrations/llm_context/prompts.py +100 -0
  188. customer_retention/integrations/streaming/__init__.py +103 -0
  189. customer_retention/integrations/streaming/batch_integration.py +149 -0
  190. customer_retention/integrations/streaming/early_warning_model.py +227 -0
  191. customer_retention/integrations/streaming/event_schema.py +214 -0
  192. customer_retention/integrations/streaming/online_store_writer.py +249 -0
  193. customer_retention/integrations/streaming/realtime_scorer.py +261 -0
  194. customer_retention/integrations/streaming/trigger_engine.py +293 -0
  195. customer_retention/integrations/streaming/window_aggregator.py +393 -0
  196. customer_retention/stages/__init__.py +0 -0
  197. customer_retention/stages/cleaning/__init__.py +9 -0
  198. customer_retention/stages/cleaning/base.py +28 -0
  199. customer_retention/stages/cleaning/missing_handler.py +160 -0
  200. customer_retention/stages/cleaning/outlier_handler.py +204 -0
  201. customer_retention/stages/deployment/__init__.py +28 -0
  202. customer_retention/stages/deployment/batch_scorer.py +106 -0
  203. customer_retention/stages/deployment/champion_challenger.py +299 -0
  204. customer_retention/stages/deployment/model_registry.py +182 -0
  205. customer_retention/stages/deployment/retraining_trigger.py +245 -0
  206. customer_retention/stages/features/__init__.py +73 -0
  207. customer_retention/stages/features/behavioral_features.py +266 -0
  208. customer_retention/stages/features/customer_segmentation.py +505 -0
  209. customer_retention/stages/features/feature_definitions.py +265 -0
  210. customer_retention/stages/features/feature_engineer.py +551 -0
  211. customer_retention/stages/features/feature_manifest.py +340 -0
  212. customer_retention/stages/features/feature_selector.py +239 -0
  213. customer_retention/stages/features/interaction_features.py +160 -0
  214. customer_retention/stages/features/temporal_features.py +243 -0
  215. customer_retention/stages/ingestion/__init__.py +9 -0
  216. customer_retention/stages/ingestion/load_result.py +32 -0
  217. customer_retention/stages/ingestion/loaders.py +195 -0
  218. customer_retention/stages/ingestion/source_registry.py +130 -0
  219. customer_retention/stages/modeling/__init__.py +31 -0
  220. customer_retention/stages/modeling/baseline_trainer.py +139 -0
  221. customer_retention/stages/modeling/cross_validator.py +125 -0
  222. customer_retention/stages/modeling/data_splitter.py +205 -0
  223. customer_retention/stages/modeling/feature_scaler.py +99 -0
  224. customer_retention/stages/modeling/hyperparameter_tuner.py +107 -0
  225. customer_retention/stages/modeling/imbalance_handler.py +282 -0
  226. customer_retention/stages/modeling/mlflow_logger.py +95 -0
  227. customer_retention/stages/modeling/model_comparator.py +149 -0
  228. customer_retention/stages/modeling/model_evaluator.py +138 -0
  229. customer_retention/stages/modeling/threshold_optimizer.py +131 -0
  230. customer_retention/stages/monitoring/__init__.py +37 -0
  231. customer_retention/stages/monitoring/alert_manager.py +328 -0
  232. customer_retention/stages/monitoring/drift_detector.py +201 -0
  233. customer_retention/stages/monitoring/performance_monitor.py +242 -0
  234. customer_retention/stages/preprocessing/__init__.py +5 -0
  235. customer_retention/stages/preprocessing/transformer_manager.py +284 -0
  236. customer_retention/stages/profiling/__init__.py +256 -0
  237. customer_retention/stages/profiling/categorical_distribution.py +269 -0
  238. customer_retention/stages/profiling/categorical_target_analyzer.py +274 -0
  239. customer_retention/stages/profiling/column_profiler.py +527 -0
  240. customer_retention/stages/profiling/distribution_analysis.py +483 -0
  241. customer_retention/stages/profiling/drift_detector.py +310 -0
  242. customer_retention/stages/profiling/feature_capacity.py +507 -0
  243. customer_retention/stages/profiling/pattern_analysis_config.py +513 -0
  244. customer_retention/stages/profiling/profile_result.py +212 -0
  245. customer_retention/stages/profiling/quality_checks.py +1632 -0
  246. customer_retention/stages/profiling/relationship_detector.py +256 -0
  247. customer_retention/stages/profiling/relationship_recommender.py +454 -0
  248. customer_retention/stages/profiling/report_generator.py +520 -0
  249. customer_retention/stages/profiling/scd_analyzer.py +151 -0
  250. customer_retention/stages/profiling/segment_analyzer.py +632 -0
  251. customer_retention/stages/profiling/segment_aware_outlier.py +265 -0
  252. customer_retention/stages/profiling/target_level_analyzer.py +217 -0
  253. customer_retention/stages/profiling/temporal_analyzer.py +388 -0
  254. customer_retention/stages/profiling/temporal_coverage.py +488 -0
  255. customer_retention/stages/profiling/temporal_feature_analyzer.py +692 -0
  256. customer_retention/stages/profiling/temporal_feature_engineer.py +703 -0
  257. customer_retention/stages/profiling/temporal_pattern_analyzer.py +636 -0
  258. customer_retention/stages/profiling/temporal_quality_checks.py +278 -0
  259. customer_retention/stages/profiling/temporal_target_analyzer.py +241 -0
  260. customer_retention/stages/profiling/text_embedder.py +87 -0
  261. customer_retention/stages/profiling/text_processor.py +115 -0
  262. customer_retention/stages/profiling/text_reducer.py +60 -0
  263. customer_retention/stages/profiling/time_series_profiler.py +303 -0
  264. customer_retention/stages/profiling/time_window_aggregator.py +376 -0
  265. customer_retention/stages/profiling/type_detector.py +382 -0
  266. customer_retention/stages/profiling/window_recommendation.py +288 -0
  267. customer_retention/stages/temporal/__init__.py +166 -0
  268. customer_retention/stages/temporal/access_guard.py +180 -0
  269. customer_retention/stages/temporal/cutoff_analyzer.py +235 -0
  270. customer_retention/stages/temporal/data_preparer.py +178 -0
  271. customer_retention/stages/temporal/point_in_time_join.py +134 -0
  272. customer_retention/stages/temporal/point_in_time_registry.py +148 -0
  273. customer_retention/stages/temporal/scenario_detector.py +163 -0
  274. customer_retention/stages/temporal/snapshot_manager.py +259 -0
  275. customer_retention/stages/temporal/synthetic_coordinator.py +66 -0
  276. customer_retention/stages/temporal/timestamp_discovery.py +531 -0
  277. customer_retention/stages/temporal/timestamp_manager.py +255 -0
  278. customer_retention/stages/transformation/__init__.py +13 -0
  279. customer_retention/stages/transformation/binary_handler.py +85 -0
  280. customer_retention/stages/transformation/categorical_encoder.py +245 -0
  281. customer_retention/stages/transformation/datetime_transformer.py +97 -0
  282. customer_retention/stages/transformation/numeric_transformer.py +181 -0
  283. customer_retention/stages/transformation/pipeline.py +257 -0
  284. customer_retention/stages/validation/__init__.py +60 -0
  285. customer_retention/stages/validation/adversarial_scoring_validator.py +205 -0
  286. customer_retention/stages/validation/business_sense_gate.py +173 -0
  287. customer_retention/stages/validation/data_quality_gate.py +235 -0
  288. customer_retention/stages/validation/data_validators.py +511 -0
  289. customer_retention/stages/validation/feature_quality_gate.py +183 -0
  290. customer_retention/stages/validation/gates.py +117 -0
  291. customer_retention/stages/validation/leakage_gate.py +352 -0
  292. customer_retention/stages/validation/model_validity_gate.py +213 -0
  293. customer_retention/stages/validation/pipeline_validation_runner.py +264 -0
  294. customer_retention/stages/validation/quality_scorer.py +544 -0
  295. customer_retention/stages/validation/rule_generator.py +57 -0
  296. customer_retention/stages/validation/scoring_pipeline_validator.py +446 -0
  297. customer_retention/stages/validation/timeseries_detector.py +769 -0
  298. customer_retention/transforms/__init__.py +47 -0
  299. customer_retention/transforms/artifact_store.py +50 -0
  300. customer_retention/transforms/executor.py +157 -0
  301. customer_retention/transforms/fitted.py +92 -0
  302. customer_retention/transforms/ops.py +148 -0
customer_retention/generators/orchestration/databricks_exporter.py
@@ -0,0 +1,411 @@
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+from customer_retention.analysis.auto_explorer.layered_recommendations import (
+    LayeredRecommendation,
+    RecommendationRegistry,
+)
+
+if TYPE_CHECKING:
+    from customer_retention.analysis.auto_explorer.findings import ExplorationFindings
+
+
+class DatabricksExporter:
+    CELL_SEPARATOR = "\n# COMMAND ----------\n"
+
+    def __init__(
+        self,
+        registry: RecommendationRegistry,
+        findings: Optional["ExplorationFindings"] = None,
+        catalog: str = "main",
+        schema: str = "default"
+    ):
+        self.registry = registry
+        self.findings = findings
+        self.catalog = catalog
+        self.schema = schema
+
+    def generate_notebook(self) -> str:
+        cells = [
+            self._header_cell(),
+            self._imports_cell(),
+            self._config_cell(),
+            self.generate_bronze_notebook(),
+            self.generate_silver_notebook(),
+            self.generate_gold_notebook(),
+        ]
+        return self.CELL_SEPARATOR.join(cells)
+
+    def generate_source_notebooks(self) -> Dict[str, str]:
+        notebooks = {}
+        for name, bronze in self.registry.sources.items():
+            notebooks[name] = self._generate_source_bronze_notebook(name, bronze)
+        return notebooks
+
+    def _generate_source_bronze_notebook(self, name: str, bronze) -> str:
+        lines = [
+            "# MAGIC %md",
+            f"# MAGIC ## Bronze Layer: {name}",
+            "",
+            "# COMMAND ----------",
+            "",
+            self._imports_cell(),
+            "",
+            "# COMMAND ----------",
+            "",
+            f"# Read from landing zone: {name}",
+            f'df_raw = spark.read.format("csv").option("header", "true").option("inferSchema", "true").load("{bronze.source_file}")',
+            "",
+            "# Apply cleaning transformations",
+            "df_bronze = df_raw",
+        ]
+        for rec in bronze.null_handling:
+            lines.extend(self._pyspark_null_handling(rec))
+        for rec in bronze.outlier_handling:
+            lines.extend(self._pyspark_outlier_handling(rec))
+        for rec in bronze.type_conversions:
+            lines.extend(self._pyspark_type_conversion(rec))
+        lines.extend([
+            "",
+            "# Write to bronze Delta table",
+            f'df_bronze.write.format("delta").mode("overwrite").saveAsTable("{self._source_table_path("bronze", name)}")',
+            "",
+            "display(df_bronze.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def generate_silver_merge_notebook(self) -> str:
+        lines = [
+            "# MAGIC %md",
+            "# MAGIC ## Silver Layer: Merge & Aggregations",
+            "",
+            "# COMMAND ----------",
+            "",
+            self._imports_cell(),
+            "",
+            "# COMMAND ----------",
+            "",
+            "# Read bronze tables",
+        ]
+        for name in self.registry.source_names:
+            lines.append(f'df_{name} = spark.table("{self._source_table_path("bronze", name)}")')
+        lines.append("")
+
+        if self.registry.silver and self.registry.silver.joins:
+            lines.append("# Merge sources")
+            for i, join_rec in enumerate(self.registry.silver.joins):
+                params = join_rec.parameters
+                left = params["left_source"]
+                right = params["right_source"]
+                keys = params["join_keys"]
+                join_type = params["join_type"]
+                if i == 0:
+                    lines.append(f'df_merged = df_{left}.join(df_{right}, on={keys}, how="{join_type}")')
+                else:
+                    if left == "_merged":
+                        lines.append(f'df_merged = df_merged.join(df_{right}, on={keys}, how="{join_type}")')
+                    else:
+                        lines.append(f'df_merged = df_{left}.join(df_{right}, on={keys}, how="{join_type}")')
+            lines.append("")
+            lines.append("df_silver = df_merged")
+        else:
+            first_source = self.registry.source_names[0] if self.registry.source_names else "data"
+            lines.append(f"df_silver = df_{first_source}")
+
+        if self.registry.silver:
+            entity_col = self.registry.silver.entity_column
+            for rec in self.registry.silver.aggregations:
+                lines.extend(self._pyspark_aggregation(rec, entity_col))
+
+        lines.extend([
+            "",
+            "# Write to silver Delta table",
+            f'df_silver.write.format("delta").mode("overwrite").saveAsTable("{self._table_path("silver")}")',
+            "",
+            "display(df_silver.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def generate_gold_features_notebook(self) -> str:
+        lines = [
+            "# MAGIC %md",
+            "# MAGIC ## Gold Layer: Feature Engineering",
+            "",
+            "# COMMAND ----------",
+            "",
+            self._imports_cell(),
+            "",
+            "# COMMAND ----------",
+            "",
+            "# Read from silver",
+            f'df_silver = spark.table("{self._table_path("silver")}")',
+            "",
+            "# Apply feature transformations",
+            "df_gold = df_silver",
+        ]
+        if self.registry.gold:
+            for rec in self.registry.gold.encoding:
+                lines.extend(self._pyspark_encoding(rec))
+            for rec in self.registry.gold.scaling:
+                lines.extend(self._pyspark_scaling(rec))
+            for rec in self.registry.gold.transformations:
+                lines.extend(self._pyspark_transformation(rec))
+        lines.extend([
+            "",
+            "# Write to gold Delta table (ML-ready)",
+            f'df_gold.write.format("delta").mode("overwrite").saveAsTable("{self._table_path("gold")}")',
+            "",
+            "display(df_gold.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def export_notebook_structure(self) -> Dict[str, Any]:
+        structure = {
+            "bronze": {},
+            "silver": self.generate_silver_merge_notebook(),
+            "gold": self.generate_gold_features_notebook(),
+        }
+        for name, code in self.generate_source_notebooks().items():
+            structure["bronze"][name] = code
+        return structure
+
+    def generate_bronze_notebook(self) -> str:
+        lines = [
+            "# MAGIC %md",
+            "# MAGIC ## Bronze Layer: Data Cleaning",
+            "",
+            "# COMMAND ----------",
+            "",
+            "# Read from landing zone",
+            f'df_raw = spark.read.format("csv").option("header", "true").option("inferSchema", "true").load("{self._landing_path()}")',
+            "",
+            "# Apply cleaning transformations",
+            "df_bronze = df_raw",
+        ]
+        if self.registry.bronze:
+            for rec in self.registry.bronze.null_handling:
+                lines.extend(self._pyspark_null_handling(rec))
+            for rec in self.registry.bronze.outlier_handling:
+                lines.extend(self._pyspark_outlier_handling(rec))
+            for rec in self.registry.bronze.type_conversions:
+                lines.extend(self._pyspark_type_conversion(rec))
+        lines.extend([
+            "",
+            "# Write to bronze Delta table",
+            f'df_bronze.write.format("delta").mode("overwrite").saveAsTable("{self._table_path("bronze")}")',
+            "",
+            "display(df_bronze.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def generate_silver_notebook(self) -> str:
+        lines = [
+            "# MAGIC %md",
+            "# MAGIC ## Silver Layer: Joins & Aggregations",
+            "",
+            "# COMMAND ----------",
+            "",
+            "# Read from bronze",
+            f'df_bronze = spark.table("{self._table_path("bronze")}")',
+            "",
+            "# Apply aggregations",
+            "df_silver = df_bronze",
+        ]
+        if self.registry.silver:
+            entity_col = self.registry.silver.entity_column
+            for rec in self.registry.silver.aggregations:
+                lines.extend(self._pyspark_aggregation(rec, entity_col))
+        lines.extend([
+            "",
+            "# Write to silver Delta table",
+            f'df_silver.write.format("delta").mode("overwrite").saveAsTable("{self._table_path("silver")}")',
+            "",
+            "display(df_silver.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def generate_gold_notebook(self) -> str:
+        lines = [
+            "# MAGIC %md",
+            "# MAGIC ## Gold Layer: Feature Engineering",
+            "",
+            "# COMMAND ----------",
+            "",
+            "# Read from silver",
+            f'df_silver = spark.table("{self._table_path("silver")}")',
+            "",
+            "# Apply feature transformations",
+            "df_gold = df_silver",
+        ]
+        if self.registry.gold:
+            for rec in self.registry.gold.encoding:
+                lines.extend(self._pyspark_encoding(rec))
+            for rec in self.registry.gold.scaling:
+                lines.extend(self._pyspark_scaling(rec))
+            for rec in self.registry.gold.transformations:
+                lines.extend(self._pyspark_transformation(rec))
+        lines.extend([
+            "",
+            "# Write to gold Delta table (ML-ready)",
+            f'df_gold.write.format("delta").mode("overwrite").saveAsTable("{self._table_path("gold")}")',
+            "",
+            "display(df_gold.limit(10))",
+        ])
+        return "\n".join(lines)
+
+    def to_notebook_cells(self) -> List[Dict[str, str]]:
+        return [
+            {"content": self._header_cell(), "type": "markdown"},
+            {"content": self._imports_cell(), "type": "code"},
+            {"content": self._config_cell(), "type": "code"},
+            {"content": self.generate_bronze_notebook(), "type": "code"},
+            {"content": self.generate_silver_notebook(), "type": "code"},
+            {"content": self.generate_gold_notebook(), "type": "code"},
+        ]
+
+    def _header_cell(self) -> str:
+        source = self.findings.source_path if self.findings else "data"
+        return f"""# MAGIC %md
+# MAGIC # Data Pipeline: {source}
+# MAGIC
+# MAGIC Auto-generated pipeline using medallion architecture.
+# MAGIC
+# MAGIC | Layer | Description |
+# MAGIC |-------|-------------|
+# MAGIC | Bronze | Cleaned raw data |
+# MAGIC | Silver | Aggregated/joined data |
+# MAGIC | Gold | ML-ready features |"""
+
+    def _imports_cell(self) -> str:
+        return """from pyspark.sql import functions as F
+from pyspark.sql.window import Window
+from pyspark.ml.feature import StringIndexer, OneHotEncoder, StandardScaler, VectorAssembler
+from pyspark.ml import Pipeline"""
+
+    def _config_cell(self) -> str:
+        return f"""# Configuration
+CATALOG = "{self.catalog}"
+SCHEMA = "{self.schema}"
+LANDING_PATH = "{self._landing_path()}"
+
+# Set catalog context
+spark.sql(f"USE CATALOG {{CATALOG}}")
+spark.sql(f"USE SCHEMA {{SCHEMA}}")"""
+
+    def _landing_path(self) -> str:
+        if self.findings:
+            return self.findings.source_path
+        if self.registry.bronze:
+            return self.registry.bronze.source_file
+        return "/mnt/landing/data"
+
+    def _table_path(self, layer: str) -> str:
+        return f"{self.catalog}.{self.schema}.{layer}_customers"
+
+    def _source_table_path(self, layer: str, source_name: str) -> str:
+        return f"{self.catalog}.{self.schema}.{layer}_{source_name}"
+
+    def _pyspark_null_handling(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        strategy = rec.parameters.get("strategy", "median")
+        lines = ["", f"# {rec.rationale}"]
+        if strategy == "median":
+            lines.extend([
+                f"median_val = df_bronze.approxQuantile('{col}', [0.5], 0.01)[0]",
+                f"df_bronze = df_bronze.na.fill({{'{col}': median_val}})",
+            ])
+        elif strategy == "mean":
+            lines.extend([
+                f"mean_val = df_bronze.agg(F.mean('{col}')).collect()[0][0]",
+                f"df_bronze = df_bronze.na.fill({{'{col}': mean_val}})",
+            ])
+        elif strategy == "mode":
+            lines.extend([
+                f"mode_val = df_bronze.groupBy('{col}').count().orderBy(F.desc('count')).first()[0]",
+                f"df_bronze = df_bronze.na.fill({{'{col}': mode_val}})",
+            ])
+        else:
+            lines.append(f"df_bronze = df_bronze.na.fill({{'{col}': 0}})")
+        return lines
+
+    def _pyspark_outlier_handling(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        factor = rec.parameters.get("factor", 1.5)
+        return [
+            "",
+            f"# {rec.rationale}",
+            f"quantiles = df_bronze.approxQuantile('{col}', [0.25, 0.75], 0.01)",
+            "q1, q3 = quantiles[0], quantiles[1]",
+            "iqr = q3 - q1",
+            f"lower_bound = q1 - {factor} * iqr",
+            f"upper_bound = q3 + {factor} * iqr",
+            f"df_bronze = df_bronze.withColumn('{col}', F.when(F.col('{col}') < lower_bound, lower_bound)",
+            f"    .when(F.col('{col}') > upper_bound, upper_bound)",
+            f"    .otherwise(F.col('{col}')))",
+        ]
+
+    def _pyspark_type_conversion(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        target_type = rec.parameters.get("target_type", "string")
+        pyspark_type = {"datetime": "timestamp", "int": "integer", "float": "double"}.get(target_type, target_type)
+        return [
+            "",
+            f"# {rec.rationale}",
+            f"df_bronze = df_bronze.withColumn('{col}', F.col('{col}').cast('{pyspark_type}'))",
+        ]
+
+    def _pyspark_aggregation(self, rec: LayeredRecommendation, entity_col: str) -> List[str]:
+        col = rec.target_column
+        agg = rec.parameters.get("aggregation", "sum")
+        feature_name = f"{col}_{agg}"
+        window = f"Window.partitionBy('{entity_col}')"
+        agg_func = {"sum": "F.sum", "mean": "F.mean", "avg": "F.avg", "count": "F.count", "max": "F.max", "min": "F.min"}.get(agg, "F.sum")
+        return [
+            "",
+            f"# {rec.rationale}",
+            f"window_spec = {window}",
+            f"df_silver = df_silver.withColumn('{feature_name}', {agg_func}('{col}').over(window_spec))",
+        ]
+
+    def _pyspark_encoding(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        method = rec.parameters.get("method", "one_hot")
+        if method == "one_hot":
+            return [
+                "",
+                f"# {rec.rationale}",
+                f"indexer_{col} = StringIndexer(inputCol='{col}', outputCol='{col}_idx', handleInvalid='keep')",
+                f"encoder_{col} = OneHotEncoder(inputCol='{col}_idx', outputCol='{col}_onehot')",
+                f"pipeline_{col} = Pipeline(stages=[indexer_{col}, encoder_{col}])",
+                f"df_gold = pipeline_{col}.fit(df_gold).transform(df_gold)",
+                f"df_gold = df_gold.drop('{col}', '{col}_idx')",
+            ]
+        return ["", f"# {rec.rationale} - {method} encoding (implement as needed)"]
+
+    def _pyspark_scaling(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        return [
+            "",
+            f"# {rec.rationale}",
+            f"assembler_{col} = VectorAssembler(inputCols=['{col}'], outputCol='{col}_vec')",
+            f"scaler_{col} = StandardScaler(inputCol='{col}_vec', outputCol='{col}_scaled', withMean=True, withStd=True)",
+            f"df_gold = assembler_{col}.transform(df_gold)",
+            f"df_gold = scaler_{col}.fit(df_gold).transform(df_gold)",
+            f"df_gold = df_gold.drop('{col}', '{col}_vec')",
+        ]
+
+    def _pyspark_transformation(self, rec: LayeredRecommendation) -> List[str]:
+        col = rec.target_column
+        method = rec.parameters.get("method", "log")
+        if method == "log":
+            return [
+                "",
+                f"# {rec.rationale}",
+                f"df_gold = df_gold.withColumn('{col}', F.log1p(F.col('{col}')))",
+            ]
+        elif method == "sqrt":
+            return [
+                "",
+                f"# {rec.rationale}",
+                f"df_gold = df_gold.withColumn('{col}', F.sqrt(F.col('{col}')))",
+            ]
+        return []
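
For orientation, below is a minimal usage sketch of the exporter added in this diff. It only calls methods visible above (the DatabricksExporter constructor, export_notebook_structure, generate_notebook); the import path is assumed from the file listing (customer_retention/generators/orchestration/databricks_exporter.py), the RecommendationRegistry instance is assumed to come from churnkit's auto-explorer output (not shown here), and the function name, catalog/schema values, and output filenames are illustrative only.

from pathlib import Path

from customer_retention.analysis.auto_explorer.layered_recommendations import RecommendationRegistry
from customer_retention.generators.orchestration.databricks_exporter import DatabricksExporter  # module path assumed


def export_medallion_notebooks(registry: RecommendationRegistry, out_dir: str = "notebooks") -> None:
    # One notebook per bronze source, plus shared silver and gold notebooks.
    exporter = DatabricksExporter(registry, catalog="main", schema="churn")
    structure = exporter.export_notebook_structure()

    out = Path(out_dir)
    out.mkdir(parents=True, exist_ok=True)
    for name, code in structure["bronze"].items():
        (out / f"01_bronze_{name}.py").write_text(code)
    (out / "02_silver_merge.py").write_text(structure["silver"])
    (out / "03_gold_features.py").write_text(structure["gold"])

    # Alternatively, a single combined notebook whose cells are joined by
    # Databricks "# COMMAND ----------" separators:
    (out / "pipeline.py").write_text(exporter.generate_notebook())

Each generated notebook is plain Python source with "# MAGIC %md" markdown cells and "# COMMAND ----------" cell separators, so it can be imported directly into a Databricks workspace.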