bidviz 1.0.0-py3-none-any.whl → 1.1.1-py3-none-any.whl
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- bidviz/__init__.py +2 -0
- bidviz/polars/__init__.py +51 -0
- bidviz/polars/core/__init__.py +5 -0
- bidviz/polars/core/base.py +43 -0
- bidviz/polars/transformer.py +337 -0
- bidviz/polars/transformers/__init__.py +31 -0
- bidviz/polars/transformers/bar.py +68 -0
- bidviz/polars/transformers/heatmap.py +120 -0
- bidviz/polars/transformers/kpi.py +60 -0
- bidviz/polars/transformers/line.py +126 -0
- bidviz/polars/transformers/other.py +108 -0
- bidviz/polars/transformers/pie.py +57 -0
- bidviz/polars/transformers/table.py +48 -0
- bidviz/polars/utils.py +220 -0
- {bidviz-1.0.0.dist-info → bidviz-1.1.1.dist-info}/METADATA +52 -4
- bidviz-1.1.1.dist-info/RECORD +32 -0
- {bidviz-1.0.0.dist-info → bidviz-1.1.1.dist-info}/WHEEL +1 -1
- {bidviz-1.0.0.dist-info → bidviz-1.1.1.dist-info}/licenses/LICENSE +0 -0
- {bidviz-1.0.0.dist-info → bidviz-1.1.1.dist-info}/top_level.txt +0 -0
- bidviz-1.0.0.dist-info/RECORD +0 -19
bidviz/polars/transformers/kpi.py
ADDED
@@ -0,0 +1,60 @@
"""KPI Cards transformer for Polars DataFrames."""

from typing import Any, Dict

import polars as pl

from bidviz.exceptions import TransformationError
from bidviz.polars.core.base import BaseChartTransformer
from bidviz.polars.utils import format_label, safe_get_value


class KPICardsTransformer(BaseChartTransformer):
    """Transform single-row Polars DataFrame into KPI cards."""

    def transform(self, df: pl.DataFrame) -> Dict[str, Any]:
        """
        Transform a single-row Polars DataFrame into KPI cards for dashboard metrics.

        Args:
            df: Single-row Polars DataFrame containing metrics

        Returns:
            Dict with chart_type='kpi_cards' and list of card data

        Raises:
            TransformationError: If DataFrame has more than one row
        """
        try:
            if len(df) == 0:
                return {"chart_type": "kpi_cards", "data": []}

            if len(df) > 1:
                raise TransformationError(
                    "KPI cards expect a single-row DataFrame",
                    chart_type="kpi_cards",
                    df_shape=df.shape,
                )

            row = df.row(0, named=True)
            cards = []

            for column in df.columns:
                cards.append(
                    {
                        "key": column,
                        "label": format_label(column),
                        "value": safe_get_value(row[column]),
                    }
                )

            return {"chart_type": "kpi_cards", "data": cards}

        except Exception as e:
            if isinstance(e, TransformationError):
                raise
            raise TransformationError(
                f"Failed to transform KPI cards: {str(e)}",
                chart_type="kpi_cards",
                df_shape=df.shape,
            )
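
A minimal usage sketch of the new KPI transformer, for context. It assumes KPICardsTransformer can be instantiated with no constructor arguments (the BaseChartTransformer definition is not shown in this diff), so treat it as illustrative rather than definitive.

# Hypothetical usage sketch; assumes a no-argument constructor
# inherited from BaseChartTransformer.
import polars as pl
from bidviz.polars.transformers.kpi import KPICardsTransformer

# A single-row DataFrame of pre-aggregated metrics.
metrics = pl.DataFrame({"total_gmv": [125000.0], "order_count": [842]})

result = KPICardsTransformer().transform(metrics)
# result == {
#     "chart_type": "kpi_cards",
#     "data": [
#         {"key": "total_gmv", "label": "Total Gmv", "value": 125000.0},
#         {"key": "order_count", "label": "Order Count", "value": 842},
#     ],
# }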
bidviz/polars/transformers/line.py
ADDED
@@ -0,0 +1,126 @@
"""Line chart transformers for Polars DataFrames."""

from typing import Any, Dict, List, Optional

import polars as pl

from bidviz.exceptions import TransformationError
from bidviz.polars.core.base import BaseChartTransformer
from bidviz.polars.utils import format_label, safe_get_value, validate_columns


class LineChartTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into line chart data."""

    def transform(
        self,
        df: pl.DataFrame,
        x_column: str,
        y_column: str,
        series_name: Optional[str] = None,
    ) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into line chart data for time series or trends.

        Args:
            df: Polars DataFrame containing the data
            x_column: Column name for x-axis
            y_column: Column name for y-axis
            series_name: Optional custom name for the data series

        Returns:
            Dict with chart_type='line_chart', data points, and labels
        """
        try:
            validate_columns(df, [x_column, y_column])

            data = []
            for row in df.iter_rows(named=True):
                data.append(
                    {
                        "x": str(safe_get_value(row[x_column])),
                        "y": safe_get_value(row[y_column]),
                    }
                )

            return {
                "chart_type": "line_chart",
                "data": data,
                "series_name": series_name or format_label(y_column),
                "x_label": format_label(x_column),
                "y_label": format_label(y_column),
            }

        except ValueError as e:
            raise TransformationError(str(e), chart_type="line_chart", df_shape=df.shape)
        except Exception as e:
            raise TransformationError(
                f"Failed to transform line chart: {str(e)}",
                chart_type="line_chart",
                df_shape=df.shape,
            )


class MultiLineChartTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into multi-line chart data."""

    def transform(
        self,
        df: pl.DataFrame,
        x_column: str,
        y_columns: List[str],
        series_names: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into multi-line chart for comparing multiple series.

        Args:
            df: Polars DataFrame containing the data
            x_column: Column name for x-axis
            y_columns: List of column names for y-axis
            series_names: Optional custom names for each series

        Returns:
            Dict with chart_type='multi_line_chart' and series data
        """
        try:
            validate_columns(df, [x_column] + y_columns)

            if series_names and len(series_names) != len(y_columns):
                raise TransformationError(
                    "Number of series_names must match number of y_columns",
                    chart_type="multi_line_chart",
                )

            series = []
            for idx, y_col in enumerate(y_columns):
                data = []
                for row in df.iter_rows(named=True):
                    data.append(
                        {
                            "x": str(safe_get_value(row[x_column])),
                            "y": safe_get_value(row[y_col]),
                        }
                    )

                series.append(
                    {
                        "name": series_names[idx] if series_names else format_label(y_col),
                        "data": data,
                    }
                )

            return {
                "chart_type": "multi_line_chart",
                "series": series,
                "x_label": format_label(x_column),
            }

        except ValueError as e:
            raise TransformationError(str(e), chart_type="multi_line_chart", df_shape=df.shape)
        except Exception as e:
            raise TransformationError(
                f"Failed to transform multi-line chart: {str(e)}",
                chart_type="multi_line_chart",
                df_shape=df.shape,
            )
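
A usage sketch for the multi-line transformer follows, again assuming a no-argument constructor inherited from BaseChartTransformer (not shown in this diff).

# Hypothetical usage sketch for MultiLineChartTransformer.
import polars as pl
from bidviz.polars.transformers.line import MultiLineChartTransformer

df = pl.DataFrame(
    {
        "month": ["2024-01", "2024-02"],
        "revenue": [100, 120],
        "cost": [80, 90],
    }
)

result = MultiLineChartTransformer().transform(
    df, x_column="month", y_columns=["revenue", "cost"]
)
# result["chart_type"] == "multi_line_chart"
# result["series"][0] == {
#     "name": "Revenue",
#     "data": [{"x": "2024-01", "y": 100}, {"x": "2024-02", "y": 120}],
# }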
bidviz/polars/transformers/other.py
ADDED
@@ -0,0 +1,108 @@
"""Funnel and stacked bar chart transformers for Polars DataFrames."""

from typing import Any, Dict, List, Optional

import polars as pl

from bidviz.exceptions import TransformationError
from bidviz.polars.core.base import BaseChartTransformer
from bidviz.polars.utils import format_label, safe_get_value, validate_columns


class FunnelChartTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into funnel chart data."""

    def transform(self, df: pl.DataFrame, stage_column: str, value_column: str) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into funnel chart data for conversion pipelines.

        Args:
            df: Polars DataFrame containing the data
            stage_column: Column name for funnel stages
            value_column: Column name for stage values

        Returns:
            Dict with chart_type='funnel_chart' and data points
        """
        try:
            validate_columns(df, [stage_column, value_column])

            data = []
            for row in df.iter_rows(named=True):
                data.append(
                    {
                        "stage": str(safe_get_value(row[stage_column])),
                        "value": safe_get_value(row[value_column]),
                    }
                )

            return {"chart_type": "funnel_chart", "data": data}

        except ValueError as e:
            raise TransformationError(str(e), chart_type="funnel_chart", df_shape=df.shape)
        except Exception as e:
            raise TransformationError(
                f"Failed to transform funnel chart: {str(e)}",
                chart_type="funnel_chart",
                df_shape=df.shape,
            )


class StackedBarChartTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into stacked bar chart data."""

    def transform(
        self,
        df: pl.DataFrame,
        x_column: str,
        y_columns: List[str],
        category_names: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into stacked bar chart for composed comparisons.

        Args:
            df: Polars DataFrame containing the data
            x_column: Column name for x-axis
            y_columns: List of column names for stacked values
            category_names: Optional custom names for each stack

        Returns:
            Dict with chart_type='stacked_bar_chart' and data
        """
        try:
            validate_columns(df, [x_column] + y_columns)

            if category_names and len(category_names) != len(y_columns):
                raise TransformationError(
                    "Number of category_names must match number of y_columns",
                    chart_type="stacked_bar_chart",
                )

            data = []
            for row in df.iter_rows(named=True):
                point = {"x": str(safe_get_value(row[x_column]))}
                for y_col in y_columns:
                    point[y_col] = safe_get_value(row[y_col])
                data.append(point)

            categories = [
                category_names[i] if category_names else format_label(y_col)
                for i, y_col in enumerate(y_columns)
            ]

            return {
                "chart_type": "stacked_bar_chart",
                "data": data,
                "categories": categories,
                "x_label": format_label(x_column),
            }

        except ValueError as e:
            raise TransformationError(str(e), chart_type="stacked_bar_chart", df_shape=df.shape)
        except Exception as e:
            raise TransformationError(
                f"Failed to transform stacked bar chart: {str(e)}",
                chart_type="stacked_bar_chart",
                df_shape=df.shape,
            )
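
A usage sketch for the stacked bar transformer, under the same assumption of a no-argument constructor inherited from BaseChartTransformer.

# Hypothetical usage sketch for StackedBarChartTransformer.
import polars as pl
from bidviz.polars.transformers.other import StackedBarChartTransformer

df = pl.DataFrame(
    {
        "region": ["NA", "EU"],
        "online": [40, 55],
        "retail": [60, 45],
    }
)

result = StackedBarChartTransformer().transform(
    df, x_column="region", y_columns=["online", "retail"]
)
# result["data"] == [{"x": "NA", "online": 40, "retail": 60},
#                    {"x": "EU", "online": 55, "retail": 45}]
# result["categories"] == ["Online", "Retail"]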
bidviz/polars/transformers/pie.py
ADDED
@@ -0,0 +1,57 @@
"""Pie chart transformer for Polars DataFrames."""

from typing import Any, Dict

import polars as pl

from bidviz.exceptions import TransformationError
from bidviz.polars.core.base import BaseChartTransformer
from bidviz.polars.utils import format_label, safe_get_value, validate_columns


class PieChartTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into pie chart data."""

    def transform(
        self, df: pl.DataFrame, label_column: str, value_column: str
    ) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into pie chart data for part-to-whole relationships.

        Args:
            df: Polars DataFrame containing the data
            label_column: Column name for slice labels
            value_column: Column name for slice values

        Returns:
            Dict with chart_type='pie_chart' and data points

        Raises:
            TransformationError: If required columns are missing
        """
        try:
            validate_columns(df, [label_column, value_column])

            data = []
            for row in df.iter_rows(named=True):
                data.append(
                    {
                        "label": str(safe_get_value(row[label_column])),
                        "value": safe_get_value(row[value_column]),
                    }
                )

            return {
                "chart_type": "pie_chart",
                "data": data,
                "label": format_label(label_column),
            }

        except ValueError as e:
            raise TransformationError(str(e), chart_type="pie_chart", df_shape=df.shape)
        except Exception as e:
            raise TransformationError(
                f"Failed to transform pie chart: {str(e)}",
                chart_type="pie_chart",
                df_shape=df.shape,
            )
bidviz/polars/transformers/table.py
ADDED
@@ -0,0 +1,48 @@
"""Data table transformer for Polars DataFrames."""

from typing import Any, Dict

import polars as pl

from bidviz.exceptions import TransformationError
from bidviz.polars.core.base import BaseChartTransformer
from bidviz.polars.utils import format_label, paginate_dataframe, safe_get_value


class DataTableTransformer(BaseChartTransformer):
    """Transform Polars DataFrame into paginated data table."""

    def transform(self, df: pl.DataFrame, page: int = 1, page_size: int = 50) -> Dict[str, Any]:
        """
        Transform Polars DataFrame into paginated data table structure.

        Args:
            df: Polars DataFrame containing the data
            page: Page number (1-indexed)
            page_size: Number of rows per page

        Returns:
            Dict with chart_type='data_table', columns, rows, and pagination
        """
        try:
            paginated_df, metadata = paginate_dataframe(df, page, page_size)

            columns = []
            for col in df.columns:
                columns.append({"key": col, "label": format_label(col)})

            rows = []
            for row in paginated_df.iter_rows(named=True):
                row_data = {}
                for col in df.columns:
                    row_data[col] = safe_get_value(row[col])
                rows.append(row_data)

            return {"chart_type": "data_table", "columns": columns, "rows": rows, **metadata}

        except Exception as e:
            raise TransformationError(
                f"Failed to transform data table: {str(e)}",
                chart_type="data_table",
                df_shape=df.shape,
            )
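
A usage sketch for the data table transformer, assuming a no-argument constructor inherited from BaseChartTransformer. Note that the pagination metadata from paginate_dataframe is merged into the returned dict via **metadata.

# Hypothetical usage sketch for DataTableTransformer.
import polars as pl
from bidviz.polars.transformers.table import DataTableTransformer

df = pl.DataFrame({"order_id": list(range(120)), "status": ["shipped"] * 120})

result = DataTableTransformer().transform(df, page=2, page_size=50)
# result["chart_type"] == "data_table"
# result["columns"][0] == {"key": "order_id", "label": "Order Id"}
# len(result["rows"]) == 50
# result["page"] == 2, result["total"] == 120, result["total_pages"] == 3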
bidviz/polars/utils.py
ADDED
@@ -0,0 +1,220 @@
"""
Utility functions for Polars data transformation and formatting.

These utilities handle Polars-specific data types and conversions,
leveraging Polars' high-performance API for data manipulation.
"""

from typing import Any, List

import polars as pl


def safe_get_value(value: Any) -> Any:
    """
    Safely extract a value from Polars objects, converting null to None.

    Args:
        value: Value to extract (can be Polars type or Python type)

    Returns:
        Python-native value with null converted to None

    Examples:
        >>> safe_get_value(None)
        None
        >>> safe_get_value(42)
        42
        >>> safe_get_value(3.14)
        3.14
    """
    if value is None:
        return None
    if isinstance(value, (int, float, str, bool)):
        return value
    # Handle Polars temporal types
    if hasattr(value, "isoformat"):  # datetime/date/time objects
        return str(value)
    return value


def format_label(column_name: str) -> str:
    """
    Convert snake_case column name to Title Case label.

    Args:
        column_name: Column name in snake_case format

    Returns:
        Formatted label in Title Case

    Examples:
        >>> format_label('total_gmv')
        'Total Gmv'
        >>> format_label('customer_id')
        'Customer Id'
        >>> format_label('avg_days_to_ship')
        'Avg Days To Ship'
    """
    return column_name.replace("_", " ").title()


def validate_columns(df: pl.DataFrame, required_columns: List[str]) -> None:
    """
    Validate that required columns exist in the Polars DataFrame.

    Args:
        df: Polars DataFrame to validate
        required_columns: List of required column names

    Raises:
        ValueError: If any required columns are missing

    Examples:
        >>> df = pl.DataFrame({'a': [1, 2], 'b': [3, 4]})
        >>> validate_columns(df, ['a', 'b'])  # No error
        >>> validate_columns(df, ['a', 'c'])  # Raises ValueError
        Traceback (most recent call last):
            ...
        ValueError: Missing required columns: c
    """
    missing = [col for col in required_columns if col not in df.columns]
    if missing:
        raise ValueError(f"Missing required columns: {', '.join(missing)}")


def safe_convert_to_numeric(series: pl.Series) -> pl.Series:
    """
    Safely convert a Polars Series to numeric type.

    Args:
        series: Series to convert

    Returns:
        Numeric series with errors converted to null

    Examples:
        >>> s = pl.Series(['1', '2', 'abc'])
        >>> result = safe_convert_to_numeric(s)
        >>> result.to_list()
        [1.0, 2.0, None]
    """
    try:
        return series.cast(pl.Float64, strict=False)
    except Exception:
        return series


def clean_dataframe(df: pl.DataFrame) -> pl.DataFrame:
    """
    Clean DataFrame column names by converting to lowercase and replacing spaces.

    Args:
        df: Polars DataFrame to clean

    Returns:
        DataFrame with cleaned column names

    Examples:
        >>> df = pl.DataFrame({'Total GMV': [100], 'Customer Name': ['John']})
        >>> clean_df = clean_dataframe(df)
        >>> clean_df.columns
        ['total_gmv', 'customer_name']
    """
    new_columns = [col.lower().replace(" ", "_") for col in df.columns]
    return df.rename(dict(zip(df.columns, new_columns)))


def get_numeric_columns(df: pl.DataFrame) -> List[str]:
    """
    Get list of numeric column names from Polars DataFrame.

    Args:
        df: Polars DataFrame to analyze

    Returns:
        List of numeric column names

    Examples:
        >>> df = pl.DataFrame({'a': [1, 2], 'b': ['x', 'y'], 'c': [1.5, 2.5]})
        >>> get_numeric_columns(df)
        ['a', 'c']
    """
    numeric_types = [
        pl.Int8,
        pl.Int16,
        pl.Int32,
        pl.Int64,
        pl.UInt8,
        pl.UInt16,
        pl.UInt32,
        pl.UInt64,
        pl.Float32,
        pl.Float64,
    ]
    return [col for col in df.columns if df[col].dtype in numeric_types]


def paginate_dataframe(
    df: pl.DataFrame, page: int = 1, page_size: int = 50
) -> tuple[pl.DataFrame, dict]:
    """
    Paginate a Polars DataFrame and return pagination metadata.

    Args:
        df: Polars DataFrame to paginate
        page: Page number (1-indexed)
        page_size: Number of rows per page

    Returns:
        Tuple of (paginated DataFrame, pagination metadata dict)

    Examples:
        >>> df = pl.DataFrame({'a': range(100)})
        >>> page_df, meta = paginate_dataframe(df, page=2, page_size=25)
        >>> len(page_df)
        25
        >>> meta['total']
        100
        >>> meta['page']
        2
    """
    total = len(df)
    total_pages = (total + page_size - 1) // page_size  # Ceiling division

    # Ensure page is within valid range
    page = max(1, min(page, total_pages if total_pages > 0 else 1))

    start_idx = (page - 1) * page_size
    end_idx = start_idx + page_size

    paginated_df = df.slice(start_idx, page_size)

    metadata = {
        "total": total,
        "page": page,
        "page_size": page_size,
        "total_pages": total_pages,
    }

    return paginated_df, metadata


def dataframe_to_dicts(df: pl.DataFrame) -> List[dict]:
    """
    Convert Polars DataFrame to list of dictionaries with safe value conversion.

    This function handles null values and Polars-specific types properly.

    Args:
        df: Polars DataFrame to convert

    Returns:
        List of dictionaries representing rows

    Examples:
        >>> df = pl.DataFrame({'a': [1, 2], 'b': ['x', 'y']})
        >>> dataframe_to_dicts(df)
        [{'a': 1, 'b': 'x'}, {'a': 2, 'b': 'y'}]
    """
    return [{k: safe_get_value(v) for k, v in row.items()} for row in df.iter_rows(named=True)]