gov-uk-dashboards 26.8.0__py3-none-any.whl → 26.26.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gov_uk_dashboards/assets/dashboard.css +177 -0
- gov_uk_dashboards/assets/images/CHASE_icon.svg +17 -0
- gov_uk_dashboards/assets/images/explore_data_logo.svg +87 -0
- gov_uk_dashboards/colours.py +22 -0
- gov_uk_dashboards/components/dash/__init__.py +1 -1
- gov_uk_dashboards/components/dash/banners.py +20 -0
- gov_uk_dashboards/components/dash/context_card.py +472 -53
- gov_uk_dashboards/components/dash/data_quality_banner.py +91 -0
- gov_uk_dashboards/components/dash/download_button.py +0 -30
- gov_uk_dashboards/components/dash/footer.py +72 -29
- gov_uk_dashboards/components/dash/green_button.py +25 -0
- gov_uk_dashboards/components/dash/header.py +44 -0
- gov_uk_dashboards/components/dash/main_content.py +8 -1
- gov_uk_dashboards/components/dash/notification_banner.py +8 -5
- gov_uk_dashboards/components/leaflet/leaflet_choropleth_map.py +66 -27
- gov_uk_dashboards/components/plotly/enums.py +2 -0
- gov_uk_dashboards/components/plotly/stacked_barchart.py +53 -15
- gov_uk_dashboards/components/plotly/time_series_chart.py +140 -15
- gov_uk_dashboards/constants.py +19 -0
- gov_uk_dashboards/formatting/number_formatting.py +7 -0
- gov_uk_dashboards/lib/datetime_functions/datetime_functions.py +85 -0
- gov_uk_dashboards/lib/http_headers.py +3 -2
- gov_uk_dashboards/lib/testing_functions/data_test_assertions.py +3 -3
- gov_uk_dashboards/lib/testing_functions/data_test_helper_functions.py +2 -1
- gov_uk_dashboards/log_kpi.py +37 -0
- gov_uk_dashboards/template.html +1 -1
- gov_uk_dashboards/template.py +6 -0
- {gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/METADATA +2 -2
- {gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/RECORD +32 -27
- {gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/WHEEL +1 -1
- {gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/licenses/LICENSE +0 -0
- {gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/top_level.txt +0 -0
gov_uk_dashboards/components/plotly/stacked_barchart.py CHANGED

@@ -23,7 +23,7 @@ from gov_uk_dashboards.constants import (
     SUBTITLE,
     VALUE,
 )
-from gov_uk_dashboards.colours import AFAccessibleColours
+from gov_uk_dashboards.colours import AFAccessibleColours, ONSAccessibleColours
 from gov_uk_dashboards.components.helpers.display_chart_or_table_with_header import (
     display_chart_or_table_with_header,
 )
@@ -73,6 +73,8 @@ class StackedBarChart:
         alternative_data_button_text: Optional[str] = None,
         alternative_all_data_button_text: Optional[str] = None,
         total_trace_name: Optional[str] = None,
+        y_axis_tick_prefix: Optional[str] = "£",
+        x_hoverformat: Optional[str] = "%b %Y",
     ):
         """Initializes the StackedBarChart instance.
         To display the chart, call the `get_stacked_bar_chart()` method.
@@ -122,6 +124,8 @@
         self.alternative_data_button_text = alternative_data_button_text
         self.alternative_all_data_button_text = alternative_all_data_button_text
         self.total_trace_name = total_trace_name
+        self.y_axis_tick_prefix = y_axis_tick_prefix
+        self.x_hoverformat = x_hoverformat
         self.fig = self.create_stacked_bar_chart()

     def get_stacked_bar_chart(self) -> html.Div:
@@ -211,14 +215,17 @@
                 legendrank=999,
             )
         )
-
-
-
-
+
+        if len(self.trace_name_list) > 6:
+            colour_list = ONSAccessibleColours.CATEGORICAL_MANY.value
+        elif len(self.trace_name_list) != 2:
+            colour_list = AFAccessibleColours.CATEGORICAL.value
+        else:
+            colour_list = [
                 AFAccessibleColours.DARK_BLUE.value,
                 AFAccessibleColours.ORANGE.value,
             ]  # if 2 lines should use dark blue & orange as have highest contrast ratio
-
+
         for _, (df, trace_name, colour) in enumerate(
             zip(
                 self._get_df_list_for_bar_chart(),
@@ -262,7 +269,7 @@
             font={"size": CHART_LABEL_FONT_SIZE},
             yaxis={
                 # "range": [min_y * 1.1, max_y * 1.1],
-                "tickprefix":
+                "tickprefix": self.y_axis_tick_prefix,
                 "exponentformat": "B",
                 # "tickmode": "array",
                 # "tickvals": tickvals,
@@ -270,12 +277,13 @@
             },
             showlegend=True,
             barmode="relative",
-            xaxis={"categoryorder": "
+            xaxis={"categoryorder": "array", "categoryarray": self.trace_name_list},
             xaxis_title=self.x_axis_column if self.show_x_axis_title else None,
             ## copied from timeseries
             hovermode="x unified" if self.x_unified_hovermode is True else "closest",
             hoverdistance=self.hover_distance,  # Increase distance to simulate hover 'always on'
         )
+        self._format_xaxis(fig)
         return fig

     def create_bar_chart_trace(
@@ -320,13 +328,13 @@
         if self.x_unified_hovermode is True:
             return f"{trace_name}: " + "%{customdata[0]}<extra></extra>"
         hover_text_headers = self.hover_data[trace_name][HOVER_TEXT_HEADERS]
-        hover_template =
-
-
-        ": %{customdata[
-
-
-        )
+        hover_template = f"{trace_name}<br>"
+
+        for i, header in enumerate(hover_text_headers):
+            hover_template += f"{header}: %{{customdata[{i}]}}<br>"
+
+        # Remove the last <br> and add <extra></extra>
+        hover_template = hover_template.rstrip("<br>") + "<extra></extra>"
         return hover_template

     def _get_custom_data(self, trace_name, df):
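For illustration only (the trace name and headers below are invented, not taken from the package), the rewritten loop produces a Plotly hovertemplate of this shape:

# Minimal sketch: build a hovertemplate the same way the new loop does.
trace_name = "Affordable housing"
hover_text_headers = ["Homes delivered", "Change on last year"]

hover_template = f"{trace_name}<br>"
for i, header in enumerate(hover_text_headers):
    hover_template += f"{header}: %{{customdata[{i}]}}<br>"
hover_template = hover_template.rstrip("<br>") + "<extra></extra>"

print(hover_template)
# Affordable housing<br>Homes delivered: %{customdata[0]}<br>Change on last year: %{customdata[1]}<extra></extra>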
@@ -397,6 +405,36 @@
         tickvals = list(range(int(start), int(end) + 1, int(step_size)))
         return tickvals

+    def _format_xaxis(self, fig):
+        if self.xaxis_tick_text_format == "quarter":
+            df = self.df.with_columns(
+                pl.col(self.x_axis_column).str.strptime(pl.Date, "%Y-%m-%d")
+            )
+
+            first_year = df.select(pl.col(self.x_axis_column).min()).item().year
+            next_year = first_year + 1
+
+            max_in_first_year = (
+                df.filter(pl.col(self.x_axis_column).dt.year() == first_year)
+                .select(pl.col(self.x_axis_column).max())
+                .item()
+            )
+            min_in_next_year = (
+                df.filter(pl.col(self.x_axis_column).dt.year() == next_year)
+                .select(pl.col(self.x_axis_column).min())
+                .item()
+            )
+
+            tick0 = max_in_first_year + (min_in_next_year - max_in_first_year) / 2
+
+            fig.update_xaxes(
+                tick0=tick0,  # start tick halfway between Dec & Mar
+                dtick="M12",  # one tick per year
+                tickformat="%Y",
+                hoverformat=self.x_hoverformat,
+            )
+        return fig
+

 def get_tracenamelist_and_legend_order(df_function, barchart_measures=None):
     """
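As a standalone sketch of the quarterly-axis logic above (the dates are invented): tick0 lands halfway between the last date in the first year and the first date in the following year, and dtick="M12" then spaces ticks a year apart.

import polars as pl

# Hypothetical quarter-end dates spanning two years.
df = pl.DataFrame({"date": ["2023-06-30", "2023-09-30", "2023-12-31", "2024-03-31"]})
df = df.with_columns(pl.col("date").str.strptime(pl.Date, "%Y-%m-%d"))

first_year = df.select(pl.col("date").min()).item().year
max_in_first_year = (
    df.filter(pl.col("date").dt.year() == first_year).select(pl.col("date").max()).item()
)
min_in_next_year = (
    df.filter(pl.col("date").dt.year() == first_year + 1).select(pl.col("date").min()).item()
)

# Midpoint between 2023-12-31 and 2024-03-31.
tick0 = max_in_first_year + (min_in_next_year - max_in_first_year) / 2
print(tick0)  # 2024-02-14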
gov_uk_dashboards/components/plotly/time_series_chart.py CHANGED

@@ -63,6 +63,9 @@ class TimeSeriesChart:
         hover_data: HoverDataByTrace,
         filtered_df: pl.DataFrame,
         trace_name_list: list[str],
+        dashed_trace_name_list: list[str] = None,
+        trace_colour_groups: list[str, str] = None,
+        initially_hidden_traces: Optional[list[str]] = None,
         hover_data_for_traces_with_different_hover_for_last_point: Optional[
             HoverDataByTrace
         ] = None,
@@ -86,6 +89,7 @@
         additional_line: Optional[dict] = None,
         hover_distance: Optional[int] = 1,
         footnote: Optional[str] = None,
+        stacked: Optional[bool] = False,
     ):  # pylint: disable=duplicate-code
         self.title_data = title_data
         self.y_axis_column = y_axis_column
@@ -95,6 +99,9 @@
         )
         self.filtered_df = filtered_df
         self.trace_name_list = trace_name_list
+        self.dashed_trace_name_list = dashed_trace_name_list
+        self.trace_colour_groups = trace_colour_groups
+        self.initially_hidden_traces = initially_hidden_traces
         self.legend_dict = legend_dict
         self.trace_name_column = trace_name_column
         self.xaxis_tick_text_format = xaxis_tick_text_format
@@ -124,6 +131,7 @@
         self.number_of_traces_colour_shift_dict = number_of_traces_colour_shift_dict
         self.additional_line = additional_line
         self.hover_distance = hover_distance
+        self.stacked = stacked
         self.colour_list = self._get_colour_list()
         self.fig = self.create_time_series_chart()
         self.footnote = footnote
@@ -253,7 +261,12 @@
             self.create_time_series_trace(
                 df.sort(self.x_axis_column),
                 trace_name,
-                line_style=
+                line_style=(
+                    {"dash": "dot", "color": colour}
+                    if self.dashed_trace_name_list is not None
+                    and trace_name in self.dashed_trace_name_list
+                    else {"dash": "solid", "color": colour}
+                ),
                 marker={"symbol": marker, "size": marker_sizes, "opacity": 1},
                 legendgroup=legendgroup,
             ),
@@ -415,6 +428,14 @@
             marker (dict[str,str]): Properties for marker parameter.
             legendgroup (str): Name to group by in legend,
         """
+        if (
+            self.initially_hidden_traces is not None
+            and trace_name in self.initially_hidden_traces
+        ):
+            visible = "legendonly"
+        else:
+            visible = True
+
         return go.Scatter(
             x=df[self.x_axis_column],
             y=df[self.y_axis_column],
@@ -428,6 +449,8 @@
                 trace_name in self.legend_dict if self.legend_dict is not None else True
             ),
             legendgroup=legendgroup,
+            stackgroup="one" if self.stacked else None,
+            visible=visible,
         )

     def _get_hover_template(self, df, trace_name):
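A minimal, self-contained Plotly sketch (illustrative data only) of the two options these hunks lean on: traces that share a stackgroup are drawn stacked on top of one another, and visible="legendonly" keeps a trace listed in the legend but hidden until the user clicks it.

import plotly.graph_objects as go

fig = go.Figure()
# Two traces stacked via a shared stackgroup.
fig.add_trace(go.Scatter(x=[1, 2, 3], y=[4, 6, 5], name="Trace A", stackgroup="one"))
fig.add_trace(go.Scatter(x=[1, 2, 3], y=[2, 1, 3], name="Trace B", stackgroup="one"))
# An initially hidden trace: shown in the legend, plotted only when toggled on.
fig.add_trace(
    go.Scatter(x=[1, 2, 3], y=[1, 1, 1], name="Trace C", visible="legendonly")
)
fig.show()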
@@ -500,6 +523,7 @@
             return self.legend_dict[trace_name]
         return trace_name

+    # pylint: disable=too-many-statements
     def _get_x_axis_content(self):
         """Generates tick text and values for the x-axis based on the unique years calculated from
         the DATE_VALID column in the dataframe.
@@ -599,6 +623,30 @@
             ]

             range_x = [0.5, 4.5]
+        elif self.xaxis_tick_text_format == XAxisFormat.WEEK.value:
+            df = self.filtered_df.with_columns(
+                pl.col(self.x_axis_column)
+                .str.strptime(pl.Datetime, "%Y-%m-%d", strict=False)
+                .alias(self.x_axis_column)
+            ).sort(self.x_axis_column)
+
+            start_datetime = df[self.x_axis_column].min() - relativedelta(weeks=1)
+            latest_datetime = df[self.x_axis_column].max() + relativedelta(weeks=1)
+
+            start_of_week = start_datetime - relativedelta(
+                days=start_datetime.weekday()
+            )
+
+            tick_values = []
+            tick_text = []
+
+            current = start_of_week
+            while current <= latest_datetime:
+                tick_values.append(current)
+                tick_text.append(current.strftime("%d %b %Y"))  # e.g. "29 Sep 2025"
+                current += relativedelta(weeks=1)
+
+            range_x = [start_datetime, latest_datetime]
         else:
             raise ValueError(
                 f"Invalid xaxis_tick_text_format: {self.xaxis_tick_text_format}"
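A standalone sketch of the new week-tick branch (dates invented, assuming python-dateutil for relativedelta as in the chart code): the first tick snaps back to the Monday of the starting week via weekday(), then advances one week at a time.

from datetime import datetime
from dateutil.relativedelta import relativedelta

# Hypothetical first and last observation dates, padded by one week each way.
start_datetime = datetime(2025, 10, 1) - relativedelta(weeks=1)   # 2025-09-24, a Wednesday
latest_datetime = datetime(2025, 10, 15) + relativedelta(weeks=1)  # 2025-10-22

# Snap back to the Monday of that week (weekday(): Monday == 0).
start_of_week = start_datetime - relativedelta(days=start_datetime.weekday())

tick_values, tick_text = [], []
current = start_of_week
while current <= latest_datetime:
    tick_values.append(current)
    tick_text.append(current.strftime("%d %b %Y"))
    current += relativedelta(weeks=1)

print(tick_text)
# ['22 Sep 2025', '29 Sep 2025', '06 Oct 2025', '13 Oct 2025', '20 Oct 2025']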
@@ -608,9 +656,16 @@
     def _get_y_axis_range_max(self):
         """Get the y axis range maximum value to ensure there is an axis label greater than the
         maximum y value."""
-
-
-
+        if self.stacked:
+            largest_y_value = (
+                self.filtered_df.group_by(self.x_axis_column)  # group by date
+                .agg(pl.col(self.y_axis_column).sum())  # total per date
+                .select(pl.col(self.y_axis_column).max())  # largest daily total
+                .item()  # extract scalar
+            )
+        else:
+            largest_y_value = self.filtered_df[self.y_axis_column].max()
+        y_axis_max = largest_y_value + (0.3 * largest_y_value)
         return y_axis_max

     def _get_df_list_for_time_series(self) -> list[pl.DataFrame]:
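A small illustrative polars snippet (made-up data) of what the stacked branch computes: the largest per-date total across all traces, to which 30% headroom is then added.

import polars as pl

# Hypothetical long-format data: two traces observed on two dates.
df = pl.DataFrame(
    {
        "date": ["2025-01-01", "2025-01-01", "2025-02-01", "2025-02-01"],
        "value": [10, 5, 7, 12],
    }
)

largest_stacked_total = (
    df.group_by("date")             # group by date
    .agg(pl.col("value").sum())     # total per date: 15 and 19
    .select(pl.col("value").max())  # largest daily total
    .item()                         # extract scalar -> 19
)
y_axis_max = largest_stacked_total + 0.3 * largest_stacked_total
print(y_axis_max)  # 24.7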
@@ -624,27 +679,97 @@
         return df_list

     def _get_colour_list(self):
-        """Returns a list of colours.
+        """Returns a list of colours (one per trace in trace_name_list).
+
+        If `trace_colour_groups` is provided, traces in the same group share a colour.
+        Traces not in any group get their own colour from the palette.
+        """
+        palette = self._get_base_palette()
+        palette = self._apply_colour_shift(palette)
+
+        groups = getattr(self, "trace_colour_groups", None) or []
+        if not groups:
+            return palette[: len(self.trace_name_list)]
+
+        trace_to_group_id = self._build_trace_to_group_id(groups)
+        return self._assign_colours_with_groups(palette, trace_to_group_id)
+
+    def _get_base_palette(self) -> list[str]:
         number_of_traces = len(self.trace_name_list)
         if number_of_traces == 2 and self.filled_traces_dict is None:
-
+            return [
                 AFAccessibleColours.DARK_BLUE.value,
                 AFAccessibleColours.ORANGE.value,
-            ]
-
-
+            ]
+        return AFAccessibleColours.CATEGORICAL.value.copy()
+
+    def _apply_colour_shift(self, palette: list[str]) -> list[str]:
+        """Apply number_of_traces_colour_shift_dict; may replace palette with explicit list."""
+        number_of_traces = len(self.trace_name_list)
         colour_shift_dict = (
             {"default": 0}
             if self.number_of_traces_colour_shift_dict is None
             else self.number_of_traces_colour_shift_dict
         )
-
         colour_shift_value = colour_shift_dict.get(
             number_of_traces, colour_shift_dict["default"]
         )
+
         if isinstance(colour_shift_value, list):
-            return colour_shift_value
-
-
-
-
+            return colour_shift_value
+
+        # rotate left by colour_shift_value
+        shift = int(colour_shift_value)
+        if shift <= 0:
+            return palette
+
+        shift = shift % len(palette)
+        return palette[shift:] + palette[:shift]
+
+    def _build_trace_to_group_id(self, groups: list[list[str]]) -> dict[str, int]:
+        """Validate groups and return a mapping of trace -> group_id."""
+        trace_set = set(self.trace_name_list)
+        trace_to_group_id: dict[str, int] = {}
+
+        for group_id, group in enumerate(groups):
+            for trace in group:
+                if trace not in trace_set:
+                    raise ValueError(
+                        f"trace_colour_groups contains '{trace}', but it's not in trace_name_list."
+                    )
+                if trace in trace_to_group_id:
+                    raise ValueError(
+                        f"Trace '{trace}' appears in more than one trace_colour_group."
+                    )
+                trace_to_group_id[trace] = group_id
+
+        return trace_to_group_id
+
+    def _assign_colours_with_groups(
+        self,
+        palette: list[str],
+        trace_to_group_id: dict[str, int],
+    ) -> list[str]:
+        """Assign one colour per group (first-seen), and one per ungrouped trace."""
+        group_colour: dict[int, str] = {}
+        trace_colour: dict[str, str] = {}
+        colour_idx = 0
+
+        # assign colours to groups in order of first appearance in trace_name_list
+        for trace in self.trace_name_list:
+            group_id = trace_to_group_id.get(trace)
+            if group_id is not None and group_id not in group_colour:
+                group_colour[group_id] = palette[colour_idx % len(palette)]
+                colour_idx += 1
+
+        # apply group colours to grouped traces
+        for trace, group_id in trace_to_group_id.items():
+            trace_colour[trace] = group_colour[group_id]
+
+        # assign colours to ungrouped traces
+        for trace in self.trace_name_list:
+            if trace not in trace_colour:
+                trace_colour[trace] = palette[colour_idx % len(palette)]
+                colour_idx += 1
+
+        return [trace_colour[t] for t in self.trace_name_list]
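To illustrate the grouping behaviour with a standalone sketch (trace names and hex values are invented, not the package's palette): traces in the same group share one colour, groups claim colours in order of first appearance, and ungrouped traces take the next free colours.

# Standalone sketch of the colour-group assignment, using a generic palette.
palette = ["#12436D", "#F46A25", "#28A197", "#801650"]  # hypothetical hex values
trace_name_list = ["England", "England (forecast)", "London", "North West"]
trace_colour_groups = [["England", "England (forecast)"]]

trace_to_group_id = {
    trace: group_id
    for group_id, group in enumerate(trace_colour_groups)
    for trace in group
}

group_colour, trace_colour, colour_idx = {}, {}, 0
for trace in trace_name_list:  # groups claim colours in first-appearance order
    group_id = trace_to_group_id.get(trace)
    if group_id is not None and group_id not in group_colour:
        group_colour[group_id] = palette[colour_idx % len(palette)]
        colour_idx += 1
for trace, group_id in trace_to_group_id.items():
    trace_colour[trace] = group_colour[group_id]
for trace in trace_name_list:  # ungrouped traces take the next free colours
    if trace not in trace_colour:
        trace_colour[trace] = palette[colour_idx % len(palette)]
        colour_idx += 1

print([trace_colour[t] for t in trace_name_list])
# ['#12436D', '#12436D', '#F46A25', '#28A197']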
gov_uk_dashboards/constants.py CHANGED

@@ -39,3 +39,22 @@ LATEST_YEAR = "latest_year"
 PREVIOUS_YEAR = "previous_year"
 PREVIOUS_2YEAR = "previous_2year"
 TWENTY_NINETEEN = "twenty_nineteen"
+
+NOTIFICATION_STYLE_RED = {"borderColor": "#d4351c", "backgroundColor": "#d4351c"}
+NOTIFICATION_STYLE_ORANGE = {"borderColor": "#f47738", "backgroundColor": "#f47738"}
+NOTIFICATION_STYLE_YELLOW = {"borderColor": "#ffdd00", "backgroundColor": "#ffdd00"}
+NOTIFICATION_STYLE_GREEN = {"borderColor": "#00703c", "backgroundColor": "#00703c"}
+
+BANNER_STYLE = {
+    "display": "flex",
+    "visibility": "visible",
+    "margin": "-10px -15px 0px -15px",
+}
+
+ERROR_MESSAGE_BANNER_STYLE = {
+    "width": "100%",
+    "paddingLeft": "30px",
+    "paddingRight": "30px",
+    "justifyContent": "left",
+    "backgroundColor": "#FFDD00",
+}
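As an illustrative (not package-documented) use, these dictionaries are plain Dash inline-style mappings, so they can be merged into a component's style prop.

from dash import html
from gov_uk_dashboards.constants import BANNER_STYLE, NOTIFICATION_STYLE_RED

# Hypothetical banner combining the shared layout style with the red notification colours.
banner = html.Div(
    "Data issue: figures for March are provisional.",
    style={**BANNER_STYLE, **NOTIFICATION_STYLE_RED},
)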
gov_uk_dashboards/formatting/number_formatting.py CHANGED

@@ -29,3 +29,10 @@ def add_commas(number: int, remove_decimal_places: bool = False) -> str:
     if remove_decimal_places:
         return f"{number:,.0f}"
     return f"{number:,}"
+
+
+def format_percentage(percentage):
+    """Formats percentages to remove decimal place on numbers 10 or more"""
+    if abs(percentage) < 10:
+        return percentage
+    return int(percentage)
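Illustrative behaviour of the new helper (example values chosen for this note, not from the package's tests):

from gov_uk_dashboards.formatting.number_formatting import format_percentage

print(format_percentage(7.4))    # 7.4  -> kept as-is, below 10
print(format_percentage(12.7))   # 12   -> decimal dropped at 10 or more
print(format_percentage(-15.9))  # -15  -> int() truncates towards zero, it does not round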
gov_uk_dashboards/lib/datetime_functions/datetime_functions.py CHANGED

@@ -7,6 +7,91 @@ import re
 from typing import Optional


+def convert_date(
+    date_input,
+    input_format=None,
+    output_format=None,
+    convert_to_datetime=False,
+    abbreviate_jun_jul=False,
+):
+    """
+    Convert a date input (string, date, or datetime) into either a datetime object or a formatted
+    string.
+
+    Behaviour:
+    - If `date_input` is a string, `input_format` must be provided and is used with
+      `datetime.strptime`.
+    - If `convert_to_datetime` is True, returns a `datetime.datetime` (at midnight if the input was
+      a `date`), and `output_format` is ignored.
+    - If `convert_to_datetime` is False, `output_format` must be provided and is used with
+      `strftime`.
+
+    Month abbreviation tweak:
+    - If `abbreviate_jun_jul` is False (default), and your `output_format` produces abbreviated
+      months (e.g., via `%b`), any standalone "Jun" or "Jul" tokens in the formatted output are
+      expanded to "June" / "July".
+    - If `abbreviate_jun_jul` is True, the output is left exactly as produced by `strftime`.
+
+    Args:
+        date_input (str | datetime.datetime | datetime.date):
+            The date to convert.
+        input_format (str | None):
+            Format string for parsing `date_input` when it is a string. Required if `date_input` is
+            a string.
+        output_format (str | None):
+            Format string used when returning a string. Required if `convert_to_datetime` is False.
+        convert_to_datetime (bool):
+            If True, return a `datetime.datetime`. If False, return a formatted string.
+        abbreviate_jun_jul (bool):
+            If False, expand "Jun"/"Jul" to "June"/"July" in the final formatted string.
+
+    Returns:
+        datetime.datetime | str:
+            A datetime object if `convert_to_datetime` is True, otherwise a formatted string.
+
+    Raises:
+        ValueError:
+            If `date_input` is a string and `input_format` is None, or if parsing fails.
+            If `convert_to_datetime` is False and `output_format` is None.
+        TypeError:
+            If `date_input` is not a string, date, or datetime.
+    """
+    # Parse / normalise to datetime
+    if isinstance(date_input, str):
+        if input_format is None:
+            raise ValueError(
+                "input_format must be provided when date_input is a string"
+            )
+        try:
+            dt = datetime.strptime(date_input, input_format)
+        except ValueError as e:
+            raise ValueError(
+                f"Could not parse date_input={date_input!r} with input_format={input_format!r}"
+            ) from e
+    elif isinstance(date_input, datetime):
+        dt = date_input
+    elif isinstance(date_input, date):
+        dt = datetime.combine(date_input, datetime.min.time())
+    else:
+        raise TypeError("date_input must be a str, datetime.datetime, or datetime.date")
+
+    if convert_to_datetime:
+        return dt
+
+    if output_format is None:
+        raise ValueError(
+            "output_format must be provided when convert_to_datetime is False"
+        )
+
+    output_str = dt.strftime(output_format)
+
+    if not abbreviate_jun_jul:
+        output_str = re.sub(r"\bJun\b", "June", output_str)
+        output_str = re.sub(r"\bJul\b", "July", output_str)
+
+    return output_str
+
+
 def convert_date_string_to_text_string(
     date_str: str,
     date_format: Optional[str] = "%Y-%m-%d",
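A few illustrative calls (editor-chosen values) covering the paths through convert_date: string to formatted string with the June/July expansion, the same call with the expansion switched off, string to datetime, and a date input.

from datetime import date
from gov_uk_dashboards.lib.datetime_functions.datetime_functions import convert_date

# String -> formatted string; "Jun" is expanded to "June" by default.
print(convert_date("2025-06-30", input_format="%Y-%m-%d", output_format="%d %b %Y"))
# 30 June 2025

# Keep strftime's abbreviation untouched.
print(
    convert_date(
        "2025-06-30",
        input_format="%Y-%m-%d",
        output_format="%d %b %Y",
        abbreviate_jun_jul=True,
    )
)
# 30 Jun 2025

# String -> datetime object (output_format is ignored here).
print(convert_date("2025-06-30", input_format="%Y-%m-%d", convert_to_datetime=True))
# 2025-06-30 00:00:00

# A date input is accepted directly; "July" via %B is left alone.
print(convert_date(date(2025, 7, 1), output_format="%B %Y"))
# July 2025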
gov_uk_dashboards/lib/http_headers.py CHANGED

@@ -15,9 +15,10 @@ def setup_application_http_response_headers(dash_app: dash.Dash):
         "script-src 'self' 'unsafe-inline' https://*.googletagmanager.com "
         "https://cdn.jsdelivr.net/npm/web-vitals@4.2.4/dist/web-vitals.min.js "
         "https://cdn.jsdelivr.net/npm/html2canvas@1.4.1/dist/html2canvas.min.js "
-        "https://*.google-analytics.com;"
+        "https://*.google-analytics.com https://js.monitor.azure.com/scripts/b/ai.3.gbl.min.js;"
         "connect-src 'self' https://*.googletagmanager.com https://*.google-analytics.com "
-        "https://*.analytics.google.com
+        "https://*.analytics.google.com https://js.monitor.azure.com "
+        "https://*.applicationinsights.azure.com/v2/track; "
         "img-src 'self' https://*.googletagmanager.com https://*.google-analytics.com "
         "https://*.analytics.google.com data: https://*.tile.openstreetmap.org; "
     )
gov_uk_dashboards/lib/testing_functions/data_test_assertions.py CHANGED

@@ -46,7 +46,7 @@ def cvs_contains_no_duplicate_rows(csv_absolute_filepath: str):
     Returns: bool: True if there were no duplicates in the df, False otherwise.
     """

-    df = pl.read_csv(csv_absolute_filepath)
+    df = pl.read_csv(csv_absolute_filepath, infer_schema_length=1000)
     num_duplicate_rows = df.is_duplicated().sum()
     assert (
         num_duplicate_rows == 0
@@ -71,7 +71,7 @@ def inferred_df_has_correct_column_types(
         for field, typ in schema.__annotations__.items()
     }

-    df = pl.read_csv(csv_absolute_filepath)
+    df = pl.read_csv(csv_absolute_filepath, infer_schema_length=1000)
     assert df.schema == expected_schema


@@ -86,7 +86,7 @@ def df_has_valid_schema(csv_absolute_filepath: str, schema: Type[BaseModel]):
         schema (Type[BaseModel]): Pydantic model class to extract the expected column types
     """

-    df = pl.read_csv(csv_absolute_filepath)
+    df = pl.read_csv(csv_absolute_filepath, infer_schema_length=1000)

     if hasattr(schema, "from_polars"):  # column level validation
         try:
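For context, a small invented example of why the tests now pass infer_schema_length=1000: polars infers column dtypes from the first 100 rows by default, so a column that looks numeric early on but contains text further down can fail to read; widening the inference window lets the inferred schema cover the whole file.

import io
import polars as pl

# 150 numeric-looking rows followed by a text value in the same column.
csv_data = "code\n" + "\n".join(["123"] * 150 + ["N/A"])

# Default inference (first 100 rows) decides the column is an integer column
# and then errors when it reaches "N/A".
try:
    pl.read_csv(io.StringIO(csv_data))
except Exception as err:
    print(type(err).__name__)

# With a larger inference window the column is read as a string column instead.
df = pl.read_csv(io.StringIO(csv_data), infer_schema_length=1000)
print(df.schema["code"])  # String (Utf8 in older polars versions)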
gov_uk_dashboards/lib/testing_functions/data_test_helper_functions.py CHANGED

@@ -30,7 +30,7 @@ def extract_main_type(typ):

     # If it's a tuple containing types, filter out NoneType and return the first non-None
     non_none_args = [
-        arg for arg in args if arg
+        arg for arg in args if not isinstance(arg, type(None))
     ]  # Remove NoneType
     return (
         non_none_args[0] if non_none_args else typ
@@ -240,6 +240,7 @@ def value_matches_regex_pattern_or_alternative_value(
         "url": r"https?:\/\/(www\.)?[a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-zA-Z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)",  # pylint: disable=line-too-long
         "x_weeks_and_y_days": r"^\d+\s+weeks\s+and\s+\d+\s+days$",
         "area_code": r"^^E(?:0[6789]|[01][06789])\d{6}$",
+        "region_code": r"^E1200000[1-9]$",
         "YYYY-YY": r"^\d{4}-\d{2}$",
         "YYYY-MM": r"^\d{4}-(0[1-9]|1[0-2])$",
     }
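For illustration, the new region_code pattern accepts exactly the nine English region codes E12000001 through E12000009 (the checks below were written for this note):

import re

region_code = r"^E1200000[1-9]$"

print(bool(re.match(region_code, "E12000001")))  # True
print(bool(re.match(region_code, "E12000009")))  # True
print(bool(re.match(region_code, "E12000010")))  # False - one digit too many
print(bool(re.match(region_code, "E06000001")))  # False - not a region code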
gov_uk_dashboards/log_kpi.py ADDED

@@ -0,0 +1,37 @@
+"""Logging utility for KPI runs.
+
+This module provides a reusable function `log_message` that appends
+a message to a KPI log file, prefixed with a UK-time timestamp
+(including milliseconds). It can be called directly from the command
+line or imported and reused in other scripts.
+"""
+
+import sys
+import os
+from datetime import datetime
+from zoneinfo import ZoneInfo
+
+# Path to KPI log file (should be provided via environment variable)
+KPI_LOG = os.environ.get("KPI_LOG_FILE")
+UK_TZ = ZoneInfo("Europe/London")
+
+
+def log_message(message: str) -> None:
+    """Write a message to the KPI log with a UK-time timestamp.
+
+    Args:
+        message (str): The log message to append.
+    """
+    timestamp = datetime.now(UK_TZ).strftime("%Y-%m-%d %H:%M:%S.%f")[:-4]  # keep ms
+    line = f"[{timestamp}] {message}\n"
+
+    with open(KPI_LOG, "a", encoding="utf-8") as f:
+        f.write(line)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        sys.exit(1)
+
+    MESSAGE = " ".join(sys.argv[1:])
+    log_message(MESSAGE)
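Illustrative usage (the log path and message are invented); KPI_LOG_FILE must be set before the module is imported, because KPI_LOG is read from the environment at import time.

import os

# Set the log path first: gov_uk_dashboards.log_kpi reads it on import.
os.environ["KPI_LOG_FILE"] = "/tmp/kpi.log"  # hypothetical path

from gov_uk_dashboards.log_kpi import log_message

log_message("pipeline step 3 finished")
# /tmp/kpi.log now ends with a line like:
# [2025-10-06 14:03:27.12] pipeline step 3 finished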
gov_uk_dashboards/template.html CHANGED
gov_uk_dashboards/template.py CHANGED

@@ -13,6 +13,9 @@ def read_template(favicon_path: str = r"assets\images\MHCLG_favicon.png") -> str
     :return: String version of the template.
     """
     gtag = os.environ.get("GTAG", "")
+    app_insights_conn_string = os.environ.get(
+        "APPLICATIONINSIGHTS_CONNECTION_STRING", ""
+    )
     path = os.path.join(os.path.dirname(__file__), "template.html")

     with open(path, encoding="utf-8") as file:

@@ -20,4 +23,7 @@ def read_template(favicon_path: str = r"assets\images\MHCLG_favicon.png") -> str

     rendered_template = template.replace("{{favicon_path}}", favicon_path)
     rendered_template = rendered_template.replace("{{gtag}}", gtag)
+    rendered_template = rendered_template.replace(
+        "{{app_insights_conn_string}}", app_insights_conn_string
+    )
     return rendered_template
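For illustration (assuming, as the replace call implies, that template.html contains an {{app_insights_conn_string}} placeholder), the connection string is read from the environment and substituted when the template is rendered.

import os
from gov_uk_dashboards.template import read_template

# Hypothetical connection string; in practice this is set by the hosting environment.
os.environ["APPLICATIONINSIGHTS_CONNECTION_STRING"] = (
    "InstrumentationKey=00000000-0000-0000-0000-000000000000"
)

html = read_template()
print("{{app_insights_conn_string}}" in html)  # False - the placeholder has been substituted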
{gov_uk_dashboards-26.8.0.dist-info → gov_uk_dashboards-26.26.0.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gov_uk_dashboards
-Version: 26.8.0
+Version: 26.26.0
 Summary: Provides access to functionality common to creating a data dashboard.
 Author: Department for Levelling Up, Housing and Communities
 Description-Content-Type: text/markdown
@@ -9,7 +9,7 @@ Requires-Dist: setuptools<81.0,>=59.8
 Requires-Dist: dash~=3.0
 Requires-Dist: numpy>=2.3.2
 Requires-Dist: dash_bootstrap_components~=2.0.3
-Requires-Dist: plotly
+Requires-Dist: plotly<6.6,>=6.2
 Requires-Dist: flask-basicauth~=0.2.0
 Dynamic: author
 Dynamic: description