streamlit-nightly 1.36.1.dev20240702__py2.py3-none-any.whl → 1.36.1.dev20240704__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- streamlit/commands/navigation.py +1 -1
- streamlit/components/v1/component_arrow.py +16 -11
- streamlit/components/v1/custom_component.py +2 -1
- streamlit/dataframe_util.py +884 -0
- streamlit/delta_generator.py +6 -4
- streamlit/elements/arrow.py +26 -45
- streamlit/elements/lib/built_in_chart_utils.py +78 -19
- streamlit/elements/lib/column_config_utils.py +1 -1
- streamlit/elements/lib/pandas_styler_utils.py +4 -2
- streamlit/elements/lib/policies.py +60 -8
- streamlit/elements/lib/utils.py +100 -10
- streamlit/elements/map.py +4 -15
- streamlit/elements/metric.py +5 -2
- streamlit/elements/plotly_chart.py +11 -12
- streamlit/elements/vega_charts.py +19 -31
- streamlit/elements/widgets/button.py +17 -15
- streamlit/elements/widgets/camera_input.py +15 -10
- streamlit/elements/widgets/chat.py +9 -11
- streamlit/elements/widgets/checkbox.py +13 -11
- streamlit/elements/widgets/color_picker.py +14 -10
- streamlit/elements/widgets/data_editor.py +18 -19
- streamlit/elements/widgets/file_uploader.py +15 -10
- streamlit/elements/widgets/multiselect.py +13 -15
- streamlit/elements/widgets/number_input.py +13 -11
- streamlit/elements/widgets/radio.py +13 -15
- streamlit/elements/widgets/select_slider.py +13 -13
- streamlit/elements/widgets/selectbox.py +13 -15
- streamlit/elements/widgets/slider.py +14 -10
- streamlit/elements/widgets/text_widgets.py +21 -17
- streamlit/elements/widgets/time_widgets.py +18 -16
- streamlit/elements/write.py +7 -15
- streamlit/runtime/caching/cache_utils.py +2 -5
- streamlit/runtime/state/common.py +51 -2
- streamlit/runtime/state/session_state.py +2 -1
- streamlit/runtime/state/session_state_proxy.py +1 -1
- streamlit/runtime/state/widgets.py +1 -1
- streamlit/static/asset-manifest.json +2 -2
- streamlit/static/index.html +1 -1
- streamlit/static/static/js/{main.e2ab315a.js → main.28e3c6e9.js} +2 -2
- streamlit/testing/v1/element_tree.py +3 -3
- streamlit/type_util.py +0 -1069
- {streamlit_nightly-1.36.1.dev20240702.dist-info → streamlit_nightly-1.36.1.dev20240704.dist-info}/METADATA +1 -1
- {streamlit_nightly-1.36.1.dev20240702.dist-info → streamlit_nightly-1.36.1.dev20240704.dist-info}/RECORD +48 -47
- /streamlit/static/static/js/{main.e2ab315a.js.LICENSE.txt → main.28e3c6e9.js.LICENSE.txt} +0 -0
- {streamlit_nightly-1.36.1.dev20240702.data → streamlit_nightly-1.36.1.dev20240704.data}/scripts/streamlit.cmd +0 -0
- {streamlit_nightly-1.36.1.dev20240702.dist-info → streamlit_nightly-1.36.1.dev20240704.dist-info}/WHEEL +0 -0
- {streamlit_nightly-1.36.1.dev20240702.dist-info → streamlit_nightly-1.36.1.dev20240704.dist-info}/entry_points.txt +0 -0
- {streamlit_nightly-1.36.1.dev20240702.dist-info → streamlit_nightly-1.36.1.dev20240704.dist-info}/top_level.txt +0 -0
streamlit/delta_generator.py
CHANGED
@@ -38,10 +38,10 @@ from streamlit import (
     cli_util,
     config,
     cursor,
+    dataframe_util,
     env_util,
     logger,
     runtime,
-    type_util,
     util,
 )
 from streamlit.elements.alert import AlertMixin
@@ -100,7 +100,7 @@ if TYPE_CHECKING:
     from pandas import DataFrame

     from streamlit.cursor import Cursor
-    from streamlit.
+    from streamlit.dataframe_util import Data
     from streamlit.elements.lib.built_in_chart_utils import AddRowsMetadata


@@ -705,8 +705,10 @@ def _prep_data_for_add_rows(
     add_rows_metadata: AddRowsMetadata | None,
 ) -> tuple[Data, AddRowsMetadata | None]:
     if not add_rows_metadata:
-
-
+        if dataframe_util.is_pandas_styler(data):
+            # When calling add_rows on st.table or st.dataframe we want styles to pass through.
+            return data, None
+        return dataframe_util.convert_anything_to_pandas_df(data), None

     # If add_rows_metadata is set, it indicates that the add_rows used called
     # on a chart based on our built-in chart commands.
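The new branch in _prep_data_for_add_rows hands a pandas Styler back unchanged instead of flattening it to a DataFrame, so formatting survives an add_rows call. A minimal usage sketch of that behavior (hypothetical app code, not part of this package):

import pandas as pd
import streamlit as st

df1 = pd.DataFrame({"a": [1.0, 2.0]})
df2 = pd.DataFrame({"a": [3.0, 4.0]})

# The Styler passed to add_rows is now passed through as-is,
# so its number formatting also applies to the appended rows.
table = st.table(df1.style.format("{:.2f}"))
table.add_rows(df2.style.format("{:.2f}"))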
streamlit/elements/arrow.py
CHANGED
@@ -18,21 +18,17 @@ import json
 from dataclasses import dataclass
 from typing import (
     TYPE_CHECKING,
-    Any,
-    Dict,
     Final,
     Iterable,
-    List,
     Literal,
     TypedDict,
-    Union,
     cast,
     overload,
 )

 from typing_extensions import TypeAlias

-from streamlit import type_util
+from streamlit import dataframe_util
 from streamlit.elements.lib.column_config_utils import (
     INDEX_IDENTIFIER,
     ColumnConfigMappingInput,
@@ -43,38 +39,19 @@ from streamlit.elements.lib.column_config_utils import (
 )
 from streamlit.elements.lib.event_utils import AttributeDictionary
 from streamlit.elements.lib.pandas_styler_utils import marshall_styler
-from streamlit.elements.lib.policies import (
-
-    check_callback_rules,
-    check_session_state_rules,
-)
+from streamlit.elements.lib.policies import check_widget_policies
+from streamlit.elements.lib.utils import Key, to_key
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.Arrow_pb2 import Arrow as ArrowProto
 from streamlit.runtime.metrics_util import gather_metrics
 from streamlit.runtime.scriptrunner import get_script_run_ctx
 from streamlit.runtime.state import WidgetCallback, register_widget
 from streamlit.runtime.state.common import compute_widget_id
-from streamlit.type_util import Key, to_key

 if TYPE_CHECKING:
-
-    from numpy import ndarray
-    from pandas import DataFrame, Index, Series
-    from pandas.io.formats.style import Styler
-
+    from streamlit.dataframe_util import Data
     from streamlit.delta_generator import DeltaGenerator

-    Data: TypeAlias = Union[
-        "DataFrame",
-        "Series",
-        "Styler",
-        "Index",
-        "pa.Table",
-        "ndarray",
-        Iterable,
-        Dict[str, List[Any]],
-        None,
-    ]

 SelectionMode: TypeAlias = Literal[
     "single-row", "multi-row", "single-column", "multi-column"
@@ -489,10 +466,15 @@ class ArrowMixin:

         if is_selection_activated:
             # Run some checks that are only relevant when selections are activated
-
-
-
-
+            is_callback = callable(on_select)
+            check_widget_policies(
+                self.dg,
+                key,
+                on_change=cast(WidgetCallback, on_select) if is_callback else None,
+                default_value=None,
+                writes_allowed=False,
+                enable_check_callback_rules=is_callback,
+            )

         # Convert the user provided column config into the frontend compatible format:
         column_config_mapping = process_config_mapping(column_config)
@@ -511,15 +493,15 @@ class ArrowMixin:

         if isinstance(data, pa.Table):
             # For pyarrow tables, we can just serialize the table directly
-            proto.data = type_util.pyarrow_table_to_bytes(data)
+            proto.data = dataframe_util.convert_arrow_table_to_arrow_bytes(data)
         else:
             # For all other data formats, we need to convert them to a pandas.DataFrame
             # thereby, we also apply some data specific configs

             # Determine the input data format
-            data_format = type_util.determine_data_format(data)
+            data_format = dataframe_util.determine_data_format(data)

-            if type_util.is_pandas_styler(data):
+            if dataframe_util.is_pandas_styler(data):
                 # If pandas.Styler uuid is not provided, a hash of the position
                 # of the element will be used. This will cause a rerender of the table
                 # when the position of the element is changed.
@@ -528,7 +510,9 @@
                 marshall_styler(proto, data, default_uuid)

             # Convert the input data into a pandas.DataFrame
-            data_df = type_util.convert_anything_to_df(data, ensure_copy=False)
+            data_df = dataframe_util.convert_anything_to_pandas_df(
+                data, ensure_copy=False
+            )
             apply_data_specific_configs(
                 column_config_mapping,
                 data_df,
@@ -536,7 +520,7 @@
                 check_arrow_compatibility=False,
             )
             # Serialize the data to bytes:
-            proto.data = type_util.data_frame_to_bytes(data_df)
+            proto.data = dataframe_util.convert_pandas_df_to_arrow_bytes(data_df)

         if hide_index is not None:
             update_column_config(
@@ -615,8 +599,10 @@

         # Check if data is uncollected, and collect it but with 100 rows max, instead of 10k rows, which is done in all other cases.
         # Avoid this and use 100 rows in st.table, because large tables render slowly, take too much screen space, and can crush the app.
-        if type_util.is_unevaluated_data_object(data):
-            data = type_util.convert_anything_to_df(data, max_unevaluated_rows=100)
+        if dataframe_util.is_unevaluated_data_object(data):
+            data = dataframe_util.convert_anything_to_pandas_df(
+                data, max_unevaluated_rows=100
+            )

         # If pandas.Styler uuid is not provided, a hash of the position
         # of the element will be used. This will cause a rerender of the table
@@ -705,9 +691,8 @@ def marshall(proto: ArrowProto, data: Data, default_uuid: str | None = None) ->
     (e.g. charts) can ignore it.

     """
-    import pyarrow as pa

-    if type_util.is_pandas_styler(data):
+    if dataframe_util.is_pandas_styler(data):
         # default_uuid is a string only if the data is a `Styler`,
         # and `None` otherwise.
         assert isinstance(
@@ -715,8 +700,4 @@ def marshall(proto: ArrowProto, data: Data, default_uuid: str | None = None) ->
         ), "Default UUID must be a string for Styler data."
         marshall_styler(proto, data, default_uuid)

-    if isinstance(data, pa.Table):
-        proto.data = type_util.pyarrow_table_to_bytes(data)
-    else:
-        df = type_util.convert_anything_to_df(data)
-        proto.data = type_util.data_frame_to_bytes(df)
+    proto.data = dataframe_util.convert_anything_to_arrow_bytes(data)
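marshall now funnels every input through the new dataframe_util.convert_anything_to_arrow_bytes helper instead of branching on pa.Table itself. That helper's implementation is not shown in this diff; the sketch below only illustrates the equivalent dispatch using plain pyarrow IPC serialization (the names and details here are assumptions, not the actual streamlit code):

import pandas as pd
import pyarrow as pa


def _to_ipc_bytes(table: pa.Table) -> bytes:
    # Serialize an Arrow table to IPC stream bytes.
    sink = pa.BufferOutputStream()
    with pa.ipc.new_stream(sink, table.schema) as writer:
        writer.write_table(table)
    return sink.getvalue().to_pybytes()


def convert_anything_to_arrow_bytes_sketch(data) -> bytes:
    # Rough equivalent of the branches removed from marshall():
    # pyarrow tables are serialized directly, everything else is
    # converted to a pandas.DataFrame first.
    if isinstance(data, pa.Table):
        return _to_ipc_bytes(data)
    df = pd.DataFrame(data)
    return _to_ipc_bytes(pa.Table.from_pandas(df))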
streamlit/elements/lib/built_in_chart_utils.py
CHANGED
@@ -25,12 +25,15 @@ from typing import (
     Collection,
     Final,
     Hashable,
+    Literal,
     Sequence,
     TypedDict,
     cast,
 )

-from streamlit import type_util
+from typing_extensions import TypeAlias
+
+from streamlit import dataframe_util, type_util
 from streamlit.color_util import (
     Color,
     is_color_like,
@@ -44,8 +47,10 @@ if TYPE_CHECKING:
     import altair as alt
     import pandas as pd

-    from streamlit.
-
+    from streamlit.dataframe_util import Data
+
+    VegaLiteType: TypeAlias = Literal["quantitative", "ordinal", "temporal", "nominal"]
+    ChartStackType: TypeAlias = Literal["normalize", "center", "layered"]


 class PrepDataColumns(TypedDict):
@@ -129,7 +134,7 @@ def generate_chart(
     """Function to use the chart's type, data columns and indices to figure out the chart's spec."""
     import altair as alt

-    df = type_util.convert_anything_to_df(data, ensure_copy=True)
+    df = dataframe_util.convert_anything_to_pandas_df(data, ensure_copy=True)

     # From now on, use "df" instead of "data". Deleting "data" to guarantee we follow this.
     del data
@@ -248,7 +253,7 @@ def prep_chart_data_for_add_rows(
     """
     import pandas as pd

-    df = cast(pd.DataFrame, type_util.convert_anything_to_df(data))
+    df = cast(pd.DataFrame, dataframe_util.convert_anything_to_pandas_df(data))

     # Make range indices start at last_index.
     if isinstance(df.index, pd.RangeIndex):
@@ -273,6 +278,66 @@ def prep_chart_data_for_add_rows(
     return out_data, add_rows_metadata


+def _infer_vegalite_type(
+    data: pd.Series[Any],
+) -> VegaLiteType:
+    """
+    From an array-like input, infer the correct vega typecode
+    ('ordinal', 'nominal', 'quantitative', or 'temporal')
+
+    Parameters
+    ----------
+    data: Numpy array or Pandas Series
+    """
+    # The code below is copied from Altair, and slightly modified.
+    # We copy this code here so we don't depend on private Altair functions.
+    # Source: https://github.com/altair-viz/altair/blob/62ca5e37776f5cecb27e83c1fbd5d685a173095d/altair/utils/core.py#L193
+
+    from pandas.api.types import infer_dtype
+
+    # STREAMLIT MOD: I'm using infer_dtype directly here, rather than using Altair's wrapper. Their
+    # wrapper is only there to support Pandas < 0.20, but Streamlit requires Pandas 1.3.
+    typ = infer_dtype(data)
+
+    if typ in [
+        "floating",
+        "mixed-integer-float",
+        "integer",
+        "mixed-integer",
+        "complex",
+    ]:
+        return "quantitative"
+
+    elif typ == "categorical" and data.cat.ordered:
+        # STREAMLIT MOD: The original code returns a tuple here:
+        # return ("ordinal", data.cat.categories.tolist())
+        # But returning the tuple here isn't compatible with our
+        # built-in chart implementation. And it also doesn't seem to be necessary.
+        # Altair already extracts the correct sort order somewhere else.
+        # More info about the issue here: https://github.com/streamlit/streamlit/issues/7776
+        return "ordinal"
+    elif typ in ["string", "bytes", "categorical", "boolean", "mixed", "unicode"]:
+        return "nominal"
+    elif typ in [
+        "datetime",
+        "datetime64",
+        "timedelta",
+        "timedelta64",
+        "date",
+        "time",
+        "period",
+    ]:
+        return "temporal"
+    else:
+        # STREAMLIT MOD: I commented this out since Streamlit doesn't have a warnings object.
+        # warnings.warn(
+        #     "I don't know how to infer vegalite type from '{}'. "
+        #     "Defaulting to nominal.".format(typ),
+        #     stacklevel=1,
+        # )
+        return "nominal"
+
+
 def _get_pandas_index_attr(
     data: pd.DataFrame | pd.Series,
     attr: str,
@@ -325,15 +390,9 @@ def _prep_data(


 def _last_index_for_melted_dataframes(
-    data:
+    data: pd.DataFrame,
 ) -> Hashable | None:
-
-    data = type_util.convert_anything_to_df(data)
-
-    if data.index.size > 0:
-        return cast(Hashable, data.index[-1])
-
-    return None
+    return cast(Hashable, data.index[-1]) if data.index.size > 0 else None


 def _is_date_column(df: pd.DataFrame, name: str | None) -> bool:
@@ -434,7 +493,7 @@ def _melt_data(

     # Arrow has problems with object types after melting two different dtypes
     # pyarrow.lib.ArrowTypeError: "Expected a <TYPE> object, got a object"
-    fixed_df = type_util.fix_arrow_incompatible_column_types(
+    fixed_df = dataframe_util.fix_arrow_incompatible_column_types(
         melted_df,
         selected_columns=[
             *columns_to_leave_alone,
@@ -862,7 +921,7 @@ def _get_color_encoding(
     if color_column == _MELTED_COLOR_COLUMN_NAME:
         column_type = "nominal"
     else:
-        column_type = type_util.infer_vegalite_type(df[color_column])
+        column_type = _infer_vegalite_type(df[color_column])

     color_enc = alt.Color(
         field=color_column, legend=_COLOR_LEGEND_SETTINGS, type=column_type
@@ -982,7 +1041,7 @@ def _get_tooltip_encoding(


 def _get_x_encoding_type(
     df: pd.DataFrame, chart_type: ChartType, x_column: str | None
-) ->
+) -> VegaLiteType:
     if x_column is None:
         return "quantitative"  # Anything. If None, Vega-Lite may hide the axis.

@@ -991,18 +1050,18 @@ def _get_x_encoding_type(
     if chart_type == ChartType.VERTICAL_BAR and not _is_date_column(df, x_column):
         return "ordinal"

-    return type_util.infer_vegalite_type(df[x_column])
+    return _infer_vegalite_type(df[x_column])


 def _get_y_encoding_type(
     df: pd.DataFrame, chart_type: ChartType, y_column: str | None
-) ->
+) -> VegaLiteType:
     # Horizontal bar charts should have a discrete (ordinal) y-axis, UNLESS type is date/time
     if chart_type == ChartType.HORIZONTAL_BAR and not _is_date_column(df, y_column):
         return "ordinal"

     if y_column:
-        return type_util.infer_vegalite_type(df[y_column])
+        return _infer_vegalite_type(df[y_column])

     return "quantitative"  # Pick anything. If undefined, Vega-Lite may hide the axis.

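For reference, the newly inlined _infer_vegalite_type maps pandas dtypes onto the four Vega-Lite type codes used by the encoding helpers above. A quick illustration, assuming this nightly build is installed (the helper is private, so this is for reading the diff rather than for application code):

import pandas as pd

from streamlit.elements.lib.built_in_chart_utils import _infer_vegalite_type

print(_infer_vegalite_type(pd.Series([1.0, 2.5, 3.0])))  # quantitative
print(_infer_vegalite_type(pd.Series(["a", "b", "c"])))  # nominal
print(_infer_vegalite_type(pd.to_datetime(pd.Series(["2024-07-02", "2024-07-04"]))))  # temporal

sizes = pd.Categorical(["s", "m", "l"], categories=["s", "m", "l"], ordered=True)
print(_infer_vegalite_type(pd.Series(sizes)))  # ordinal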
streamlit/elements/lib/column_config_utils.py
CHANGED
@@ -21,10 +21,10 @@ from typing import TYPE_CHECKING, Dict, Final, Literal, Mapping, Union

 from typing_extensions import TypeAlias

+from streamlit.dataframe_util import DataFormat, is_colum_type_arrow_incompatible
 from streamlit.elements.lib.column_types import ColumnConfig, ColumnType
 from streamlit.elements.lib.dicttools import remove_none_values
 from streamlit.errors import StreamlitAPIException
-from streamlit.type_util import DataFormat, is_colum_type_arrow_incompatible

 if TYPE_CHECKING:
     import pyarrow as pa
streamlit/elements/lib/pandas_styler_utils.py
CHANGED
@@ -16,7 +16,7 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, Any, Mapping, TypeVar

-from streamlit import type_util
+from streamlit import dataframe_util
 from streamlit.errors import StreamlitAPIException

 if TYPE_CHECKING:
@@ -236,7 +236,9 @@ def _marshall_display_values(

     """
     new_df = _use_display_values(df, styles)
-    proto.styler.display_values = type_util.data_frame_to_bytes(new_df)
+    proto.styler.display_values = dataframe_util.convert_pandas_df_to_arrow_bytes(
+        new_df
+    )


 def _use_display_values(df: DataFrame, styles: Mapping[str, Any]) -> DataFrame:
streamlit/elements/lib/policies.py
CHANGED
@@ -14,9 +14,9 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Final, Sequence

-from streamlit import config, runtime
+from streamlit import config, errors, logger, runtime
 from streamlit.elements.form import is_in_form
 from streamlit.errors import StreamlitAPIException, StreamlitAPIWarning
 from streamlit.runtime.scriptrunner.script_run_context import get_script_run_ctx
@@ -26,8 +26,12 @@ if TYPE_CHECKING:
     from streamlit.delta_generator import DeltaGenerator


+_LOGGER: Final = logger.get_logger(__name__)
+
+
 def check_callback_rules(dg: DeltaGenerator, on_change: WidgetCallback | None) -> None:
-    """Ensures that widgets other than `st.
+    """Ensures that widgets other than `st.form_submit_button` within a form don't have
+    an on_change callback set.

     Raises
     ------
@@ -48,9 +52,11 @@ _shown_default_value_warning: bool = False
 def check_session_state_rules(
     default_value: Any, key: str | None, writes_allowed: bool = True
 ) -> None:
-    """Ensures that no values are set for widgets with the given key when writing
+    """Ensures that no values are set for widgets with the given key when writing
+    is not allowed.

-    Additionally, if `global.disableWidgetStateDuplicationWarning` is False a warning is
+    Additionally, if `global.disableWidgetStateDuplicationWarning` is False a warning is
+    shown when a widget has a default value but its value is also set via session state.

     Raises
     ------
@@ -68,7 +74,8 @@ def check_session_state_rules(

     if not writes_allowed:
         raise StreamlitAPIException(
-            f
+            f"Values for the widget with key '{key}' cannot be set using"
+            " `st.session_state`."
         )

     if (
@@ -123,6 +130,15 @@ _fragment_writes_widget_to_outside_error = (


 def check_fragment_path_policy(dg: DeltaGenerator):
+    """Ensures that the current widget is not written outside of the
+    fragment's delta path.
+
+    Should be called by ever element that acts as a widget.
+    We don't allow writing widgets from within a widget to the outside path
+    because it can lead to unexpected behavior. For elements, this is okay
+    because they do not trigger a re-run.
+    """
+
     ctx = get_script_run_ctx()
     # Check is only relevant for fragments
     if ctx is None or ctx.current_fragment_id is None:
@@ -135,11 +151,47 @@ def check_fragment_path_policy(dg: DeltaGenerator):

     current_cursor_delta_path = current_cursor.delta_path

-    # the elements delta path cannot be smaller than the fragment's delta path if it is
+    # the elements delta path cannot be smaller than the fragment's delta path if it is
+    # inside of the fragment
     if len(current_cursor_delta_path) < len(current_fragment_delta_path):
         raise StreamlitAPIException(_fragment_writes_widget_to_outside_error)

-    # all path indices of the fragment-path must occur in the inner-elements delta path,
+    # all path indices of the fragment-path must occur in the inner-elements delta path,
+    # otherwise it is outside of the fragment container
     for index, path_index in enumerate(current_fragment_delta_path):
         if current_cursor_delta_path[index] != path_index:
             raise StreamlitAPIException(_fragment_writes_widget_to_outside_error)
+
+
+def check_widget_policies(
+    dg: DeltaGenerator,
+    key: str | None,
+    on_change: WidgetCallback | None = None,
+    *,
+    default_value: Sequence[Any] | Any | None = None,
+    writes_allowed: bool = True,
+    enable_check_callback_rules: bool = True,
+):
+    """Check all widget policies for the given DeltaGenerator."""
+    check_fragment_path_policy(dg)
+    check_cache_replay_rules()
+    if enable_check_callback_rules:
+        check_callback_rules(dg, on_change)
+    check_session_state_rules(
+        default_value=default_value, key=key, writes_allowed=writes_allowed
+    )
+
+
+def maybe_raise_label_warnings(label: str | None, label_visibility: str | None):
+    if not label:
+        _LOGGER.warning(
+            "`label` got an empty value. This is discouraged for accessibility "
+            "reasons and may be disallowed in the future by raising an exception. "
+            "Please provide a non-empty label and hide it with label_visibility "
+            "if needed."
+        )
+    if label_visibility not in ("visible", "hidden", "collapsed"):
+        raise errors.StreamlitAPIException(
+            f"Unsupported label_visibility option '{label_visibility}'. "
+            f"Valid values are 'visible', 'hidden' or 'collapsed'."
+        )
streamlit/elements/lib/utils.py
CHANGED
@@ -15,9 +15,21 @@
 from __future__ import annotations

 from enum import Enum, EnumMeta
-from typing import
-
-
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Final,
+    Iterable,
+    Literal,
+    Sequence,
+    TypeVar,
+    Union,
+    overload,
+)
+
+from typing_extensions import TypeAlias
+
+from streamlit import config, dataframe_util, errors, logger
 from streamlit.proto.LabelVisibilityMessage_pb2 import LabelVisibilityMessage
 from streamlit.runtime.state.common import RegisterWidgetResult

@@ -25,8 +37,15 @@ if TYPE_CHECKING:
     from streamlit.type_util import T


+_LOGGER: Final = logger.get_logger(__name__)
+
+Key: TypeAlias = Union[str, int]
+
+LabelVisibility: TypeAlias = Literal["visible", "hidden", "collapsed"]
+
+
 def get_label_visibility_proto_value(
-    label_visibility_string:
+    label_visibility_string: LabelVisibility,
 ) -> LabelVisibilityMessage.LabelVisibilityOptions.ValueType:
     """Returns one of LabelVisibilityMessage enum constants.py based on string value."""

@@ -40,6 +59,78 @@ def get_label_visibility_proto_value(
     raise ValueError(f"Unknown label visibility value: {label_visibility_string}")


+@overload
+def to_key(key: None) -> None: ...
+
+
+@overload
+def to_key(key: Key) -> str: ...
+
+
+def to_key(key: Key | None) -> str | None:
+    return None if key is None else str(key)
+
+
+E1 = TypeVar("E1", bound=Enum)
+E2 = TypeVar("E2", bound=Enum)
+
+_ALLOWED_ENUM_COERCION_CONFIG_SETTINGS = ("off", "nameOnly", "nameAndValue")
+
+
+def _coerce_enum(from_enum_value: E1, to_enum_class: type[E2]) -> E1 | E2:
+    """Attempt to coerce an Enum value to another EnumMeta.
+
+    An Enum value of EnumMeta E1 is considered coercable to EnumType E2
+    if the EnumMeta __qualname__ match and the names of their members
+    match as well. (This is configurable in streamlist configs)
+    """
+    if not isinstance(from_enum_value, Enum):
+        raise ValueError(
+            f"Expected an Enum in the first argument. Got {type(from_enum_value)}"
+        )
+    if not isinstance(to_enum_class, EnumMeta):
+        raise ValueError(
+            f"Expected an EnumMeta/Type in the second argument. Got {type(to_enum_class)}"
+        )
+    if isinstance(from_enum_value, to_enum_class):
+        return from_enum_value  # Enum is already a member, no coersion necessary
+
+    coercion_type = config.get_option("runner.enumCoercion")
+    if coercion_type not in _ALLOWED_ENUM_COERCION_CONFIG_SETTINGS:
+        raise errors.StreamlitAPIException(
+            "Invalid value for config option runner.enumCoercion. "
+            f"Expected one of {_ALLOWED_ENUM_COERCION_CONFIG_SETTINGS}, "
+            f"but got '{coercion_type}'."
+        )
+    if coercion_type == "off":
+        return from_enum_value  # do not attempt to coerce
+
+    # We now know this is an Enum AND the user has configured coercion enabled.
+    # Check if we do NOT meet the required conditions and log a failure message
+    # if that is the case.
+    from_enum_class = from_enum_value.__class__
+    if (
+        from_enum_class.__qualname__ != to_enum_class.__qualname__
+        or (
+            coercion_type == "nameOnly"
+            and set(to_enum_class._member_names_) != set(from_enum_class._member_names_)
+        )
+        or (
+            coercion_type == "nameAndValue"
+            and set(to_enum_class._value2member_map_)
+            != set(from_enum_class._value2member_map_)
+        )
+    ):
+        _LOGGER.debug("Failed to coerce %s to class %s", from_enum_value, to_enum_class)
+        return from_enum_value  # do not attempt to coerce
+
+    # At this point we think the Enum is coercable, and we know
+    # E1 and E2 have the same member names. We convert from E1 to E2 using _name_
+    # (since user Enum subclasses can override the .name property in 3.11)
+    _LOGGER.debug("Coerced %s to class %s", from_enum_value, to_enum_class)
+    return to_enum_class[from_enum_value._name_]
+
+
 @overload
 def maybe_coerce_enum(
     register_widget_result: RegisterWidgetResult[Enum],
@@ -51,7 +142,7 @@ def maybe_coerce_enum(
 @overload
 def maybe_coerce_enum(
     register_widget_result: RegisterWidgetResult[T],
-    options:
+    options: dataframe_util.OptionSequence[T],
     opt_sequence: Sequence[T],
 ) -> RegisterWidgetResult[T]: ...

@@ -74,7 +165,7 @@ def maybe_coerce_enum(register_widget_result, options, opt_sequence):
         return register_widget_result

     return RegisterWidgetResult(
-
+        _coerce_enum(register_widget_result.value, coerce_class),
         register_widget_result.value_changed,
     )

@@ -84,7 +175,7 @@ def maybe_coerce_enum(register_widget_result, options, opt_sequence):
 @overload
 def maybe_coerce_enum_sequence(
     register_widget_result: RegisterWidgetResult[list[T]],
-    options:
+    options: dataframe_util.OptionSequence[T],
     opt_sequence: Sequence[T],
 ) -> RegisterWidgetResult[list[T]]: ...

@@ -92,7 +183,7 @@ def maybe_coerce_enum_sequence(
 @overload
 def maybe_coerce_enum_sequence(
     register_widget_result: RegisterWidgetResult[tuple[T, T]],
-    options:
+    options: dataframe_util.OptionSequence[T],
     opt_sequence: Sequence[T],
 ) -> RegisterWidgetResult[tuple[T, T]]: ...

@@ -118,8 +209,7 @@ def maybe_coerce_enum_sequence(register_widget_result, options, opt_sequence):
     # Return a new RegisterWidgetResult with the coerced enum values sequence
     return RegisterWidgetResult(
         type(register_widget_result.value)(
-
-            for val in register_widget_result.value
+            _coerce_enum(val, coerce_class) for val in register_widget_result.value
         ),
         register_widget_result.value_changed,
     )
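The enum coercion logic is now defined directly in this module as _coerce_enum. The sketch below shows the situation it exists for: a Streamlit rerun re-executes the script and produces a fresh Enum class, and a member stored from the previous run is mapped onto it. _coerce_enum is private, and runner.enumCoercion is assumed to be at its default "nameOnly" setting:

from enum import Enum

from streamlit.elements.lib.utils import _coerce_enum


class Color(Enum):
    RED = 1
    BLUE = 2


OldColor = Color  # keep a handle on the class from the "previous run"


class Color(Enum):  # the rerun re-executes the definition, creating a new class
    RED = 1
    BLUE = 2


# The stored member belongs to the old class but is coerced onto the new one
# because the __qualname__ and the member names match.
member = _coerce_enum(OldColor.RED, Color)
print(member is Color.RED)  # True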