streamlit-nightly 1.37.2.dev20240806__py2.py3-none-any.whl → 1.37.2.dev20240808__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- streamlit/dataframe_util.py +153 -34
- streamlit/elements/lib/built_in_chart_utils.py +3 -7
- streamlit/elements/lib/options_selector_utils.py +3 -19
- streamlit/elements/write.py +13 -3
- streamlit/hello/Animation_Demo.py +1 -1
- streamlit/hello/Dataframe_Demo.py +1 -1
- streamlit/hello/Hello.py +1 -1
- streamlit/hello/Mapping_Demo.py +1 -1
- streamlit/hello/Plotting_Demo.py +1 -1
- streamlit/hello/streamlit_app.py +5 -5
- streamlit/runtime/metrics_util.py +5 -1
- streamlit/static/asset-manifest.json +3 -3
- streamlit/static/index.html +1 -1
- streamlit/static/static/js/8148.539ddabe.chunk.js +1 -0
- streamlit/static/static/js/{main.5d7956c8.js → main.80efcd23.js} +2 -2
- streamlit/type_util.py +29 -8
- {streamlit_nightly-1.37.2.dev20240806.dist-info → streamlit_nightly-1.37.2.dev20240808.dist-info}/METADATA +2 -2
- {streamlit_nightly-1.37.2.dev20240806.dist-info → streamlit_nightly-1.37.2.dev20240808.dist-info}/RECORD +23 -23
- streamlit/static/static/js/8148.89abd285.chunk.js +0 -1
- /streamlit/static/static/js/{main.5d7956c8.js.LICENSE.txt → main.80efcd23.js.LICENSE.txt} +0 -0
- {streamlit_nightly-1.37.2.dev20240806.data → streamlit_nightly-1.37.2.dev20240808.data}/scripts/streamlit.cmd +0 -0
- {streamlit_nightly-1.37.2.dev20240806.dist-info → streamlit_nightly-1.37.2.dev20240808.dist-info}/WHEEL +0 -0
- {streamlit_nightly-1.37.2.dev20240806.dist-info → streamlit_nightly-1.37.2.dev20240808.dist-info}/entry_points.txt +0 -0
- {streamlit_nightly-1.37.2.dev20240806.dist-info → streamlit_nightly-1.37.2.dev20240808.dist-info}/top_level.txt +0 -0
streamlit/dataframe_util.py
CHANGED
@@ -21,8 +21,8 @@ import dataclasses
 import inspect
 import math
 import re
-from collections import ChainMap, UserDict, deque
-from collections.abc import ItemsView,
+from collections import ChainMap, UserDict, UserList, deque
+from collections.abc import ItemsView, Mapping
 from enum import Enum, EnumMeta, auto
 from types import MappingProxyType
 from typing import (
@@ -31,8 +31,8 @@ from typing import (
     Dict,
     Final,
     Iterable,
+    List,
     Protocol,
-    Sequence,
     TypeVar,
     Union,
     cast,
@@ -45,6 +45,7 @@ from streamlit.type_util import (
     has_callable_attr,
     is_custom_dict,
     is_dataclass_instance,
+    is_list_like,
     is_namedtuple,
     is_type,
 )
@@ -64,6 +65,8 @@ _MAX_UNEVALUATED_DF_ROWS = 10000
 
 _PANDAS_DATA_OBJECT_TYPE_RE: Final = re.compile(r"^pandas.*$")
 _PANDAS_STYLER_TYPE_STR: Final = "pandas.io.formats.style.Styler"
+_XARRAY_DATA_ARRAY_TYPE_STR: Final = "xarray.core.dataarray.DataArray"
+_XARRAY_DATASET_TYPE_STR: Final = "xarray.core.dataset.Dataset"
 _SNOWPARK_DF_TYPE_STR: Final = "snowflake.snowpark.dataframe.DataFrame"
 _SNOWPARK_DF_ROW_TYPE_STR: Final = "snowflake.snowpark.row.Row"
 _SNOWPARK_TABLE_TYPE_STR: Final = "snowflake.snowpark.table.Table"
@@ -72,6 +75,12 @@ _MODIN_DF_TYPE_STR: Final = "modin.pandas.dataframe.DataFrame"
 _MODIN_SERIES_TYPE_STR: Final = "modin.pandas.series.Series"
 _SNOWPANDAS_DF_TYPE_STR: Final = "snowflake.snowpark.modin.pandas.dataframe.DataFrame"
 _SNOWPANDAS_SERIES_TYPE_STR: Final = "snowflake.snowpark.modin.pandas.series.Series"
+_SNOWPANDAS_INDEX_TYPE_STR: Final = (
+    "snowflake.snowpark.modin.plugin.extensions.index.Index"
+)
+_POLARS_DATAFRAME: Final = "polars.dataframe.frame.DataFrame"
+_POLARS_SERIES: Final = "polars.series.series.Series"
+_POLARS_LAZYFRAME: Final = "polars.lazyframe.frame.LazyFrame"
 
 V_co = TypeVar(
     "V_co",
@@ -132,6 +141,11 @@ class DataFormat(Enum):
     MODIN_OBJECT = auto()  # Modin DataFrame, Series
     SNOWPANDAS_OBJECT = auto()  # Snowpandas DataFrame, Series
     PANDAS_STYLER = auto()  # pandas Styler
+    POLARS_DATAFRAME = auto()  # polars.dataframe.frame.DataFrame
+    POLARS_LAZYFRAME = auto()  # polars.lazyframe.frame.LazyFrame
+    POLARS_SERIES = auto()  # polars.series.series.Series
+    XARRAY_DATASET = auto()  # xarray.Dataset
+    XARRAY_DATA_ARRAY = auto()  # xarray.DataArray
     LIST_OF_RECORDS = auto()  # List[Dict[str, Scalar]]
     LIST_OF_ROWS = auto()  # List[List[Scalar]]
     LIST_OF_VALUES = auto()  # List[Scalar]
@@ -170,6 +184,11 @@ def is_dataframe_like(obj: object) -> bool:
         DataFormat.PYSPARK_OBJECT,
         DataFormat.MODIN_OBJECT,
         DataFormat.SNOWPANDAS_OBJECT,
+        DataFormat.POLARS_SERIES,
+        DataFormat.POLARS_DATAFRAME,
+        DataFormat.POLARS_LAZYFRAME,
+        DataFormat.XARRAY_DATASET,
+        DataFormat.XARRAY_DATA_ARRAY,
         DataFormat.COLUMN_SERIES_MAPPING,
     ]
 
@@ -181,7 +200,8 @@ def is_unevaluated_data_object(obj: object) -> bool:
     - Snowpark DataFrame / Table
     - PySpark DataFrame
     - Modin DataFrame / Series
-    - Snowpandas DataFrame / Series
+    - Snowpandas DataFrame / Series / Index
+    - Polars LazyFrame
     - Generator functions
 
     Unevaluated means that the data is not yet in the local memory.
@@ -193,6 +213,7 @@ def is_unevaluated_data_object(obj: object) -> bool:
         or is_pyspark_data_object(obj)
         or is_snowpandas_data_object(obj)
        or is_modin_data_object(obj)
+        or is_polars_lazyframe(obj)
         or inspect.isgeneratorfunction(obj)
     )
 
@@ -233,11 +254,38 @@ def is_modin_data_object(obj: object) -> bool:
 
 def is_snowpandas_data_object(obj: object) -> bool:
     """True if obj is a Snowpark Pandas DataFrame or Series."""
-    return is_type(obj, _SNOWPANDAS_DF_TYPE_STR) or is_type(
-        obj, _SNOWPANDAS_SERIES_TYPE_STR
+    return (
+        is_type(obj, _SNOWPANDAS_DF_TYPE_STR)
+        or is_type(obj, _SNOWPANDAS_SERIES_TYPE_STR)
+        or is_type(obj, _SNOWPANDAS_INDEX_TYPE_STR)
     )
 
 
+def is_polars_dataframe(obj: object) -> bool:
+    """True if obj is a Polars Dataframe."""
+    return is_type(obj, _POLARS_DATAFRAME)
+
+
+def is_xarray_dataset(obj: object) -> bool:
+    """True if obj is a Xarray Dataset."""
+    return is_type(obj, _XARRAY_DATASET_TYPE_STR)
+
+
+def is_xarray_data_array(obj: object) -> bool:
+    """True if obj is a Xarray DataArray."""
+    return is_type(obj, _XARRAY_DATA_ARRAY_TYPE_STR)
+
+
+def is_polars_series(obj: object) -> bool:
+    """True if obj is a Polars Series."""
+    return is_type(obj, _POLARS_SERIES)
+
+
+def is_polars_lazyframe(obj: object) -> bool:
+    """True if obj is a Polars Lazyframe."""
+    return is_type(obj, _POLARS_LAZYFRAME)
+
+
 def is_pandas_styler(obj: object) -> TypeGuard[Styler]:
     """True if obj is a pandas Styler."""
     return is_type(obj, _PANDAS_STYLER_TYPE_STR)
@@ -361,6 +409,33 @@ def convert_anything_to_pandas_df(
         else _fix_column_naming(pd.DataFrame(data))
     )
 
+    if is_polars_dataframe(data):
+        data = data.clone() if ensure_copy else data
+        return data.to_pandas()
+
+    if is_polars_series(data):
+        data = data.clone() if ensure_copy else data
+        return data.to_pandas().to_frame()
+
+    if is_polars_lazyframe(data):
+        data = data.limit(max_unevaluated_rows).collect().to_pandas()
+        if data.shape[0] == max_unevaluated_rows:
+            _show_data_information(
+                f"⚠️ Showing only {string_util.simplify_number(max_unevaluated_rows)} "
+                "rows. Call `collect()` on the dataframe to show more."
+            )
+        return cast(pd.DataFrame, data)
+
+    if is_xarray_dataset(data):
+        if ensure_copy:
+            data = data.copy(deep=True)
+        return data.to_dataframe()
+
+    if is_xarray_data_array(data):
+        if ensure_copy:
+            data = data.copy(deep=True)
+        return data.to_series().to_frame()
+
     if is_modin_data_object(data):
         data = data.head(max_unevaluated_rows)._to_pandas()
 
@@ -429,7 +504,7 @@ def convert_anything_to_pandas_df(
         return _fix_column_naming(pd.DataFrame([c.value for c in data]))  # type: ignore
 
     # Support for some list like objects
-    if isinstance(data, (deque, map, array.ArrayType)):
+    if isinstance(data, (deque, map, array.ArrayType, UserList)):
         return _fix_column_naming(pd.DataFrame(list(data)))
 
     # Support for Streamlit's custom dict-like objects
@@ -601,42 +676,56 @@ def convert_anything_to_arrow_bytes(
         df = convert_anything_to_pandas_df(data, max_unevaluated_rows)
         return convert_pandas_df_to_arrow_bytes(df)
 
+    if is_polars_dataframe(data):
+        return convert_arrow_table_to_arrow_bytes(data.to_arrow())
+
+    if is_polars_series(data):
+        return convert_arrow_table_to_arrow_bytes(data.to_frame().to_arrow())
+
     # Fallback: try to convert to pandas DataFrame
     # and then to Arrow bytes.
     df = convert_anything_to_pandas_df(data, max_unevaluated_rows)
     return convert_pandas_df_to_arrow_bytes(df)
 
 
-def convert_anything_to_sequence(obj: OptionSequence[V_co]) -> Sequence[V_co]:
-    """Try to convert different formats to
+def convert_anything_to_sequence(obj: OptionSequence[V_co]) -> list[V_co]:
+    """Try to convert different formats to a list.
 
     If the input is a dataframe-like object, we just select the first
-    column to iterate over.
-
+    column to iterate over. Non sequence-like objects and scalar types,
+    will just be wrapped into a list.
 
     Parameters
     ----------
+
     obj : OptionSequence
-        The object to convert to a
+        The object to convert to a list.
 
     Returns
     -------
-
-    The converted
+    list
+        The converted list.
     """
     if obj is None:
         return []  # type: ignore
 
-    if isinstance(
-
-
+    if isinstance(obj, (str, int, float, bool)):
+        # Wrap basic objects into a list
+        return [obj]
+
+    if isinstance(obj, EnumMeta):
+        # Support for enum classes. For string enums, we return the string value
+        # of the enum members. For other enums, we just return the enum member.
+        return [member.value if isinstance(member, str) else member for member in obj]  # type: ignore
+
+    if isinstance(obj, Mapping):
+        return list(obj.keys())
+
+    if is_list_like(obj) and not is_snowpark_row_list(obj):
         # This also ensures that the sequence is copied to prevent
         # potential mutations to the original object.
         return list(obj)
 
-    if isinstance(obj, dict):
-        return list(obj.keys())
-
     # Fallback to our DataFrame conversion logic:
     try:
         # We use ensure_copy here because the return value of this function is
@@ -647,13 +736,13 @@ def convert_anything_to_sequence(obj: OptionSequence[V_co]) -> Sequence[V_co]:
         data_df = convert_anything_to_pandas_df(obj, ensure_copy=True)
         # Return first column as a list:
         return (
-            []
+            []
+            if data_df.empty
+            else cast(List[V_co], list(data_df.iloc[:, 0].to_list()))
         )
-    except errors.StreamlitAPIException
-
-
-            f"Object type: {type(obj)}"
-        ) from e
+    except errors.StreamlitAPIException:
+        # Wrap the object into a list
+        return [obj]  # type: ignore
 
 
 def _maybe_truncate_table(
@@ -853,7 +942,6 @@ def determine_data_format(input_data: Any) -> DataFormat:
     DataFormat
         The data format of the input data.
     """
-    import array
 
     import numpy as np
     import pandas as pd
@@ -881,20 +969,29 @@ def determine_data_format(input_data: Any) -> DataFormat:
         return DataFormat.PANDAS_STYLER
     elif isinstance(input_data, pd.api.extensions.ExtensionArray):
         return DataFormat.PANDAS_ARRAY
+    elif is_polars_series(input_data):
+        return DataFormat.POLARS_SERIES
+    elif is_polars_dataframe(input_data):
+        return DataFormat.POLARS_DATAFRAME
+    elif is_polars_lazyframe(input_data):
+        return DataFormat.POLARS_LAZYFRAME
     elif is_modin_data_object(input_data):
         return DataFormat.MODIN_OBJECT
     elif is_snowpandas_data_object(input_data):
         return DataFormat.SNOWPANDAS_OBJECT
     elif is_pyspark_data_object(input_data):
         return DataFormat.PYSPARK_OBJECT
+    elif is_xarray_dataset(input_data):
+        return DataFormat.XARRAY_DATASET
+    elif is_xarray_data_array(input_data):
+        return DataFormat.XARRAY_DATA_ARRAY
     elif is_snowpark_data_object(input_data) or is_snowpark_row_list(input_data):
         return DataFormat.SNOWPARK_OBJECT
-    elif isinstance(
-        input_data, (range, EnumMeta, KeysView, ValuesView, deque, map, array.ArrayType)
-    ):
-        return DataFormat.LIST_OF_VALUES
     elif (
-        isinstance(
+        isinstance(
+            input_data,
+            (ChainMap, UserDict, MappingProxyType),
+        )
         or is_dataclass_instance(input_data)
         or is_namedtuple(input_data)
         or is_custom_dict(input_data)
@@ -919,7 +1016,7 @@ def determine_data_format(input_data: Any) -> DataFormat:
             return DataFormat.LIST_OF_RECORDS
         if isinstance(first_element, (list, tuple, set, frozenset)):
             return DataFormat.LIST_OF_ROWS
-    elif isinstance(input_data, dict):
+    elif isinstance(input_data, (dict, Mapping)):
         if not input_data:
             return DataFormat.KEY_VALUE_DICT
         if len(input_data) > 0:
@@ -934,6 +1031,9 @@ def determine_data_format(input_data: Any) -> DataFormat:
         # Use key-value dict as fallback. However, if the values of the dict
         # contains mixed types, it will become non-editable in the frontend.
         return DataFormat.KEY_VALUE_DICT
+    elif is_list_like(input_data):
+        return DataFormat.LIST_OF_VALUES
+
     return DataFormat.UNKNOWN
 
 
@@ -991,7 +1091,7 @@ def convert_pandas_df_to_data_format(
 
     Returns
     -------
-    pd.DataFrame, pd.Series, pyarrow.Table, np.ndarray, list, set, tuple, or dict.
+    pd.DataFrame, pd.Series, pyarrow.Table, np.ndarray, xarray.Dataset, xarray.DataArray, polars.Dataframe, polars.Series, list, set, tuple, or dict.
         The converted dataframe.
     """
 
@@ -1029,6 +1129,25 @@ def convert_pandas_df_to_data_format(
         return pa.Array.from_pandas(_pandas_df_to_series(df))
     elif data_format == DataFormat.PANDAS_SERIES:
         return _pandas_df_to_series(df)
+    elif (
+        data_format == DataFormat.POLARS_DATAFRAME
+        or data_format == DataFormat.POLARS_LAZYFRAME
+    ):
+        import polars as pl
+
+        return pl.from_pandas(df)
+    elif data_format == DataFormat.POLARS_SERIES:
+        import polars as pl
+
+        return pl.from_pandas(_pandas_df_to_series(df))
+    elif data_format == DataFormat.XARRAY_DATASET:
+        import xarray as xr
+
+        return xr.Dataset.from_dataframe(df)
+    elif data_format == DataFormat.XARRAY_DATA_ARRAY:
+        import xarray as xr
+
+        return xr.DataArray.from_series(_pandas_df_to_series(df))
     elif data_format == DataFormat.LIST_OF_RECORDS:
         return _unify_missing_values(df).to_dict(orient="records")
     elif data_format == DataFormat.LIST_OF_ROWS:
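The dataframe_util.py changes above add detection and conversion for Polars and xarray objects. A minimal sketch of what this enables, assuming these helpers back user-facing APIs such as st.dataframe (the sample data and variable names are illustrative, not taken from this diff):

import polars as pl
import streamlit as st
import xarray as xr

# Illustrative objects; any Polars or xarray data would do.
pl_df = pl.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})
lazy_df = pl_df.lazy().filter(pl.col("a") > 1)  # stays unevaluated until collected
ds = xr.Dataset({"temp": ("time", [20.5, 21.0, 19.8])})

st.dataframe(pl_df)    # POLARS_DATAFRAME -> to_pandas() -> Arrow bytes
st.dataframe(lazy_df)  # POLARS_LAZYFRAME -> limit(...).collect(), capped at the row limit
st.dataframe(ds)       # XARRAY_DATASET -> Dataset.to_dataframe()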
streamlit/elements/lib/built_in_chart_utils.py
CHANGED
@@ -641,14 +641,10 @@ def _parse_y_columns(
     elif isinstance(y_from_user, str):
         y_column_list = [y_from_user]
 
-    elif type_util.is_sequence(y_from_user):
-        y_column_list = [str(col) for col in y_from_user]
-
     else:
-
-
-
-        )
+        y_column_list = [
+            str(col) for col in dataframe_util.convert_anything_to_sequence(y_from_user)
+        ]
 
     for col in y_column_list:
         if col not in df.columns:
streamlit/elements/lib/options_selector_utils.py
CHANGED
@@ -17,7 +17,6 @@ from __future__ import annotations
 from typing import (
     Any,
     Sequence,
-    cast,
 )
 
 from streamlit.dataframe_util import OptionSequence, convert_anything_to_sequence
@@ -25,7 +24,6 @@ from streamlit.errors import StreamlitAPIException
 from streamlit.type_util import (
     T,
     check_python_comparable,
-    is_type,
 )
 
 
@@ -33,25 +31,11 @@ def check_and_convert_to_indices(
     opt: Sequence[Any], default_values: Sequence[Any] | Any | None
 ) -> list[int] | None:
     """Perform validation checks and return indices based on the default values."""
-    if default_values is None
+    if default_values is None:
         return None
 
-
-
-    # right below) when x is of type pd.Series() or np.array() throws a
-    # ValueError exception.
-    if is_type(default_values, "numpy.ndarray") or is_type(
-        default_values, "pandas.core.series.Series"
-    ):
-        default_values = list(cast(Sequence[Any], default_values))
-    elif (
-        isinstance(default_values, (tuple, set))
-        or default_values
-        and default_values not in opt
-    ):
-        default_values = list(default_values)
-    else:
-        default_values = [default_values]
+    default_values = convert_anything_to_sequence(default_values)
+
     for value in default_values:
         if value not in opt:
             raise StreamlitAPIException(
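check_and_convert_to_indices now delegates default-value normalization to convert_anything_to_sequence. A small sketch of the expected behavior, derived from the branches added in dataframe_util.py above (illustrative assertions, not tests from the package; it assumes tuples count as list-like for is_list_like):

from enum import Enum

from streamlit.dataframe_util import convert_anything_to_sequence


class Color(Enum):
    RED = "red"
    GREEN = "green"


assert convert_anything_to_sequence(None) == []
assert convert_anything_to_sequence("foo") == ["foo"]                # scalars are wrapped
assert convert_anything_to_sequence({"a": 1, "b": 2}) == ["a", "b"]  # mappings -> keys
assert convert_anything_to_sequence((1, 2, 3)) == [1, 2, 3]          # list-likes copied to a list
assert convert_anything_to_sequence(Color) == [Color.RED, Color.GREEN]  # non-str enum members kept as-is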
streamlit/elements/write.py
CHANGED
@@ -17,9 +17,18 @@ from __future__ import annotations
 import dataclasses
 import inspect
 import types
-from collections import ChainMap, UserDict
+from collections import ChainMap, UserDict, UserList
 from io import StringIO
-from typing import
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Final,
+    Generator,
+    Iterable,
+    List,
+    cast,
+)
 
 from streamlit import dataframe_util, type_util
 from streamlit.errors import StreamlitAPIException
@@ -165,7 +174,7 @@ class WriteMixin:
             if type_util.is_openai_chunk(chunk):
                 # Try to convert OpenAI chat completion chunk to a string:
                 try:
-                    if len(chunk.choices) == 0:
+                    if len(chunk.choices) == 0 or chunk.choices[0].delta is None:
                         # The choices list can be empty. E.g. when using the
                         # AzureOpenAI client, the first chunk will always be empty.
                         chunk = ""
@@ -447,6 +456,7 @@ class WriteMixin:
                 types.MappingProxyType,
                 UserDict,
                 ChainMap,
+                UserList,
             ),
         )
         or type_util.is_custom_dict(arg)
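The last write.py hunk adds UserList to the container types st.write groups with dict-like objects; the surrounding branch is not shown in this diff, so the exact rendering path is an assumption. A minimal sketch:

from collections import UserList

import streamlit as st

# Assumption: UserList instances now take the same rendering path as plain
# lists/dicts instead of falling back to st.write's generic repr handling.
st.write(UserList([1, 2, 3]))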
streamlit/hello/Animation_Demo.py
CHANGED
@@ -69,7 +69,7 @@ def animation_demo() -> None:
     st.button("Re-run")
 
 
-st.set_page_config(page_title="Animation Demo", page_icon="
+st.set_page_config(page_title="Animation Demo", page_icon=":material/animation:")
 st.markdown("# Animation Demo")
 st.sidebar.header("Animation Demo")
 st.write(
streamlit/hello/Dataframe_Demo.py
CHANGED
@@ -64,7 +64,7 @@ def data_frame_demo():
     )
 
 
-st.set_page_config(page_title="DataFrame Demo", page_icon="
+st.set_page_config(page_title="DataFrame Demo", page_icon=":material/table:")
 st.markdown("# DataFrame Demo")
 st.sidebar.header("DataFrame Demo")
 st.write(
streamlit/hello/Hello.py
CHANGED
streamlit/hello/Mapping_Demo.py
CHANGED
@@ -103,7 +103,7 @@ def mapping_demo():
     )
 
 
-st.set_page_config(page_title="Mapping Demo", page_icon="
+st.set_page_config(page_title="Mapping Demo", page_icon=":material/public:")
 st.markdown("# Mapping Demo")
 st.sidebar.header("Mapping Demo")
 st.write(
streamlit/hello/Plotting_Demo.py
CHANGED
@@ -42,7 +42,7 @@ def plotting_demo():
     st.button("Re-run")
 
 
-st.set_page_config(page_title="Plotting Demo", page_icon="
+st.set_page_config(page_title="Plotting Demo", page_icon=":material/show_chart:")
 st.markdown("# Plotting Demo")
 st.sidebar.header("Plotting Demo")
 st.write(
streamlit/hello/streamlit_app.py
CHANGED
@@ -22,11 +22,11 @@ dir_path = Path(__file__).parent
 def run():
     page = st.navigation(
         [
-            st.Page(dir_path / "Hello.py"),
-            st.Page(dir_path / "Animation_Demo.py"),
-            st.Page(dir_path / "Plotting_Demo.py"),
-            st.Page(dir_path / "Mapping_Demo.py"),
-            st.Page(dir_path / "Dataframe_Demo.py"),
+            st.Page(dir_path / "Hello.py", icon=":material/waving_hand:"),
+            st.Page(dir_path / "Animation_Demo.py", icon=":material/animation:"),
+            st.Page(dir_path / "Plotting_Demo.py", icon=":material/show_chart:"),
+            st.Page(dir_path / "Mapping_Demo.py", icon=":material/public:"),
+            st.Page(dir_path / "Dataframe_Demo.py", icon=":material/table:"),
         ]
     )
 
streamlit/runtime/metrics_util.py
CHANGED
@@ -86,6 +86,11 @@ _ATTRIBUTIONS_TO_CHECK: Final = [
     "cudf",
     "xarray",
     "ray",
+    "geopandas",
+    "mars",
+    "tables",
+    "zarr",
+    "datasets",
     # ML & LLM Tools:
     "mistralai",
     "openai",
@@ -141,7 +146,6 @@ _ATTRIBUTIONS_TO_CHECK: Final = [
     "pymilvus",
     "lancedb",
     # Others:
-    "datasets",
     "snowflake",
     "streamlit_extras",
     "streamlit_pydantic",
streamlit/static/asset-manifest.json
CHANGED
@@ -1,13 +1,13 @@
 {
   "files": {
     "main.css": "./static/css/main.554f96d9.css",
-    "main.js": "./static/js/main.5d7956c8.js",
+    "main.js": "./static/js/main.80efcd23.js",
     "static/js/9336.3e046ad7.chunk.js": "./static/js/9336.3e046ad7.chunk.js",
     "static/js/9330.2b4c99e0.chunk.js": "./static/js/9330.2b4c99e0.chunk.js",
     "static/js/2736.7d516fcc.chunk.js": "./static/js/2736.7d516fcc.chunk.js",
     "static/js/3301.0cd98943.chunk.js": "./static/js/3301.0cd98943.chunk.js",
     "static/css/8148.49dfd2ce.chunk.css": "./static/css/8148.49dfd2ce.chunk.css",
-    "static/js/8148.89abd285.chunk.js": "./static/js/8148.89abd285.chunk.js",
+    "static/js/8148.539ddabe.chunk.js": "./static/js/8148.539ddabe.chunk.js",
     "static/css/5441.e3b876c5.chunk.css": "./static/css/5441.e3b876c5.chunk.css",
     "static/js/5441.a564862e.chunk.js": "./static/js/5441.a564862e.chunk.js",
     "static/js/8427.4594845a.chunk.js": "./static/js/8427.4594845a.chunk.js",
@@ -153,6 +153,6 @@
   },
   "entrypoints": [
     "static/css/main.554f96d9.css",
-    "static/js/main.5d7956c8.js"
+    "static/js/main.80efcd23.js"
   ]
 }
streamlit/static/index.html
CHANGED
@@ -1 +1 @@
-<!doctype html><html lang="en"><head><meta charset="UTF-8"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><link rel="shortcut icon" href="./favicon.png"/><link rel="preload" href="./static/media/SourceSansPro-Regular.0d69e5ff5e92ac64a0c9.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-SemiBold.abed79cd0df1827e18cf.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-Bold.118dea98980e20a81ced.woff2" as="font" type="font/woff2" crossorigin><title>Streamlit</title><script>window.prerenderReady=!1</script><script defer="defer" src="./static/js/main.5d7956c8.js"></script><link href="./static/css/main.554f96d9.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
+<!doctype html><html lang="en"><head><meta charset="UTF-8"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><link rel="shortcut icon" href="./favicon.png"/><link rel="preload" href="./static/media/SourceSansPro-Regular.0d69e5ff5e92ac64a0c9.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-SemiBold.abed79cd0df1827e18cf.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-Bold.118dea98980e20a81ced.woff2" as="font" type="font/woff2" crossorigin><title>Streamlit</title><script>window.prerenderReady=!1</script><script defer="defer" src="./static/js/main.80efcd23.js"></script><link href="./static/css/main.554f96d9.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
|