streamlit-nightly 1.28.3.dev20231124__py2.py3-none-any.whl → 1.28.3.dev20231129__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. streamlit/config.py +1 -1
  2. streamlit/connections/snowflake_connection.py +1 -1
  3. streamlit/connections/snowpark_connection.py +1 -1
  4. streamlit/connections/sql_connection.py +13 -8
  5. streamlit/elements/arrow_altair.py +57 -72
  6. streamlit/elements/layouts.py +1 -1
  7. streamlit/elements/widgets/data_editor.py +5 -3
  8. streamlit/env_util.py +2 -2
  9. streamlit/proto/BackMsg_pb2.py +2 -2
  10. streamlit/proto/BackMsg_pb2.pyi +7 -3
  11. streamlit/runtime/app_session.py +13 -0
  12. streamlit/runtime/caching/hashing.py +7 -1
  13. streamlit/static/asset-manifest.json +2 -2
  14. streamlit/static/index.html +1 -1
  15. streamlit/static/static/js/{main.dcf3628b.js → main.ca9076db.js} +2 -2
  16. streamlit/testing/v1/local_script_runner.py +1 -1
  17. streamlit/type_util.py +13 -2
  18. {streamlit_nightly-1.28.3.dev20231124.dist-info → streamlit_nightly-1.28.3.dev20231129.dist-info}/METADATA +1 -1
  19. {streamlit_nightly-1.28.3.dev20231124.dist-info → streamlit_nightly-1.28.3.dev20231129.dist-info}/RECORD +24 -24
  20. {streamlit_nightly-1.28.3.dev20231124.dist-info → streamlit_nightly-1.28.3.dev20231129.dist-info}/WHEEL +1 -1
  21. /streamlit/static/static/js/{main.dcf3628b.js.LICENSE.txt → main.ca9076db.js.LICENSE.txt} +0 -0
  22. {streamlit_nightly-1.28.3.dev20231124.data → streamlit_nightly-1.28.3.dev20231129.data}/scripts/streamlit.cmd +0 -0
  23. {streamlit_nightly-1.28.3.dev20231124.dist-info → streamlit_nightly-1.28.3.dev20231129.dist-info}/entry_points.txt +0 -0
  24. {streamlit_nightly-1.28.3.dev20231124.dist-info → streamlit_nightly-1.28.3.dev20231129.dist-info}/top_level.txt +0 -0
streamlit/config.py CHANGED
@@ -712,7 +712,7 @@ _create_option(
  "server.scriptHealthCheckEnabled",
  visibility="hidden",
  description="""
- Flag for enabling the script health check endpoint. It used for checking if
+ Flag for enabling the script health check endpoint. It's used for checking if
  a script loads successfully. On success, the endpoint will return a 200
  HTTP status code. On failure, the endpoint will return a 503 HTTP status code.
streamlit/connections/snowflake_connection.py CHANGED
@@ -151,7 +151,7 @@ class SnowflakeConnection(BaseConnection["InternalSnowflakeConnection"]):

  Returns
  -------
- pd.DataFrame
+ pandas.DataFrame
  The result of running the query, formatted as a pandas DataFrame.

  Example
streamlit/connections/snowpark_connection.py CHANGED
@@ -116,7 +116,7 @@ class SnowparkConnection(BaseConnection["Session"]):

  Returns
  -------
- pd.DataFrame
+ pandas.DataFrame
  The result of running the query, formatted as a pandas DataFrame.

  Example
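The two hunks above are docstring-only, but for orientation, here is a minimal, hypothetical usage sketch of the query() API they document (the connection name and SQL are placeholders, and a matching [connections.snowflake] entry in .streamlit/secrets.toml is assumed):

    import streamlit as st

    # Placeholder connection name; requires [connections.snowflake] in secrets.toml.
    conn = st.connection("snowflake")

    # query() returns a pandas.DataFrame and caches the result; ttl is in seconds.
    df = conn.query("SELECT CURRENT_TIMESTAMP() AS now", ttl=600)
    st.dataframe(df)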
streamlit/connections/sql_connection.py CHANGED
@@ -133,15 +133,18 @@ class SQLConnection(BaseConnection["Engine"]):
  """Run a read-only query.

  This method implements both query result caching (with caching behavior
- identical to that of using @st.cache_data) as well as simple error handling/retries.
+ identical to that of using ``@st.cache_data``) as well as simple error handling/retries.

  .. note::
  Queries that are run without a specified ttl are cached indefinitely.

  Aside from the ``ttl`` kwarg, all kwargs passed to this function are passed down
- to `pd.read_sql <https://pandas.pydata.org/docs/reference/api/pandas.read_sql.html>`_
+ to |pandas.read_sql|_
  and have the behavior described in the pandas documentation.

+ .. |pandas.read_sql| replace:: ``pandas.read_sql``
+ .. _pandas.read_sql: https://pandas.pydata.org/docs/reference/api/pandas.read_sql.html
+
  Parameters
  ----------
  sql : str
@@ -165,12 +168,14 @@ class SQLConnection(BaseConnection["Engine"]):
  paramstyle <https://peps.python.org/pep-0249/#paramstyle>`_, is supported.
  Default is None.
  **kwargs: dict
- Additional keyword arguments are passed to `pd.read_sql
- <https://pandas.pydata.org/docs/reference/api/pandas.read_sql.html>`_.
+ Additional keyword arguments are passed to |pandas.read_sql|_.
+
+ .. |pandas.read_sql| replace:: ``pandas.read_sql``
+ .. _pandas.read_sql: https://pandas.pydata.org/docs/reference/api/pandas.read_sql.html

  Returns
  -------
- pd.DataFrame
+ pandas.DataFrame
  The result of running the query, formatted as a pandas DataFrame.

  Example
@@ -239,12 +244,12 @@ class SQLConnection(BaseConnection["Engine"]):
  )

  def connect(self) -> "SQLAlchemyConnection":
- """Call ``.connect()`` on the underlying SQLAlchemy Engine, returning a new
- sqlalchemy.engine.Connection object.
+ """Call ``.connect()`` on the underlying SQLAlchemy Engine, returning a new\
+ ``sqlalchemy.engine.Connection`` object.

  Calling this method is equivalent to calling ``self._instance.connect()``.

- NOTE: This method should not be confused with the internal _connect method used
+ NOTE: This method should not be confused with the internal ``_connect`` method used
  to implement a Streamlit Connection.
  """
  return self._instance.connect()
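As a companion to the SQLConnection docstring changes above, a hedged usage sketch of query() with the ttl and params kwargs it describes (table, column, and connection name are placeholders; a [connections.sql] SQLAlchemy URL in secrets.toml is assumed):

    import streamlit as st

    # Placeholder name; requires a [connections.sql] entry in .streamlit/secrets.toml.
    conn = st.connection("sql")

    # Extra kwargs and params are forwarded to pandas.read_sql; results are cached
    # like @st.cache_data and, without a ttl, are cached indefinitely.
    df = conn.query(
        "SELECT * FROM pets WHERE owner = :owner",
        params={"owner": "barbara"},
        ttl=3600,
    )
    st.dataframe(df)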
streamlit/elements/arrow_altair.py CHANGED
@@ -21,18 +21,7 @@ from __future__ import annotations
  from contextlib import nullcontext
  from datetime import date
  from enum import Enum
- from typing import (
- TYPE_CHECKING,
- Any,
- Collection,
- Dict,
- List,
- Optional,
- Sequence,
- Tuple,
- Union,
- cast,
- )
+ from typing import TYPE_CHECKING, Any, Collection, Dict, List, Sequence, Tuple, cast

  import pandas as pd
  from pandas.api.types import infer_dtype, is_integer_dtype
@@ -824,7 +813,7 @@ class ArrowAltairMixin:
  return cast("DeltaGenerator", self)


- def _is_date_column(df: pd.DataFrame, name: Optional[str]) -> bool:
+ def _is_date_column(df: pd.DataFrame, name: str | None) -> bool:
  """True if the column with the given name stores datetime.date values.

  This function just checks the first value in the given column, so
@@ -854,7 +843,7 @@ def _is_date_column(df: pd.DataFrame, name: Optional[str]) -> bool:
  def _melt_data(
  df: pd.DataFrame,
  columns_to_leave_alone: List[str],
- columns_to_melt: Optional[List[str]],
+ columns_to_melt: List[str] | None,
  new_y_column_name: str,
  new_color_column_name: str,
  ) -> pd.DataFrame:
@@ -894,11 +883,11 @@ def _melt_data(

  def prep_data(
  df: pd.DataFrame,
- x_column: Optional[str],
+ x_column: str | None,
  y_column_list: List[str],
- color_column: Optional[str],
- size_column: Optional[str],
- ) -> Tuple[pd.DataFrame, Optional[str], Optional[str], Optional[str], Optional[str]]:
+ color_column: str | None,
+ size_column: str | None,
+ ) -> Tuple[pd.DataFrame, str | None, str | None, str | None, str | None]:
  """Prepares the data for charting. This is also used in add_rows.

  Returns the prepared dataframe and the new names of the x column (taking the index reset into
@@ -938,14 +927,14 @@ def prep_data(

  def _generate_chart(
  chart_type: ChartType,
- data: Optional[Data],
- x_from_user: Optional[str] = None,
- y_from_user: Union[str, Sequence[str], None] = None,
- color_from_user: Union[str, Color, List[Color], None] = None,
- size_from_user: Union[str, float, None] = None,
+ data: Data | None,
+ x_from_user: str | None = None,
+ y_from_user: str | Sequence[str] | None = None,
+ color_from_user: str | Color | List[Color] | None = None,
+ size_from_user: str | float | None = None,
  width: int = 0,
  height: int = 0,
- ) -> alt.Chart:
+ ) -> Tuple[alt.Chart, AddRowsMetadata]:
  """Function to use the chart's type, data columns and indices to figure out the chart's spec."""
  import altair as alt

@@ -1030,8 +1019,8 @@ def _generate_chart(


  def _maybe_reset_index_in_place(
- df: pd.DataFrame, x_column: Optional[str], y_column_list: List[str]
- ) -> Optional[str]:
+ df: pd.DataFrame, x_column: str | None, y_column_list: List[str]
+ ) -> str | None:
  if x_column is None and len(y_column_list) > 0:
  if df.index.name is None:
  # Pick column name that is unlikely to collide with user-given names.
@@ -1046,9 +1035,7 @@ def _maybe_reset_index_in_place(
  return x_column


- def _drop_unused_columns(
- df: pd.DataFrame, *column_names: Optional[str]
- ) -> pd.DataFrame:
+ def _drop_unused_columns(df: pd.DataFrame, *column_names: str | None) -> pd.DataFrame:
  """Returns a subset of df, selecting only column_names that aren't None."""

  # We can't just call set(col_names) because sets don't have stable ordering,
@@ -1069,7 +1056,7 @@ def _drop_unused_columns(
  return df[keep]


- def _maybe_convert_color_column_in_place(df: pd.DataFrame, color_column: Optional[str]):
+ def _maybe_convert_color_column_in_place(df: pd.DataFrame, color_column: str | None):
  """If needed, convert color column to a format Vega understands."""
  if color_column is None or len(df[color_column]) == 0:
  return
@@ -1090,11 +1077,11 @@ def _maybe_convert_color_column_in_place(df: pd.DataFrame, color_column: Optiona

  def _convert_col_names_to_str_in_place(
  df: pd.DataFrame,
- x_column: Optional[str],
+ x_column: str | None,
  y_column_list: List[str],
- color_column: Optional[str],
- size_column: Optional[str],
- ) -> Tuple[Optional[str], List[str], Optional[str], Optional[str]]:
+ color_column: str | None,
+ size_column: str | None,
+ ) -> Tuple[str | None, List[str], str | None, str | None]:
  """Converts column names to strings, since Vega-Lite does not accept ints, etc."""
  column_names = list(df.columns) # list() converts RangeIndex, etc, to regular list.
  str_column_names = [str(c) for c in column_names]
@@ -1110,7 +1097,7 @@ def _convert_col_names_to_str_in_place(

  def _parse_generic_column(
  df: pd.DataFrame, column_or_value: Any
- ) -> Tuple[Optional[str], Any]:
+ ) -> Tuple[str | None, Any]:
  if isinstance(column_or_value, str) and column_or_value in df.columns:
  column_name = column_or_value
  value = None
@@ -1121,7 +1108,7 @@ def _parse_generic_column(
  return column_name, value


- def _parse_x_column(df: pd.DataFrame, x_from_user: Optional[str]) -> Optional[str]:
+ def _parse_x_column(df: pd.DataFrame, x_from_user: str | None) -> str | None:
  if x_from_user is None:
  return None

@@ -1141,8 +1128,8 @@ def _parse_x_column(df: pd.DataFrame, x_from_user: Optional[str]) -> Optional[st

  def _parse_y_columns(
  df: pd.DataFrame,
- y_from_user: Union[str, Sequence[str], None],
- x_column: Union[str, None],
+ y_from_user: str | Sequence[str] | None,
+ x_column: str | None,
  ) -> List[str]:
  y_column_list: List[str] = []

@@ -1173,8 +1160,8 @@ def _parse_y_columns(


  def _get_opacity_encoding(
- chart_type: ChartType, color_column: Optional[str]
- ) -> Optional[alt.OpacityValue]:
+ chart_type: ChartType, color_column: str | None
+ ) -> alt.OpacityValue | None:
  import altair as alt

  if color_column and chart_type == ChartType.AREA:
@@ -1183,7 +1170,7 @@ def _get_opacity_encoding(
  return None


- def _get_scale(df: pd.DataFrame, column_name: Optional[str]) -> alt.Scale:
+ def _get_scale(df: pd.DataFrame, column_name: str | None) -> alt.Scale:
  import altair as alt

  # Set the X and Y axes' scale to "utc" if they contain date values.
@@ -1197,9 +1184,7 @@ def _get_scale(df: pd.DataFrame, column_name: Optional[str]) -> alt.Scale:
  return alt.Scale()


- def _get_axis_config(
- df: pd.DataFrame, column_name: Optional[str], grid: bool
- ) -> alt.Axis:
+ def _get_axis_config(df: pd.DataFrame, column_name: str | None, grid: bool) -> alt.Axis:
  import altair as alt

  if column_name is not None and is_integer_dtype(df[column_name]):
@@ -1212,13 +1197,13 @@ def _get_axis_config(

  def _maybe_melt(
  df: pd.DataFrame,
- x_column: Optional[str],
+ x_column: str | None,
  y_column_list: List[str],
- color_column: Optional[str],
- size_column: Optional[str],
- ) -> Tuple[pd.DataFrame, Optional[str], Optional[str]]:
+ color_column: str | None,
+ size_column: str | None,
+ ) -> Tuple[pd.DataFrame, str | None, str | None]:
  """If multiple columns are set for y, melt the dataframe into long format."""
- y_column: Optional[str]
+ y_column: str | None

  if len(y_column_list) == 0:
  y_column = None
@@ -1246,8 +1231,8 @@ def _maybe_melt(

  def _get_x_encoding(
  df: pd.DataFrame,
- x_column: Optional[str],
- x_from_user: Optional[str],
+ x_column: str | None,
+ x_from_user: str | None,
  chart_type: ChartType,
  ) -> alt.X:
  import altair as alt
@@ -1286,8 +1271,8 @@ def _get_x_encoding(

  def _get_y_encoding(
  df: pd.DataFrame,
- y_column: Optional[str],
- y_from_user: Union[str, Sequence[str], None],
+ y_column: str | None,
+ y_from_user: str | Sequence[str] | None,
  ) -> alt.Y:
  import altair as alt

@@ -1325,11 +1310,11 @@ def _get_y_encoding(

  def _get_color_encoding(
  df: pd.DataFrame,
- color_value: Optional[Color],
- color_column: Optional[str],
+ color_value: Color | None,
+ color_column: str | None,
  y_column_list: List[str],
- color_from_user: Union[str, Color, List[Color], None],
- ) -> alt.Color:
+ color_from_user: str | Color | List[Color] | None,
+ ) -> alt.Color | alt.ColorValue | None:
  import altair as alt

  has_color_value = color_value not in [None, [], tuple()]
@@ -1365,7 +1350,7 @@ def _get_color_encoding(
  raise StreamlitInvalidColorError(df, color_from_user)

  elif color_column is not None:
- column_type: Union[str, Tuple[str, List[Any]]]
+ column_type: str | Tuple[str, List[Any]]

  if color_column == MELTED_COLOR_COLUMN_NAME:
  column_type = "nominal"
@@ -1405,9 +1390,9 @@ def _get_color_encoding(

  def _get_size_encoding(
  chart_type: ChartType,
- size_column: Optional[str],
- size_value: Union[str, float, None],
- ) -> alt.Size:
+ size_column: str | None,
+ size_value: str | float | None,
+ ) -> alt.Size | alt.SizeValue | None:
  import altair as alt

  if chart_type == ChartType.SCATTER:
@@ -1438,9 +1423,9 @@
  def _get_tooltip_encoding(
  x_column: str,
  y_column: str,
- size_column: Optional[str],
- color_column: Optional[str],
- color_enc: alt.Color,
+ size_column: str | None,
+ color_column: str | None,
+ color_enc: alt.Color | alt.ColorValue | None,
  ) -> list[alt.Tooltip]:
  import altair as alt

@@ -1489,8 +1474,8 @@ def _get_tooltip_encoding(


  def _get_x_encoding_type(
- df: pd.DataFrame, chart_type: ChartType, x_column: Optional[str]
- ) -> Union[str, Tuple[str, List[Any]]]:
+ df: pd.DataFrame, chart_type: ChartType, x_column: str | None
+ ) -> type_util.VegaLiteType:
  if x_column is None:
  return "quantitative" # Anything. If None, Vega-Lite may hide the axis.

@@ -1503,8 +1488,8 @@ def _get_x_encoding_type(


  def _get_y_encoding_type(
- df: pd.DataFrame, y_column: Optional[str]
- ) -> Union[str, Tuple[str, List[Any]]]:
+ df: pd.DataFrame, y_column: str | None
+ ) -> type_util.VegaLiteType:
  if y_column:
  return type_util.infer_vegalite_type(df[y_column])

@@ -1515,7 +1500,7 @@ def marshall(
  vega_lite_chart: ArrowVegaLiteChartProto,
  altair_chart: alt.Chart,
  use_container_width: bool = False,
- theme: Union[None, Literal["streamlit"]] = "streamlit",
+ theme: None | Literal["streamlit"] = "streamlit",
  **kwargs: Any,
  ) -> None:
  """Marshall chart's data into proto."""
@@ -1536,13 +1521,13 @@ def marshall(
  datasets[name] = data
  return {"name": name}

- alt.data_transformers.register("id", id_transform)
+ alt.data_transformers.register("id", id_transform) # type: ignore[attr-defined,unused-ignore]

  # The default altair theme has some width/height defaults defined
  # which are not useful for Streamlit. Therefore, we change the theme to
  # "none" to avoid those defaults.
- with alt.themes.enable("none") if alt.themes.active == "default" else nullcontext():
- with alt.data_transformers.enable("id"):
+ with alt.themes.enable("none") if alt.themes.active == "default" else nullcontext(): # type: ignore[attr-defined,unused-ignore]
+ with alt.data_transformers.enable("id"): # type: ignore[attr-defined,unused-ignore]
  chart_dict = altair_chart.to_dict()

  # Put datasets back into the chart dict but note how they weren't
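These hunks mostly swap typing.Optional/Union for PEP 604 "X | None" unions. A small, self-contained sketch of why that syntax is safe here even on Python 3.8/3.9: the module already has "from __future__ import annotations", so annotations stay unevaluated strings (the names below are illustrative, not from this file):

    from __future__ import annotations

    from typing import List, Tuple


    def pick_columns(names: List[str], x: str | None = None) -> Tuple[str | None, List[str]]:
        # With postponed evaluation, "str | None" is never evaluated at runtime,
        # so this parses and runs on Python versions before 3.10 as well.
        return x, [n for n in names if n != x]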
streamlit/elements/layouts.py CHANGED
@@ -68,7 +68,7 @@ class LayoutsMixin:

  >>> import streamlit as st
  >>>
- >>> container = st.container()
+ >>> container = st.container(border=True)
  >>> container.write("This is inside the container")
  >>> st.write("This is outside the container")
  >>>
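For completeness, the bordered container from the updated doctest can also be used as a context manager (shown here only as an illustrative variant with equivalent behavior):

    import streamlit as st

    # Everything inside the "with" block renders inside the bordered container.
    with st.container(border=True):
        st.write("This is inside the container")
    st.write("This is outside the container")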
streamlit/elements/widgets/data_editor.py CHANGED
@@ -606,8 +606,10 @@ class DataEditorMixin:
  - Mixing data types within a column can make the column uneditable.
  - Additionally, the following data types are not yet supported for editing:
  complex, list, tuple, bytes, bytearray, memoryview, dict, set, frozenset,
- datetime.timedelta, decimal.Decimal, fractions.Fraction, pandas.Interval,
- pandas.Period, pandas.Timedelta.
+ fractions.Fraction, pandas.Interval, and pandas.Period.
+ - To prevent overflow in JavaScript, columns containing datetime.timedelta
+ and pandas.Timedelta values will default to uneditable but this can be
+ changed through column configuration.

  width : int or None
  Desired width of the data editor expressed in pixels. If None, the width will
@@ -680,7 +682,7 @@ class DataEditorMixin:
  pandas.DataFrame, pandas.Series, pyarrow.Table, numpy.ndarray, list, set, tuple, or dict.
  The edited data. The edited data is returned in its original data type if
  it corresponds to any of the supported return types. All other data types
- are returned as a ``pd.DataFrame``.
+ are returned as a ``pandas.DataFrame``.

  Examples
  --------
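The new note points at column configuration as the override hook for Timedelta columns. A hedged sketch of that hook (column names are placeholders; whether a given Timedelta column becomes editable depends on the column type chosen in the configuration):

    import datetime

    import pandas as pd
    import streamlit as st

    df = pd.DataFrame(
        {
            "task": ["build", "test"],
            "duration": [datetime.timedelta(minutes=5), datetime.timedelta(minutes=12)],
        }
    )

    # By default the Timedelta column stays read-only; column_config is where
    # per-column behavior (labels, types, editability) is adjusted.
    edited = st.data_editor(
        df,
        column_config={"task": st.column_config.TextColumn("Task name")},
    )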
streamlit/env_util.py CHANGED
@@ -54,6 +54,6 @@ def is_repl():

  def is_executable_in_path(name):
  """Check if executable is in OS path."""
- from distutils.spawn import find_executable
+ from shutil import which

- return find_executable(name) is not None
+ return which(name) is not None
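The swap replaces the deprecated distutils helper (distutils was removed in Python 3.12) with the standard-library equivalent. A quick stand-alone illustration of the behavior the new code relies on:

    import shutil

    # shutil.which returns the executable's full path, or None when it is not
    # on PATH, so "which(name) is not None" is a drop-in truthiness check.
    print(shutil.which("git"))             # e.g. /usr/bin/git, or None
    print(shutil.which("git") is not None)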
streamlit/proto/BackMsg_pb2.py CHANGED
@@ -15,7 +15,7 @@ from streamlit.proto import ClientState_pb2 as streamlit_dot_proto_dot_ClientSta
  from streamlit.proto import Common_pb2 as streamlit_dot_proto_dot_Common__pb2

- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dstreamlit/proto/BackMsg.proto\x1a!streamlit/proto/ClientState.proto\x1a\x1cstreamlit/proto/Common.proto\"\xd9\x02\n\x07\x42\x61\x63kMsg\x12\x15\n\x0b\x63lear_cache\x18\x05 \x01(\x08H\x00\x12\x19\n\x0fset_run_on_save\x18\x06 \x01(\x08H\x00\x12\x15\n\x0bstop_script\x18\x07 \x01(\x08H\x00\x12$\n\x0crerun_script\x18\x0b \x01(\x0b\x32\x0c.ClientStateH\x00\x12\x17\n\rload_git_info\x18\x0c \x01(\x08H\x00\x12$\n\x1a\x64\x65\x62ug_disconnect_websocket\x18\x0e \x01(\x08H\x00\x12 \n\x16\x64\x65\x62ug_shutdown_runtime\x18\x0f \x01(\x08H\x00\x12-\n\x11\x66ile_urls_request\x18\x10 \x01(\x0b\x32\x10.FileURLsRequestH\x00\x12\x1d\n\x15\x64\x65\x62ug_last_backmsg_id\x18\r \x01(\tB\x06\n\x04typeJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x08\x10\tJ\x04\x08\t\x10\nJ\x04\x08\n\x10\x0b\x62\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1dstreamlit/proto/BackMsg.proto\x1a!streamlit/proto/ClientState.proto\x1a\x1cstreamlit/proto/Common.proto\"\xf2\x02\n\x07\x42\x61\x63kMsg\x12\x15\n\x0b\x63lear_cache\x18\x05 \x01(\x08H\x00\x12\x19\n\x0fset_run_on_save\x18\x06 \x01(\x08H\x00\x12\x15\n\x0bstop_script\x18\x07 \x01(\x08H\x00\x12$\n\x0crerun_script\x18\x0b \x01(\x0b\x32\x0c.ClientStateH\x00\x12\x17\n\rload_git_info\x18\x0c \x01(\x08H\x00\x12$\n\x1a\x64\x65\x62ug_disconnect_websocket\x18\x0e \x01(\x08H\x00\x12 \n\x16\x64\x65\x62ug_shutdown_runtime\x18\x0f \x01(\x08H\x00\x12-\n\x11\x66ile_urls_request\x18\x10 \x01(\x0b\x32\x10.FileURLsRequestH\x00\x12\x17\n\rapp_heartbeat\x18\x11 \x01(\x08H\x00\x12\x1d\n\x15\x64\x65\x62ug_last_backmsg_id\x18\r \x01(\tB\x06\n\x04typeJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05J\x04\x08\x08\x10\tJ\x04\x08\t\x10\nJ\x04\x08\n\x10\x0b\x62\x06proto3')
 
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
  _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'streamlit.proto.BackMsg_pb2', globals())
@@ -23,5 +23,5 @@ if _descriptor._USE_C_DESCRIPTORS == False:
 
  DESCRIPTOR._options = None
  _BACKMSG._serialized_start=99
- _BACKMSG._serialized_end=444
+ _BACKMSG._serialized_end=469
  # @@protoc_insertion_point(module_scope)
streamlit/proto/BackMsg_pb2.pyi CHANGED
@@ -43,6 +43,7 @@ class BackMsg(google.protobuf.message.Message):
  DEBUG_DISCONNECT_WEBSOCKET_FIELD_NUMBER: builtins.int
  DEBUG_SHUTDOWN_RUNTIME_FIELD_NUMBER: builtins.int
  FILE_URLS_REQUEST_FIELD_NUMBER: builtins.int
+ APP_HEARTBEAT_FIELD_NUMBER: builtins.int
  DEBUG_LAST_BACKMSG_ID_FIELD_NUMBER: builtins.int
  clear_cache: builtins.bool
  """DEPRECATED. Asks the server to run the script with this object
@@ -77,6 +78,8 @@ class BackMsg(google.protobuf.message.Message):
  """Requests that the server generate URLs for getting/uploading/deleting
  files for the `st.file_uploader` widget
  """
+ app_heartbeat: builtins.bool
+ """Sends an app heartbeat message through the websocket"""
  debug_last_backmsg_id: builtins.str
  """An ID used to associate this BackMsg with the corresponding ForwardMsgs
  that are sent to the client due to it. As its name suggests, this field
@@ -93,10 +96,11 @@ class BackMsg(google.protobuf.message.Message):
  debug_disconnect_websocket: builtins.bool = ...,
  debug_shutdown_runtime: builtins.bool = ...,
  file_urls_request: streamlit.proto.Common_pb2.FileURLsRequest | None = ...,
+ app_heartbeat: builtins.bool = ...,
  debug_last_backmsg_id: builtins.str = ...,
  ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["clear_cache", b"clear_cache", "debug_disconnect_websocket", b"debug_disconnect_websocket", "debug_shutdown_runtime", b"debug_shutdown_runtime", "file_urls_request", b"file_urls_request", "load_git_info", b"load_git_info", "rerun_script", b"rerun_script", "set_run_on_save", b"set_run_on_save", "stop_script", b"stop_script", "type", b"type"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["clear_cache", b"clear_cache", "debug_disconnect_websocket", b"debug_disconnect_websocket", "debug_last_backmsg_id", b"debug_last_backmsg_id", "debug_shutdown_runtime", b"debug_shutdown_runtime", "file_urls_request", b"file_urls_request", "load_git_info", b"load_git_info", "rerun_script", b"rerun_script", "set_run_on_save", b"set_run_on_save", "stop_script", b"stop_script", "type", b"type"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal["type", b"type"]) -> typing_extensions.Literal["clear_cache", "set_run_on_save", "stop_script", "rerun_script", "load_git_info", "debug_disconnect_websocket", "debug_shutdown_runtime", "file_urls_request"] | None: ...
+ def HasField(self, field_name: typing_extensions.Literal["app_heartbeat", b"app_heartbeat", "clear_cache", b"clear_cache", "debug_disconnect_websocket", b"debug_disconnect_websocket", "debug_shutdown_runtime", b"debug_shutdown_runtime", "file_urls_request", b"file_urls_request", "load_git_info", b"load_git_info", "rerun_script", b"rerun_script", "set_run_on_save", b"set_run_on_save", "stop_script", b"stop_script", "type", b"type"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["app_heartbeat", b"app_heartbeat", "clear_cache", b"clear_cache", "debug_disconnect_websocket", b"debug_disconnect_websocket", "debug_last_backmsg_id", b"debug_last_backmsg_id", "debug_shutdown_runtime", b"debug_shutdown_runtime", "file_urls_request", b"file_urls_request", "load_git_info", b"load_git_info", "rerun_script", b"rerun_script", "set_run_on_save", b"set_run_on_save", "stop_script", b"stop_script", "type", b"type"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["type", b"type"]) -> typing_extensions.Literal["clear_cache", "set_run_on_save", "stop_script", "rerun_script", "load_git_info", "debug_disconnect_websocket", "debug_shutdown_runtime", "file_urls_request", "app_heartbeat"] | None: ...
 
  global___BackMsg = BackMsg
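Grounded in the stub above, a short sketch of how the new app_heartbeat variant of the "type" oneof behaves with the standard protobuf API:

    from streamlit.proto.BackMsg_pb2 import BackMsg

    # Setting app_heartbeat selects it as the active member of the "type" oneof.
    msg = BackMsg(app_heartbeat=True)
    assert msg.WhichOneof("type") == "app_heartbeat"
    assert msg.HasField("app_heartbeat")

    payload = msg.SerializeToString()   # bytes as they would travel over the websocket
    decoded = BackMsg.FromString(payload)
    assert decoded.app_heartbeat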
streamlit/runtime/app_session.py CHANGED
@@ -289,6 +289,8 @@ class AppSession:
  self._handle_git_information_request()
  elif msg_type == "clear_cache":
  self._handle_clear_cache_request()
+ elif msg_type == "app_heartbeat":
+ self._handle_app_heartbeat_request()
  elif msg_type == "set_run_on_save":
  self._handle_set_run_on_save_request(msg.set_run_on_save)
  elif msg_type == "stop_script":
@@ -737,6 +739,17 @@ class AppSession:
  caching.cache_resource.clear()
  self._session_state.clear()

+ def _handle_app_heartbeat_request(self) -> None:
+ """Handle an incoming app heartbeat.
+
+ The heartbeat indicates the frontend is active and keeps the
+ websocket from going idle and disconnecting.
+
+ The actual handler here is a noop
+
+ """
+ pass
+
  def _handle_set_run_on_save_request(self, new_value: bool) -> None:
  """Change our run_on_save flag to the given value.
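The handler itself is intentionally a no-op: the keep-alive effect comes from the message traffic. Purely as an illustration (this is not the Streamlit frontend's actual sender), a periodic heartbeat loop over some websocket write function might look like this, where send() is an assumed placeholder:

    import asyncio

    from streamlit.proto.BackMsg_pb2 import BackMsg

    async def heartbeat_loop(send, interval_s: float = 30.0) -> None:
        # send() is a stand-in for whatever coroutine writes bytes to the websocket.
        while True:
            await send(BackMsg(app_heartbeat=True).SerializeToString())
            await asyncio.sleep(interval_s)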
streamlit/runtime/caching/hashing.py CHANGED
@@ -22,6 +22,7 @@ import inspect
  import io
  import os
  import pickle
+ import struct
  import sys
  import tempfile
  import threading
@@ -224,6 +225,11 @@ def _int_to_bytes(i: int) -> bytes:
  return i.to_bytes(num_bytes, "little", signed=True)


+ def _float_to_bytes(f: float) -> bytes:
+ # Floats are 64bit in Python, so we need to use the "d" format.
+ return struct.pack("<d", f)
+
+
  def _key(obj: Optional[Any]) -> Any:
  """Return key for memoization."""

@@ -361,7 +367,7 @@ class _CacheFuncHasher:
  return obj.encode()

  elif isinstance(obj, float):
- return self.to_bytes(hash(obj))
+ return _float_to_bytes(obj)

  elif isinstance(obj, int):
  return _int_to_bytes(obj)
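The change hashes the float's exact IEEE-754 bytes instead of Python's hash() of it. A small stand-alone illustration of the difference (values chosen arbitrarily):

    import struct

    # struct.pack("<d", x) is the 8-byte little-endian IEEE-754 encoding of x,
    # so distinct float values produce distinct byte strings.
    print(struct.pack("<d", 0.1).hex())   # 9a9999999999b93f
    print(struct.pack("<d", 2.0).hex())   # 0000000000000040

    # Python's built-in hash() conflates numerically equal values across types:
    assert hash(2.0) == hash(2)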
streamlit/static/asset-manifest.json CHANGED
@@ -1,7 +1,7 @@
  {
  "files": {
  "main.css": "./static/css/main.77d1c464.css",
- "main.js": "./static/js/main.dcf3628b.js",
+ "main.js": "./static/js/main.ca9076db.js",
  "static/js/9336.2d95d840.chunk.js": "./static/js/9336.2d95d840.chunk.js",
  "static/js/9330.c0dd1723.chunk.js": "./static/js/9330.c0dd1723.chunk.js",
  "static/js/2736.ec449a3a.chunk.js": "./static/js/2736.ec449a3a.chunk.js",
@@ -149,6 +149,6 @@
  },
  "entrypoints": [
  "static/css/main.77d1c464.css",
- "static/js/main.dcf3628b.js"
+ "static/js/main.ca9076db.js"
  ]
  }
streamlit/static/index.html CHANGED
@@ -1 +1 @@
- <!doctype html><html lang="en"><head><meta charset="UTF-8"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><link rel="shortcut icon" href="./favicon.png"/><link rel="preload" href="./static/media/SourceSansPro-Regular.0d69e5ff5e92ac64a0c9.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSerifPro-SemiBold.5c1d378dd5990ef334ca.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-Bold.118dea98980e20a81ced.woff2" as="font" type="font/woff2" crossorigin><title>Streamlit</title><script>window.prerenderReady=!1</script><script defer="defer" src="./static/js/main.dcf3628b.js"></script><link href="./static/css/main.77d1c464.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>
+ <!doctype html><html lang="en"><head><meta charset="UTF-8"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><link rel="shortcut icon" href="./favicon.png"/><link rel="preload" href="./static/media/SourceSansPro-Regular.0d69e5ff5e92ac64a0c9.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSerifPro-SemiBold.5c1d378dd5990ef334ca.woff2" as="font" type="font/woff2" crossorigin><link rel="preload" href="./static/media/SourceSansPro-Bold.118dea98980e20a81ced.woff2" as="font" type="font/woff2" crossorigin><title>Streamlit</title><script>window.prerenderReady=!1</script><script defer="defer" src="./static/js/main.ca9076db.js"></script><link href="./static/css/main.77d1c464.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div></body></html>