streamlit-nightly 1.37.2.dev20240814__py2.py3-none-any.whl → 1.37.2.dev20240816__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. streamlit/commands/execution_control.py +1 -4
  2. streamlit/commands/experimental_query_params.py +1 -1
  3. streamlit/commands/logo.py +1 -1
  4. streamlit/commands/navigation.py +1 -1
  5. streamlit/commands/page_config.py +1 -1
  6. streamlit/components/v1/component_arrow.py +2 -2
  7. streamlit/components/v1/component_registry.py +1 -1
  8. streamlit/components/v1/custom_component.py +1 -1
  9. streamlit/cursor.py +1 -1
  10. streamlit/dataframe_util.py +184 -25
  11. streamlit/elements/arrow.py +1 -1
  12. streamlit/elements/json.py +13 -7
  13. streamlit/elements/lib/dialog.py +1 -1
  14. streamlit/elements/lib/mutable_status_container.py +1 -1
  15. streamlit/elements/lib/policies.py +1 -1
  16. streamlit/elements/media.py +3 -5
  17. streamlit/elements/plotly_chart.py +1 -1
  18. streamlit/elements/vega_charts.py +1 -1
  19. streamlit/elements/widgets/button.py +7 -4
  20. streamlit/elements/widgets/button_group.py +28 -9
  21. streamlit/elements/widgets/chat.py +1 -1
  22. streamlit/elements/widgets/data_editor.py +1 -1
  23. streamlit/elements/write.py +18 -3
  24. streamlit/error_util.py +10 -8
  25. streamlit/navigation/page.py +1 -1
  26. streamlit/platform.py +1 -1
  27. streamlit/runtime/caching/cache_data_api.py +1 -1
  28. streamlit/runtime/caching/cache_resource_api.py +1 -1
  29. streamlit/runtime/caching/cached_message_replay.py +1 -1
  30. streamlit/runtime/context.py +6 -5
  31. streamlit/runtime/forward_msg_queue.py +18 -1
  32. streamlit/runtime/fragment.py +7 -5
  33. streamlit/runtime/media_file_manager.py +3 -1
  34. streamlit/runtime/metrics_util.py +2 -7
  35. streamlit/runtime/scriptrunner/__init__.py +7 -4
  36. streamlit/runtime/scriptrunner/exec_code.py +6 -3
  37. streamlit/runtime/scriptrunner/script_runner.py +10 -9
  38. streamlit/runtime/scriptrunner_utils/__init__.py +19 -0
  39. streamlit/runtime/{scriptrunner → scriptrunner_utils}/exceptions.py +1 -1
  40. streamlit/runtime/{scriptrunner → scriptrunner_utils}/script_requests.py +61 -6
  41. streamlit/runtime/{scriptrunner → scriptrunner_utils}/script_run_context.py +1 -1
  42. streamlit/runtime/state/__init__.py +0 -2
  43. streamlit/runtime/state/common.py +1 -1
  44. streamlit/runtime/state/query_params.py +1 -6
  45. streamlit/runtime/state/session_state.py +1 -5
  46. streamlit/runtime/state/session_state_proxy.py +3 -1
  47. streamlit/runtime/state/widgets.py +0 -59
  48. streamlit/static/asset-manifest.json +17 -17
  49. streamlit/static/index.html +1 -1
  50. streamlit/static/static/js/{1168.2a7e18da.chunk.js → 1168.2a9806f0.chunk.js} +1 -1
  51. streamlit/static/static/js/{1451.3d44ca81.chunk.js → 1451.d93e956f.chunk.js} +1 -1
  52. streamlit/static/static/js/{178.7bea8c5d.chunk.js → 178.ddebe26b.chunk.js} +1 -1
  53. streamlit/static/static/js/2469.6217c5c3.chunk.js +1 -0
  54. streamlit/static/static/js/{2634.1249dc7a.chunk.js → 2634.4e2535ee.chunk.js} +1 -1
  55. streamlit/static/static/js/{2736.dcbc9141.chunk.js → 2736.3d50ec7f.chunk.js} +1 -1
  56. streamlit/static/static/js/{3301.45709e64.chunk.js → 3301.7379a9fd.chunk.js} +5 -5
  57. streamlit/static/static/js/4113.786b0142.chunk.js +1 -0
  58. streamlit/static/static/js/4500.d884c792.chunk.js +1 -0
  59. streamlit/static/static/js/6853.a1c4fa00.chunk.js +1 -0
  60. streamlit/static/static/js/{7602.2331daf7.chunk.js → 7602.33571c14.chunk.js} +1 -1
  61. streamlit/static/static/js/7805.ba32ae70.chunk.js +1 -0
  62. streamlit/static/static/js/8148.b905db99.chunk.js +1 -0
  63. streamlit/static/static/js/8427.b1a68937.chunk.js +1 -0
  64. streamlit/static/static/js/{9330.2b4c99e0.chunk.js → 9330.32e8a53a.chunk.js} +1 -1
  65. streamlit/static/static/js/main.90c4efd0.js +28 -0
  66. streamlit/testing/v1/local_script_runner.py +1 -1
  67. streamlit/type_util.py +17 -47
  68. streamlit/user_info.py +4 -2
  69. streamlit/web/server/stats_request_handler.py +1 -3
  70. streamlit/web/server/websocket_headers.py +1 -1
  71. {streamlit_nightly-1.37.2.dev20240814.dist-info → streamlit_nightly-1.37.2.dev20240816.dist-info}/METADATA +1 -1
  72. {streamlit_nightly-1.37.2.dev20240814.dist-info → streamlit_nightly-1.37.2.dev20240816.dist-info}/RECORD +77 -76
  73. streamlit/static/static/js/2469.4bb197dd.chunk.js +0 -1
  74. streamlit/static/static/js/4113.ca4d2d7b.chunk.js +0 -1
  75. streamlit/static/static/js/4500.c007e274.chunk.js +0 -1
  76. streamlit/static/static/js/6853.5d19f25b.chunk.js +0 -1
  77. streamlit/static/static/js/7805.f7c8d475.chunk.js +0 -1
  78. streamlit/static/static/js/8148.7805e73f.chunk.js +0 -1
  79. streamlit/static/static/js/8427.69ce2c45.chunk.js +0 -1
  80. streamlit/static/static/js/main.b519dd78.js +0 -28
  81. /streamlit/static/static/js/{main.b519dd78.js.LICENSE.txt → main.90c4efd0.js.LICENSE.txt} +0 -0
  82. {streamlit_nightly-1.37.2.dev20240814.data → streamlit_nightly-1.37.2.dev20240816.data}/scripts/streamlit.cmd +0 -0
  83. {streamlit_nightly-1.37.2.dev20240814.dist-info → streamlit_nightly-1.37.2.dev20240816.dist-info}/WHEEL +0 -0
  84. {streamlit_nightly-1.37.2.dev20240814.dist-info → streamlit_nightly-1.37.2.dev20240816.dist-info}/entry_points.txt +0 -0
  85. {streamlit_nightly-1.37.2.dev20240814.dist-info → streamlit_nightly-1.37.2.dev20240816.dist-info}/top_level.txt +0 -0
streamlit/commands/execution_control.py CHANGED
@@ -16,12 +16,11 @@ from __future__ import annotations
 
 import os
 from itertools import dropwhile
-from typing import Final, Literal, NoReturn
+from typing import Literal, NoReturn
 
 import streamlit as st
 from streamlit.errors import NoSessionContext, StreamlitAPIException
 from streamlit.file_util import get_main_script_directory, normalize_path_join
-from streamlit.logger import get_logger
 from streamlit.navigation.page import StreamlitPage
 from streamlit.runtime.metrics_util import gather_metrics
 from streamlit.runtime.scriptrunner import (
@@ -30,8 +29,6 @@ from streamlit.runtime.scriptrunner import (
     get_script_run_ctx,
 )
 
-_LOGGER: Final = get_logger(__name__)
-
 
 @gather_metrics("stop")
 def stop() -> NoReturn:  # type: ignore[misc]
streamlit/commands/experimental_query_params.py CHANGED
@@ -26,7 +26,7 @@ from streamlit.constants import (
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 
 
 @gather_metrics("experimental_get_query_params")
streamlit/commands/logo.py CHANGED
@@ -21,7 +21,7 @@ from streamlit.elements.image import AtomicImage, WidthBehaviour, image_to_url
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 
 
 def _invalid_logo_text(field_name: str):
streamlit/commands/navigation.py CHANGED
@@ -23,7 +23,7 @@ from streamlit.errors import StreamlitAPIException
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
 from streamlit.proto.Navigation_pb2 import Navigation as NavigationProto
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner.script_run_context import (
+from streamlit.runtime.scriptrunner_utils.script_run_context import (
     ScriptRunContext,
     get_script_run_ctx,
 )
streamlit/commands/page_config.py CHANGED
@@ -25,7 +25,7 @@ from streamlit.errors import StreamlitAPIException
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg as ForwardProto
 from streamlit.proto.PageConfig_pb2 import PageConfig as PageConfigProto
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.string_util import is_emoji, validate_material_icon
 from streamlit.url_util import is_url
 from streamlit.util import lower_clean_dict_keys
streamlit/components/v1/component_arrow.py CHANGED
@@ -20,7 +20,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from streamlit import dataframe_util, type_util
+from streamlit import dataframe_util
 from streamlit.elements.lib import pandas_styler_utils
 
 if TYPE_CHECKING:
@@ -122,7 +122,7 @@ def arrow_proto_to_dataframe(proto: ArrowTableProto) -> DataFrame:
 
     """
 
-    if type_util.is_pyarrow_version_less_than("14.0.1"):
+    if dataframe_util.is_pyarrow_version_less_than("14.0.1"):
         raise RuntimeError(
             "The installed pyarrow version is not compatible with this component. "
             "Please upgrade to 14.0.1 or higher: pip install -U pyarrow"
streamlit/components/v1/component_registry.py CHANGED
@@ -20,7 +20,7 @@ from typing import TYPE_CHECKING
 
 from streamlit.components.v1.custom_component import CustomComponent
 from streamlit.runtime import get_instance
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 
 if TYPE_CHECKING:
     from types import FrameType
streamlit/components/v1/custom_component.py CHANGED
@@ -27,7 +27,7 @@ from streamlit.proto.Components_pb2 import ArrowTable as ArrowTableProto
 from streamlit.proto.Components_pb2 import SpecialArg
 from streamlit.proto.Element_pb2 import Element
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.state import NoValue, register_widget
 from streamlit.runtime.state.common import compute_widget_id
 from streamlit.type_util import is_bytes_like, to_bytes
streamlit/cursor.py CHANGED
@@ -17,7 +17,7 @@ from __future__ import annotations
 from typing import Any
 
 from streamlit import util
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 
 
 def make_delta_path(
streamlit/dataframe_util.py CHANGED
@@ -22,33 +22,38 @@ import inspect
 import math
 import re
 from collections import ChainMap, UserDict, UserList, deque
-from collections.abc import ItemsView, Mapping
+from collections.abc import ItemsView
 from enum import Enum, EnumMeta, auto
 from types import MappingProxyType
 from typing import (
     TYPE_CHECKING,
     Any,
-    Dict,
     Final,
     Iterable,
     List,
+    Mapping,
     Protocol,
+    Sequence,
     TypeVar,
     Union,
     cast,
+    runtime_checkable,
 )
 
 from typing_extensions import TypeAlias, TypeGuard
 
 from streamlit import config, errors, logger, string_util
 from streamlit.type_util import (
+    CustomDict,
     NumpyShape,
     has_callable_attr,
     is_custom_dict,
     is_dataclass_instance,
     is_list_like,
     is_namedtuple,
+    is_pydantic_model,
     is_type,
+    is_version_less_than,
 )
 
 if TYPE_CHECKING:
@@ -87,6 +92,7 @@ _DASK_SERIES: Final = "dask.dataframe.core.Series"
 _DASK_INDEX: Final = "dask.dataframe.core.Index"
 _RAY_MATERIALIZED_DATASET: Final = "ray.data.dataset.MaterializedDataset"
 _RAY_DATASET: Final = "ray.data.dataset.Dataset"
+_DUCKDB_RELATION: Final = "duckdb.duckdb.DuckDBPyRelation"
 
 V_co = TypeVar(
     "V_co",
@@ -94,6 +100,39 @@ V_co = TypeVar(
 )
 
 
+@runtime_checkable
+class DBAPICursor(Protocol):
+    """Protocol for DBAPI 2.0 Cursor objects (PEP 249).
+
+    This is a simplified version of the DBAPI Cursor protocol
+    that only contains the methods that are relevant or used for
+    our DB API Integration.
+
+    Specification: https://peps.python.org/pep-0249/
+    Inspired by: https://github.com/python/typeshed/blob/main/stdlib/_typeshed/dbapi.pyi
+    """
+
+    @property
+    def description(
+        self,
+    ) -> (
+        Sequence[
+            tuple[
+                str,
+                Any | None,
+                int | None,
+                int | None,
+                int | None,
+                int | None,
+                bool | None,
+            ]
+        ]
+        | None
+    ): ...
+    def fetchmany(self, size: int = ..., /) -> Sequence[Sequence[Any]]: ...
+    def fetchall(self) -> Sequence[Sequence[Any]]: ...
+
+
 class DataFrameGenericAlias(Protocol[V_co]):
     """Technically not a GenericAlias, but serves the same purpose in
     OptionSequence below, in that it is a type which admits DataFrame,
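Note: `DBAPICursor` is declared `runtime_checkable`, so cursor detection is a structural `isinstance` check rather than an import- or type-name-based check. A minimal sketch of that idea using only the standard library; the `_CursorLike` protocol below is an illustrative stand-in for the one added above, not an import from streamlit:

import sqlite3
from typing import Any, Protocol, Sequence, runtime_checkable

@runtime_checkable
class _CursorLike(Protocol):
    # Structural stand-in mirroring the DBAPICursor protocol above.
    @property
    def description(self) -> Any: ...
    def fetchmany(self, size: int = ..., /) -> Sequence[Sequence[Any]]: ...
    def fetchall(self) -> Sequence[Sequence[Any]]: ...

cur = sqlite3.connect(":memory:").cursor()
print(isinstance(cur, _CursorLike))  # True: sqlite3 cursors match structurally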
@@ -108,9 +147,26 @@ class DataFrameGenericAlias(Protocol[V_co]):
     def iloc(self) -> _iLocIndexer: ...
 
 
+class PandasCompatible(Protocol):
+    """Protocol for Pandas compatible objects that have a `to_pandas` method."""
+
+    def to_pandas(self) -> DataFrame | Series: ...
+
+
+class DataframeInterchangeCompatible(Protocol):
+    """Protocol for objects support the dataframe-interchange protocol.
+
+    https://data-apis.org/dataframe-protocol/latest/index.html
+    """
+
+    def __dataframe__(self, allow_copy: bool) -> Any: ...
+
+
 OptionSequence: TypeAlias = Union[
     Iterable[V_co],
     DataFrameGenericAlias[V_co],
+    PandasCompatible,
+    DataframeInterchangeCompatible,
 ]
 
 # Various data types supported by our dataframe processing
@@ -122,9 +178,14 @@ Data: TypeAlias = Union[
     "Styler",
     "Index",
     "pa.Table",
+    "pa.Array",
     "np.ndarray[Any, np.dtype[Any]]",
     Iterable[Any],
-    Dict[Any, Any],
+    "Mapping[Any, Any]",
+    DBAPICursor,
+    PandasCompatible,
+    DataframeInterchangeCompatible,
+    CustomDict,
     None,
 ]
 
@@ -163,6 +224,55 @@ class DataFormat(Enum):
     COLUMN_VALUE_MAPPING = auto()  # {column: List[values]}
     COLUMN_SERIES_MAPPING = auto()  # {column: Series(values)}
     KEY_VALUE_DICT = auto()  # {index: value}
+    DBAPI_CURSOR = auto()  # DBAPI Cursor (PEP 249)
+    DUCKDB_RELATION = auto()  # DuckDB Relation
+
+
+def is_pyarrow_version_less_than(v: str) -> bool:
+    """Return True if the current Pyarrow version is less than the input version.
+
+    Parameters
+    ----------
+    v : str
+        Version string, e.g. "0.25.0"
+
+    Returns
+    -------
+    bool
+
+
+    Raises
+    ------
+    InvalidVersion
+        If the version strings are not valid.
+
+    """
+    import pyarrow as pa
+
+    return is_version_less_than(pa.__version__, v)
+
+
+def is_pandas_version_less_than(v: str) -> bool:
+    """Return True if the current Pandas version is less than the input version.
+
+    Parameters
+    ----------
+    v : str
+        Version string, e.g. "0.25.0"
+
+    Returns
+    -------
+    bool
+
+
+    Raises
+    ------
+    InvalidVersion
+        If the version strings are not valid.
+    """
+    import pandas as pd
+
+    return is_version_less_than(pd.__version__, v)
 
 
 def is_dataframe_like(obj: object) -> bool:
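Both helpers delegate to `is_version_less_than` in `streamlit.type_util`. A minimal stand-in for that comparison, assuming the `packaging` library (already a Streamlit dependency); the helper name here is illustrative, not the actual streamlit API:

from packaging.version import Version

def _version_less_than(installed: str, required: str) -> bool:
    # Compare two version strings semantically, e.g. "13.0.0" < "14.0.1".
    # Raises packaging.version.InvalidVersion for malformed strings.
    return Version(installed) < Version(required)

print(_version_less_than("13.0.0", "14.0.1"))  # True
print(_version_less_than("2.2.2", "1.5.0"))    # False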
@@ -200,6 +310,7 @@ def is_dataframe_like(obj: object) -> bool:
         DataFormat.DASK_OBJECT,
         DataFormat.RAY_DATASET,
         DataFormat.COLUMN_SERIES_MAPPING,
+        DataFormat.DBAPI_CURSOR,
     ]
 
 
@@ -215,6 +326,8 @@ def is_unevaluated_data_object(obj: object) -> bool:
     - Ray Dataset
     - Polars LazyFrame
     - Generator functions
+    - DB API 2.0 Cursor (PEP 249)
+    - DuckDB Relation (Relational API)
 
     Unevaluated means that the data is not yet in the local memory.
     Unevaluated data objects are treated differently from other data objects by only
@@ -228,6 +341,8 @@ def is_unevaluated_data_object(obj: object) -> bool:
         or is_ray_dataset(obj)
         or is_polars_lazyframe(obj)
         or is_dask_object(obj)
+        or is_duckdb_relation(obj)
+        or is_dbapi_cursor(obj)
         or inspect.isgeneratorfunction(obj)
     )
 
@@ -254,11 +369,7 @@ def is_snowpark_row_list(obj: object) -> bool:
 
 def is_pyspark_data_object(obj: object) -> bool:
     """True if obj is of type pyspark.sql.dataframe.DataFrame"""
-    return (
-        is_type(obj, _PYSPARK_DF_TYPE_STR)
-        and hasattr(obj, "toPandas")
-        and callable(obj.toPandas)
-    )
+    return is_type(obj, _PYSPARK_DF_TYPE_STR) and has_callable_attr(obj, "toPandas")
 
 
 def is_dask_object(obj: object) -> bool:
@@ -319,6 +430,23 @@ def is_pandas_styler(obj: object) -> TypeGuard[Styler]:
     return is_type(obj, _PANDAS_STYLER_TYPE_STR)
 
 
+def is_dbapi_cursor(obj: object) -> TypeGuard[DBAPICursor]:
+    """True if obj looks like a DB API 2.0 Cursor.
+
+    https://peps.python.org/pep-0249/
+    """
+    return isinstance(obj, DBAPICursor)
+
+
+def is_duckdb_relation(obj: object) -> bool:
+    """True if obj is a DuckDB relation.
+
+    https://duckdb.org/docs/api/python/relational_api
+    """
+
+    return is_type(obj, _DUCKDB_RELATION)
+
+
 def _is_list_of_scalars(data: Iterable[Any]) -> bool:
     """Check if the list only contains scalar values."""
     from pandas.api.types import infer_dtype
@@ -400,7 +528,7 @@ def convert_anything_to_pandas_df(
 
     Parameters
     ----------
-    data : any
+    data : dataframe-, array-, or collections-like object
         The data to convert to a Pandas DataFrame.
 
     max_unevaluated_rows: int
@@ -533,12 +661,45 @@ def convert_anything_to_pandas_df(
         )
         return cast(pd.DataFrame, data)
 
+    if is_duckdb_relation(data):
+        data = data.limit(max_unevaluated_rows).df()
+        if data.shape[0] == max_unevaluated_rows:
+            _show_data_information(
+                f"⚠️ Showing only {string_util.simplify_number(max_unevaluated_rows)} "
+                "rows. Call `df()` on the relation to show more."
+            )
+        return data
+
+    if is_dbapi_cursor(data):
+        # Based on the specification, the first item in the description is the
+        # column name (if available)
+        columns = (
+            [d[0] if d else "" for d in data.description] if data.description else None
+        )
+        data = pd.DataFrame(data.fetchmany(max_unevaluated_rows), columns=columns)
+        if data.shape[0] == max_unevaluated_rows:
+            _show_data_information(
+                f"⚠️ Showing only {string_util.simplify_number(max_unevaluated_rows)} "
+                "rows. Call `fetchall()` on the Cursor to show more."
+            )
+        return data
+
     if is_snowpark_row_list(data):
         return pd.DataFrame([row.as_dict() for row in data])
 
     if has_callable_attr(data, "to_pandas"):
         return pd.DataFrame(data.to_pandas())
 
+    # Check for dataframe interchange protocol
+    # Only available in pandas >= 1.5.0
+    # https://pandas.pydata.org/docs/whatsnew/v1.5.0.html#dataframe-interchange-protocol-implementation
+    if (
+        has_callable_attr(data, "__dataframe__")
+        and is_pandas_version_less_than("1.5.0") is False
+    ):
+        data_df = pd.api.interchange.from_dataframe(data)
+        return data_df.copy() if ensure_copy else data_df
+
     # Support for generator functions
     if inspect.isgeneratorfunction(data):
         data = _fix_column_naming(
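Taken together, these branches mean a plain DB API cursor (or a DuckDB relation) can be passed directly to the dataframe elements in this nightly, with only the first max_unevaluated_rows rows being fetched. A hedged usage sketch, assuming this nightly build is installed:

# app.py -- run with: streamlit run app.py
import sqlite3
import streamlit as st

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (name TEXT, score INTEGER)")
conn.executemany("INSERT INTO t VALUES (?, ?)", [("a", 1), ("b", 2)])

cursor = conn.execute("SELECT * FROM t")
st.dataframe(cursor)  # detected via is_dbapi_cursor; rows come from fetchmany()

# A DuckDB relation (e.g. duckdb.sql("SELECT ...")) should be handled analogously
# via is_duckdb_relation, if the duckdb package is installed.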
@@ -573,7 +734,9 @@ def convert_anything_to_pandas_df(
         return _dict_to_pandas_df(dataclasses.asdict(data))
 
     # Support for dict-like objects
-    if isinstance(data, (ChainMap, MappingProxyType, UserDict)):
+    if isinstance(data, (ChainMap, MappingProxyType, UserDict)) or is_pydantic_model(
+        data
+    ):
         return _dict_to_pandas_df(dict(data))
 
     # Try to convert to pandas.DataFrame. This will raise an error is df is not
@@ -703,7 +866,7 @@ def convert_anything_to_arrow_bytes(
 
     Parameters
     ----------
-    data : any
+    data : dataframe-, array-, or collections-like object
         The data to convert to Arrow bytes.
 
     max_unevaluated_rows: int
@@ -721,19 +884,7 @@ def convert_anything_to_arrow_bytes(
     if isinstance(data, pa.Table):
         return convert_arrow_table_to_arrow_bytes(data)
 
-    if is_pandas_data_object(data):
-        # All pandas data objects should be handled via our pandas
-        # conversion logic. We are already calling it here
-        # to ensure that its not handled via the interchange
-        # protocol support below.
-        df = convert_anything_to_pandas_df(data, max_unevaluated_rows)
-        return convert_pandas_df_to_arrow_bytes(df)
-
-    if is_polars_dataframe(data):
-        return convert_arrow_table_to_arrow_bytes(data.to_arrow())
-
-    if is_polars_series(data):
-        return convert_arrow_table_to_arrow_bytes(data.to_frame().to_arrow())
+    # TODO(lukasmasuch): Add direct conversion to Arrow for supported formats here
 
     # Fallback: try to convert to pandas DataFrame
     # and then to Arrow bytes.
@@ -751,7 +902,7 @@ def convert_anything_to_sequence(obj: OptionSequence[V_co]) -> list[V_co]:
     Parameters
     ----------
 
-    obj : OptionSequence
+    obj : dataframe-, array-, or collections-like object
         The object to convert to a list.
 
     Returns
@@ -891,6 +1042,7 @@ def is_colum_type_arrow_incompatible(column: Series[Any] | Index) -> bool:
         "period[ns]",
         "period[U]",
         "period[us]",
+        "geometry",
     }:
         return True
 
@@ -1044,6 +1196,10 @@ def determine_data_format(input_data: Any) -> DataFormat:
         return DataFormat.DASK_OBJECT
     elif is_snowpark_data_object(input_data) or is_snowpark_row_list(input_data):
         return DataFormat.SNOWPARK_OBJECT
+    elif is_duckdb_relation(input_data):
+        return DataFormat.DUCKDB_RELATION
+    elif is_dbapi_cursor(input_data):
+        return DataFormat.DBAPI_CURSOR
     elif (
         isinstance(
             input_data,
@@ -1052,6 +1208,7 @@ def determine_data_format(input_data: Any) -> DataFormat:
         or is_dataclass_instance(input_data)
         or is_namedtuple(input_data)
         or is_custom_dict(input_data)
+        or is_pydantic_model(input_data)
     ):
         return DataFormat.KEY_VALUE_DICT
     elif isinstance(input_data, (ItemsView, enumerate)):
@@ -1164,6 +1321,8 @@ def convert_pandas_df_to_data_format(
         DataFormat.SNOWPANDAS_OBJECT,
         DataFormat.DASK_OBJECT,
         DataFormat.RAY_DATASET,
+        DataFormat.DBAPI_CURSOR,
+        DataFormat.DUCKDB_RELATION,
     ]:
         return df
     elif data_format == DataFormat.NUMPY_LIST:
streamlit/elements/arrow.py CHANGED
@@ -48,7 +48,7 @@ from streamlit.errors import StreamlitAPIException
 from streamlit.proto.Arrow_pb2 import Arrow as ArrowProto
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner.script_run_context import (
+from streamlit.runtime.scriptrunner_utils.script_run_context import (
     enqueue_message,
     get_script_run_ctx,
 )
streamlit/elements/json.py CHANGED
@@ -21,7 +21,12 @@ from typing import TYPE_CHECKING, Any, cast
 
 from streamlit.proto.Json_pb2 import Json as JsonProto
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.type_util import is_custom_dict, is_namedtuple
+from streamlit.type_util import (
+    is_custom_dict,
+    is_list_like,
+    is_namedtuple,
+    is_pydantic_model,
+)
 
 if TYPE_CHECKING:
     from streamlit.delta_generator import DeltaGenerator
@@ -79,7 +84,6 @@ class JsonMixin:
            height: 385px
 
         """
-        import streamlit as st
 
         if is_custom_dict(body):
             body = body.to_dict()
@@ -87,18 +91,20 @@ class JsonMixin:
         if is_namedtuple(body):
             body = body._asdict()
 
-        if isinstance(body, (map, enumerate)):
-            body = list(body)
+        if isinstance(
+            body, (ChainMap, types.MappingProxyType, UserDict)
+        ) or is_pydantic_model(body):
+            body = dict(body)  # type: ignore
 
-        if isinstance(body, (ChainMap, types.MappingProxyType, UserDict)):
-            body = dict(body)
+        if is_list_like(body):
+            body = list(body)
 
         if not isinstance(body, str):
             try:
                 # Serialize body to string and try to interpret sets as lists
                 body = json.dumps(body, default=_ensure_serialization)
             except TypeError as err:
-                st.warning(
+                self.dg.warning(
                     "Warning: this data structure was not fully serializable as "
                     f"JSON due to one or more unexpected keys. (Error was: {err})"
                 )
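With these branches, st.json coerces ChainMaps, mapping proxies, UserDicts, and pydantic models to plain dicts before serializing, and emits its warning on the element's own container instead of the global st.warning. A hedged usage sketch, assuming this nightly build plus the pydantic package; the model below is purely illustrative:

# app.py -- run with: streamlit run app.py
import streamlit as st
from pydantic import BaseModel

class User(BaseModel):
    name: str
    score: int = 0

# Iterating a BaseModel yields (field, value) pairs, so dict(model) works,
# which is what the is_pydantic_model branch above relies on.
st.json(User(name="ada", score=3))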
streamlit/elements/lib/dialog.py CHANGED
@@ -23,7 +23,7 @@ from streamlit.delta_generator import DeltaGenerator
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.Block_pb2 import Block as BlockProto
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
-from streamlit.runtime.scriptrunner.script_run_context import (
+from streamlit.runtime.scriptrunner_utils.script_run_context import (
     enqueue_message,
     get_script_run_ctx,
 )
streamlit/elements/lib/mutable_status_container.py CHANGED
@@ -23,7 +23,7 @@ from streamlit.delta_generator import DeltaGenerator
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.Block_pb2 import Block as BlockProto
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
-from streamlit.runtime.scriptrunner.script_run_context import enqueue_message
+from streamlit.runtime.scriptrunner_utils.script_run_context import enqueue_message
 
 if TYPE_CHECKING:
     from types import TracebackType
streamlit/elements/lib/policies.py CHANGED
@@ -19,7 +19,7 @@ from typing import TYPE_CHECKING, Any, Final, Sequence
 from streamlit import config, errors, logger, runtime
 from streamlit.elements.form_utils import is_in_form
 from streamlit.errors import StreamlitAPIException, StreamlitAPIWarning
-from streamlit.runtime.scriptrunner.script_run_context import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.state import WidgetCallback, get_session_state
 
 if TYPE_CHECKING:
streamlit/elements/media.py CHANGED
@@ -22,7 +22,6 @@ from typing import TYPE_CHECKING, Dict, Final, Union, cast
 
 from typing_extensions import TypeAlias
 
-import streamlit as st
 from streamlit import runtime, type_util, url_util
 from streamlit.elements.lib.subtitle_utils import process_subtitle_data
 from streamlit.errors import StreamlitAPIException
@@ -30,7 +29,7 @@ from streamlit.proto.Audio_pb2 import Audio as AudioProto
 from streamlit.proto.Video_pb2 import Video as VideoProto
 from streamlit.runtime import caching
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.state.common import compute_widget_id
 from streamlit.time_util import time_to_seconds
 from streamlit.type_util import NumpyShape
@@ -169,7 +168,6 @@ class MediaMixin:
         start_time, end_time = _parse_start_time_end_time(start_time, end_time)
 
         audio_proto = AudioProto()
-        coordinates = self.dg._get_delta_path_str()
 
         is_data_numpy_array = type_util.is_type(data, "numpy.ndarray")
 
@@ -178,11 +176,11 @@ class MediaMixin:
                 "`sample_rate` must be specified when `data` is a numpy array."
             )
         if not is_data_numpy_array and sample_rate is not None:
-            st.warning(
+            self.dg.warning(
                 "Warning: `sample_rate` will be ignored since data is not a numpy "
                 "array."
             )
-
+        coordinates = self.dg._get_delta_path_str()
         marshall_audio(
             coordinates,
             audio_proto,
streamlit/elements/plotly_chart.py CHANGED
@@ -46,7 +46,7 @@ from streamlit.elements.lib.utils import Key, to_key
 from streamlit.errors import StreamlitAPIException
 from streamlit.proto.PlotlyChart_pb2 import PlotlyChart as PlotlyChartProto
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.state import WidgetCallback, register_widget
 from streamlit.runtime.state.common import compute_widget_id
 
streamlit/elements/vega_charts.py CHANGED
@@ -53,7 +53,7 @@ from streamlit.proto.ArrowVegaLiteChart_pb2 import (
     ArrowVegaLiteChart as ArrowVegaLiteChartProto,
 )
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.state import WidgetCallback, register_widget
 from streamlit.runtime.state.common import compute_widget_id
 from streamlit.util import HASHLIB_KWARGS
streamlit/elements/widgets/button.py CHANGED
@@ -740,10 +740,13 @@ class ButtonMixin:
                     page_link_proto.page = page_name
                 break
 
-        if page_link_proto.page_script_hash == "":
-            raise StreamlitAPIException(
-                f"Could not find page: `{page}`. Must be the file path relative to the main script, from the directory: `{os.path.basename(main_script_directory)}`. Only the main app file and files in the `pages/` directory are supported."
-            )
+        if page_link_proto.page_script_hash == "":
+            raise StreamlitAPIException(
+                f"Could not find page: `{page}`. Must be the file path relative to "
+                "the main script, from the directory: "
+                f"`{os.path.basename(main_script_directory)}`. Only the main app "
+                "file and files in the `pages/` directory are supported."
+            )
 
         return self.dg._enqueue("page_link", page_link_proto)