streamlit 1.45.1__py3-none-any.whl → 1.46.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- streamlit/__init__.py +5 -1
- streamlit/auth_util.py +12 -12
- streamlit/cli_util.py +4 -3
- streamlit/column_config.py +11 -9
- streamlit/commands/echo.py +6 -4
- streamlit/commands/execution_control.py +33 -32
- streamlit/commands/experimental_query_params.py +2 -2
- streamlit/commands/logo.py +9 -4
- streamlit/commands/navigation.py +61 -18
- streamlit/commands/page_config.py +57 -47
- streamlit/components/types/base_custom_component.py +7 -7
- streamlit/components/v1/component_registry.py +7 -3
- streamlit/components/v1/components.py +1 -1
- streamlit/components/v1/custom_component.py +8 -8
- streamlit/config.py +289 -144
- streamlit/config_option.py +19 -15
- streamlit/config_util.py +29 -23
- streamlit/connections/__init__.py +2 -2
- streamlit/connections/base_connection.py +5 -5
- streamlit/connections/snowflake_connection.py +13 -11
- streamlit/connections/snowpark_connection.py +3 -3
- streamlit/connections/sql_connection.py +20 -18
- streamlit/connections/util.py +2 -2
- streamlit/cursor.py +6 -6
- streamlit/dataframe_util.py +52 -52
- streamlit/delta_generator.py +46 -48
- streamlit/delta_generator_singletons.py +3 -3
- streamlit/deprecation_util.py +6 -6
- streamlit/elements/alert.py +37 -29
- streamlit/elements/arrow.py +40 -22
- streamlit/elements/code.py +46 -13
- streamlit/elements/deck_gl_json_chart.py +38 -27
- streamlit/elements/dialog_decorator.py +3 -4
- streamlit/elements/doc_string.py +64 -58
- streamlit/elements/exception.py +23 -27
- streamlit/elements/form.py +41 -0
- streamlit/elements/graphviz_chart.py +1 -1
- streamlit/elements/heading.py +60 -9
- streamlit/elements/html.py +3 -4
- streamlit/elements/image.py +8 -9
- streamlit/elements/json.py +21 -2
- streamlit/elements/layouts.py +120 -31
- streamlit/elements/lib/built_in_chart_utils.py +96 -73
- streamlit/elements/lib/color_util.py +3 -3
- streamlit/elements/lib/column_config_utils.py +2 -4
- streamlit/elements/lib/column_types.py +14 -8
- streamlit/elements/lib/dialog.py +9 -5
- streamlit/elements/lib/image_utils.py +39 -40
- streamlit/elements/lib/js_number.py +4 -4
- streamlit/elements/lib/layout_utils.py +65 -1
- streamlit/elements/lib/mutable_status_container.py +14 -3
- streamlit/elements/lib/options_selector_utils.py +22 -12
- streamlit/elements/lib/pandas_styler_utils.py +25 -21
- streamlit/elements/lib/policies.py +6 -5
- streamlit/elements/lib/streamlit_plotly_theme.py +54 -53
- streamlit/elements/lib/subtitle_utils.py +6 -9
- streamlit/elements/lib/utils.py +20 -5
- streamlit/elements/map.py +32 -56
- streamlit/elements/markdown.py +101 -12
- streamlit/elements/media.py +78 -21
- streamlit/elements/metric.py +32 -16
- streamlit/elements/plotly_chart.py +15 -15
- streamlit/elements/progress.py +33 -15
- streamlit/elements/spinner.py +31 -6
- streamlit/elements/text.py +21 -1
- streamlit/elements/toast.py +1 -2
- streamlit/elements/vega_charts.py +54 -23
- streamlit/elements/widgets/audio_input.py +24 -7
- streamlit/elements/widgets/button.py +26 -19
- streamlit/elements/widgets/button_group.py +10 -15
- streamlit/elements/widgets/camera_input.py +27 -7
- streamlit/elements/widgets/chat.py +91 -38
- streamlit/elements/widgets/checkbox.py +45 -4
- streamlit/elements/widgets/color_picker.py +40 -17
- streamlit/elements/widgets/data_editor.py +76 -37
- streamlit/elements/widgets/file_uploader.py +42 -13
- streamlit/elements/widgets/multiselect.py +7 -10
- streamlit/elements/widgets/number_input.py +123 -47
- streamlit/elements/widgets/radio.py +59 -13
- streamlit/elements/widgets/select_slider.py +35 -30
- streamlit/elements/widgets/selectbox.py +56 -9
- streamlit/elements/widgets/slider.py +190 -99
- streamlit/elements/widgets/text_widgets.py +54 -8
- streamlit/elements/widgets/time_widgets.py +53 -14
- streamlit/elements/write.py +5 -8
- streamlit/env_util.py +2 -7
- streamlit/error_util.py +16 -9
- streamlit/errors.py +69 -48
- streamlit/external/langchain/streamlit_callback_handler.py +10 -5
- streamlit/file_util.py +27 -10
- streamlit/git_util.py +29 -24
- streamlit/hello/animation_demo.py +9 -9
- streamlit/hello/dataframe_demo.py +5 -5
- streamlit/hello/hello.py +1 -0
- streamlit/hello/mapping_demo.py +7 -8
- streamlit/hello/plotting_demo.py +3 -3
- streamlit/hello/streamlit_app.py +28 -26
- streamlit/hello/utils.py +2 -1
- streamlit/logger.py +10 -11
- streamlit/navigation/page.py +11 -8
- streamlit/proto/Audio_pb2.py +4 -3
- streamlit/proto/Audio_pb2.pyi +8 -1
- streamlit/proto/Block_pb2.py +38 -29
- streamlit/proto/Block_pb2.pyi +72 -4
- streamlit/proto/ClientState_pb2.py +4 -4
- streamlit/proto/ClientState_pb2.pyi +7 -2
- streamlit/proto/Code_pb2.py +4 -2
- streamlit/proto/Code_pb2.pyi +1 -0
- streamlit/proto/DataFrame_pb2.pyi +1 -1
- streamlit/proto/DeckGlJsonChart_pb2.pyi +1 -1
- streamlit/proto/Element_pb2.py +5 -3
- streamlit/proto/Element_pb2.pyi +20 -3
- streamlit/proto/GapSize_pb2.py +29 -0
- streamlit/proto/GapSize_pb2.pyi +70 -0
- streamlit/proto/HeightConfig_pb2.py +27 -0
- streamlit/proto/HeightConfig_pb2.pyi +48 -0
- streamlit/proto/NamedDataSet_pb2.pyi +1 -1
- streamlit/proto/Navigation_pb2.py +3 -3
- streamlit/proto/Navigation_pb2.pyi +4 -0
- streamlit/proto/NewSession_pb2.py +18 -16
- streamlit/proto/NewSession_pb2.pyi +29 -3
- streamlit/proto/PageConfig_pb2.py +7 -7
- streamlit/proto/PageConfig_pb2.pyi +21 -1
- streamlit/proto/Video_pb2.py +8 -7
- streamlit/proto/Video_pb2.pyi +8 -1
- streamlit/proto/WidthConfig_pb2.py +2 -2
- streamlit/proto/WidthConfig_pb2.pyi +15 -1
- streamlit/runtime/__init__.py +1 -1
- streamlit/runtime/app_session.py +53 -40
- streamlit/runtime/caching/__init__.py +9 -9
- streamlit/runtime/caching/cache_data_api.py +36 -30
- streamlit/runtime/caching/cache_errors.py +4 -4
- streamlit/runtime/caching/cache_resource_api.py +8 -8
- streamlit/runtime/caching/cache_utils.py +15 -14
- streamlit/runtime/caching/cached_message_replay.py +14 -8
- streamlit/runtime/caching/hashing.py +91 -97
- streamlit/runtime/caching/legacy_cache_api.py +2 -2
- streamlit/runtime/caching/storage/cache_storage_protocol.py +1 -1
- streamlit/runtime/caching/storage/dummy_cache_storage.py +1 -1
- streamlit/runtime/caching/storage/in_memory_cache_storage_wrapper.py +12 -14
- streamlit/runtime/caching/storage/local_disk_cache_storage.py +6 -6
- streamlit/runtime/connection_factory.py +36 -36
- streamlit/runtime/context.py +58 -9
- streamlit/runtime/credentials.py +29 -40
- streamlit/runtime/forward_msg_queue.py +11 -11
- streamlit/runtime/fragment.py +7 -7
- streamlit/runtime/media_file_manager.py +3 -4
- streamlit/runtime/memory_media_file_storage.py +6 -5
- streamlit/runtime/memory_uploaded_file_manager.py +2 -2
- streamlit/runtime/metrics_util.py +11 -12
- streamlit/runtime/pages_manager.py +4 -6
- streamlit/runtime/runtime.py +8 -6
- streamlit/runtime/runtime_util.py +7 -6
- streamlit/runtime/scriptrunner/__init__.py +4 -4
- streamlit/runtime/scriptrunner/exec_code.py +12 -5
- streamlit/runtime/scriptrunner/magic.py +16 -12
- streamlit/runtime/scriptrunner/script_cache.py +1 -1
- streamlit/runtime/scriptrunner/script_runner.py +53 -29
- streamlit/runtime/scriptrunner_utils/exceptions.py +1 -1
- streamlit/runtime/scriptrunner_utils/script_requests.py +7 -4
- streamlit/runtime/scriptrunner_utils/script_run_context.py +10 -23
- streamlit/runtime/secrets.py +40 -35
- streamlit/runtime/session_manager.py +2 -1
- streamlit/runtime/state/__init__.py +5 -5
- streamlit/runtime/state/common.py +2 -2
- streamlit/runtime/state/query_params.py +13 -15
- streamlit/runtime/state/query_params_proxy.py +17 -13
- streamlit/runtime/state/safe_session_state.py +2 -2
- streamlit/runtime/state/session_state.py +52 -34
- streamlit/runtime/stats.py +2 -2
- streamlit/runtime/uploaded_file_manager.py +1 -1
- streamlit/runtime/websocket_session_manager.py +10 -6
- streamlit/source_util.py +8 -6
- streamlit/static/index.html +3 -17
- streamlit/static/manifest.json +1180 -0
- streamlit/static/static/css/{index.DqDwtg6_.css → index.CJVRHjQZ.css} +1 -1
- streamlit/static/static/js/{ErrorOutline.esm.DU9IrB3M.js → ErrorOutline.esm.DitPpe1Y.js} +1 -1
- streamlit/static/static/js/{FileDownload.esm.P9rKwKo8.js → FileDownload.esm.AI3watX9.js} +1 -1
- streamlit/static/static/js/{FileHelper.D7RMkx0e.js → FileHelper.kt7mhnu8.js} +5 -5
- streamlit/static/static/js/{FormClearHelper.B67tgll0.js → FormClearHelper.D1M9GM_c.js} +1 -1
- streamlit/static/static/js/{Hooks.ncTJktu9.js → Hooks.BGwHKeUc.js} +1 -1
- streamlit/static/static/js/{InputInstructions.D-Y8geDN.js → InputInstructions.DaZ89mzH.js} +1 -1
- streamlit/static/static/js/{ProgressBar.B-kexwwD.js → ProgressBar.C0zPMe-p.js} +2 -2
- streamlit/static/static/js/{RenderInPortalIfExists.BgaoZgep.js → RenderInPortalIfExists.Ox8gQvdz.js} +1 -1
- streamlit/static/static/js/Toolbar.KhlcEc0K.js +1 -0
- streamlit/static/static/js/UploadFileInfo.0DCkpDDf.js +6 -0
- streamlit/static/static/js/{base-input.BoAa1U94.js → base-input.BJ4qsfSq.js} +4 -4
- streamlit/static/static/js/{checkbox.Z6iSfe5F.js → checkbox.DSDh78Xz.js} +2 -2
- streamlit/static/static/js/{createSuper.B4oGDYRm.js → createSuper.wQ9SIXEJ.js} +1 -1
- streamlit/static/static/js/{data-grid-overlay-editor.msYws2Ou.js → data-grid-overlay-editor.DvbdPJ15.js} +1 -1
- streamlit/static/static/js/{downloader.kc14n2Hv.js → downloader.CD9rzih5.js} +1 -1
- streamlit/static/static/js/{es6.CxQz807-.js → es6.48Q9Qjgb.js} +2 -2
- streamlit/static/static/js/{iframeResizer.contentWindow.B19u0ONI.js → iframeResizer.contentWindow.CKdem3Bn.js} +1 -1
- streamlit/static/static/js/{index.LaIasviC.js → index.6md5Qhod.js} +1 -1
- streamlit/static/static/js/index.7hy6AeJ1.js +1 -0
- streamlit/static/static/js/index.B4CGJiBW.js +1 -0
- streamlit/static/static/js/index.B8oW0ZTD.js +1 -0
- streamlit/static/static/js/index.BU6RnlHI.js +73 -0
- streamlit/static/static/js/index.BUq9Wcf8.js +197 -0
- streamlit/static/static/js/{index.BFz9U2y0.js → index.BXXo-Yoj.js} +1 -1
- streamlit/static/static/js/index.Bae9H0OS.js +1 -0
- streamlit/static/static/js/{index.-5ruC9At.js → index.BhTl2Uyb.js} +1 -1
- streamlit/static/static/js/{index.BpILzHf_.js → index.BiSaCB1o.js} +20 -20
- streamlit/static/static/js/{index.xNQq3Ei5.js → index.BulSAJ9z.js} +1 -1
- streamlit/static/static/js/{index.9V1KdxfP.js → index.Bv-EuTKR.js} +1 -1
- streamlit/static/static/js/index.BvMLYCHi.js +1 -0
- streamlit/static/static/js/index.C1NIn1Y2.js +783 -0
- streamlit/static/static/js/index.CP-fthOJ.js +2 -0
- streamlit/static/static/js/{index.BoigZiu7.js → index.CS9guO3p.js} +1 -1
- streamlit/static/static/js/index.CYTBHth8.js +1 -0
- streamlit/static/static/js/{index.CmTAF0dM.js → index.CcJufcuD.js} +1 -1
- streamlit/static/static/js/index.CnENU1yn.js +1 -0
- streamlit/static/static/js/index.Cns13qBb.js +1 -0
- streamlit/static/static/js/index.Ct_xXq7w.js +1 -0
- streamlit/static/static/js/{index.BqfdT8-Q.js → index.CxGSemHL.js} +1 -1
- streamlit/static/static/js/index.D5S0ldVb.js +1 -0
- streamlit/static/static/js/index.D72B_ksb.js +2 -0
- streamlit/static/static/js/index.DI4yZ27M.js +1 -0
- streamlit/static/static/js/index.DN51vLxR.js +1 -0
- streamlit/static/static/js/index.DRtq5dka.js +1 -0
- streamlit/static/static/js/{index.BHXxWdde.js → index.DX-oiXlb.js} +1 -1
- streamlit/static/static/js/index.DlFE4_Aq.js +12 -0
- streamlit/static/static/js/{index.BHGGDa8K.js → index.J7BJwXOi.js} +2 -2
- streamlit/static/static/js/index.Jg38kJPP.js +1 -0
- streamlit/static/static/js/index.JhIO6abf.js +3 -0
- streamlit/static/static/js/{index.DeB9iKFW.js → index.NkRcWwc5.js} +255 -255
- streamlit/static/static/js/{index.BGga-hcS.js → index.prekPLrm.js} +25 -25
- streamlit/static/static/js/{index.BRXmLIsC.js → index.wyzngKUE.js} +1 -1
- streamlit/static/static/js/index.xW7mVdI8.js +1 -0
- streamlit/static/static/js/index.yk07dYGx.js +1 -0
- streamlit/static/static/js/{input.DsCfafm0.js → input.CxKZ5Wrc.js} +2 -2
- streamlit/static/static/js/{memory.nY_lMTtu.js → memory.DeZ9VUvl.js} +1 -1
- streamlit/static/static/js/{mergeWith.B_7zmsM4.js → mergeWith.CVkhrWUb.js} +1 -1
- streamlit/static/static/js/{number-overlay-editor.CSeVhHRU.js → number-overlay-editor.Bpkm3nTq.js} +1 -1
- streamlit/static/static/js/{possibleConstructorReturn.nNhsvgRd.js → possibleConstructorReturn.CIDCId52.js} +1 -1
- streamlit/static/static/js/{sandbox.Cgm3iuL6.js → sandbox.TrkMaokR.js} +1 -1
- streamlit/static/static/js/{textarea.BR8rlyih.js → textarea.QKjxR64N.js} +2 -2
- streamlit/static/static/js/{timepicker.w4XhAenH.js → timepicker.DJYmE1dK.js} +1 -1
- streamlit/static/static/js/{toConsumableArray.CgkEPBwD.js → toConsumableArray.BZoworE-.js} +1 -1
- streamlit/static/static/js/{uniqueId.j-1rlNNH.js → uniqueId.O0UbJ2Bu.js} +1 -1
- streamlit/static/static/js/{useBasicWidgetState.zXY9CjFS.js → useBasicWidgetState.Ci89jaH5.js} +1 -1
- streamlit/static/static/js/useOnInputChange.Cxh6ExEn.js +1 -0
- streamlit/static/static/js/{withFullScreenWrapper.Ov13692o.js → withFullScreenWrapper.iW37lS8Z.js} +1 -1
- streamlit/static/static/media/SourceCodeVF-Italic.ttf.Ba1oaZG1.woff2 +0 -0
- streamlit/static/static/media/SourceCodeVF-Upright.ttf.BjWn63N-.woff2 +0 -0
- streamlit/static/static/media/SourceSansVF-Italic.ttf.Bt9VkdQ3.woff2 +0 -0
- streamlit/static/static/media/SourceSansVF-Upright.ttf.BsWL4Kly.woff2 +0 -0
- streamlit/static/static/media/SourceSerifVariable-Italic.ttf.CVdzAtxO.woff2 +0 -0
- streamlit/static/static/media/SourceSerifVariable-Roman.ttf.mdpVL9bi.woff2 +0 -0
- streamlit/string_util.py +14 -19
- streamlit/temporary_directory.py +13 -4
- streamlit/testing/v1/app_test.py +15 -10
- streamlit/testing/v1/element_tree.py +157 -178
- streamlit/testing/v1/local_script_runner.py +11 -15
- streamlit/testing/v1/util.py +11 -4
- streamlit/type_util.py +8 -12
- streamlit/url_util.py +1 -1
- streamlit/user_info.py +6 -5
- streamlit/util.py +25 -1
- streamlit/vendor/pympler/asizeof.py +3 -2
- streamlit/watcher/event_based_path_watcher.py +21 -2
- streamlit/watcher/folder_black_list.py +2 -2
- streamlit/watcher/local_sources_watcher.py +64 -18
- streamlit/watcher/path_watcher.py +6 -10
- streamlit/watcher/polling_path_watcher.py +8 -7
- streamlit/watcher/util.py +7 -6
- streamlit/web/bootstrap.py +16 -14
- streamlit/web/cli.py +52 -45
- streamlit/web/server/__init__.py +7 -3
- streamlit/web/server/app_static_file_handler.py +1 -1
- streamlit/web/server/authlib_tornado_integration.py +9 -4
- streamlit/web/server/browser_websocket_handler.py +8 -2
- streamlit/web/server/component_request_handler.py +14 -10
- streamlit/web/server/media_file_handler.py +14 -7
- streamlit/web/server/oauth_authlib_routes.py +41 -9
- streamlit/web/server/oidc_mixin.py +35 -17
- streamlit/web/server/routes.py +32 -22
- streamlit/web/server/server.py +13 -24
- streamlit/web/server/server_util.py +43 -9
- streamlit/web/server/stats_request_handler.py +7 -5
- streamlit/web/server/upload_file_request_handler.py +22 -19
- streamlit/web/server/websocket_headers.py +1 -1
- {streamlit-1.45.1.dist-info → streamlit-1.46.1.dist-info}/METADATA +4 -4
- streamlit-1.46.1.dist-info/RECORD +559 -0
- {streamlit-1.45.1.dist-info → streamlit-1.46.1.dist-info}/WHEEL +1 -1
- streamlit/elements/lib/event_utils.py +0 -39
- streamlit/static/static/js/Toolbar.D9RUZv9G.js +0 -1
- streamlit/static/static/js/UploadFileInfo.C-jY39rj.js +0 -1
- streamlit/static/static/js/index.8jhZBWF2.js +0 -3
- streamlit/static/static/js/index.BCx3C6e_.js +0 -1
- streamlit/static/static/js/index.BRuTz_S4.js +0 -1
- streamlit/static/static/js/index.Bcru_ti-.js +0 -1
- streamlit/static/static/js/index.Bl1FMJRd.js +0 -1
- streamlit/static/static/js/index.C1z8KpLA.js +0 -779
- streamlit/static/static/js/index.C32I2PUe.js +0 -2
- streamlit/static/static/js/index.C5GnDRB7.js +0 -1
- streamlit/static/static/js/index.CG4qPaaW.js +0 -2
- streamlit/static/static/js/index.C_msmT1u.js +0 -1
- streamlit/static/static/js/index.CbeNTdd6.js +0 -1
- streamlit/static/static/js/index.CnGQVJcw.js +0 -12
- streamlit/static/static/js/index.CopVVq4l.js +0 -1
- streamlit/static/static/js/index.CtXupx4d.js +0 -197
- streamlit/static/static/js/index.DGmCchO7.js +0 -1
- streamlit/static/static/js/index.DH6zBk0e.js +0 -1
- streamlit/static/static/js/index.DHVlVWsm.js +0 -1
- streamlit/static/static/js/index.DRKIVBoi.js +0 -1
- streamlit/static/static/js/index.DUd-lFXx.js +0 -73
- streamlit/static/static/js/index.D_uRBA4B.js +0 -1
- streamlit/static/static/js/index.QHNfgPJd.js +0 -1
- streamlit/static/static/js/index.a-RJocYL.js +0 -1
- streamlit/static/static/js/index.cvz4B1gy.js +0 -1
- streamlit/static/static/js/index.t--hEgTQ.js +0 -6
- streamlit/static/static/js/useOnInputChange.z04u96A8.js +0 -1
- streamlit/static/static/media/SourceCodePro-Bold.CFEfr7-q.woff2 +0 -0
- streamlit/static/static/media/SourceCodePro-BoldItalic.C-LkFXxa.woff2 +0 -0
- streamlit/static/static/media/SourceCodePro-Italic.CxFOx7N-.woff2 +0 -0
- streamlit/static/static/media/SourceCodePro-Regular.CBOlD63d.woff2 +0 -0
- streamlit/static/static/media/SourceCodePro-SemiBold.CFHwW3Wd.woff2 +0 -0
- streamlit/static/static/media/SourceCodePro-SemiBoldItalic.Cg2yRu82.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-Bold.-6c9oR8J.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-BoldItalic.DmM_grLY.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-Italic.I1ipWe7Q.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-Regular.DZLUzqI4.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-SemiBold.sKQIyTMz.woff2 +0 -0
- streamlit/static/static/media/SourceSansPro-SemiBoldItalic.C0wP0icr.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-Bold.8TUnKj4x.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-BoldItalic.CBVO7Ve7.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-Italic.DkFgL2HZ.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-Regular.CNJNET2S.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-SemiBold.CHyh9GC5.woff2 +0 -0
- streamlit/static/static/media/SourceSerifPro-SemiBoldItalic.CBtz8sWN.woff2 +0 -0
- streamlit-1.45.1.dist-info/RECORD +0 -568
- {streamlit-1.45.1.data → streamlit-1.46.1.data}/scripts/streamlit.cmd +0 -0
- {streamlit-1.45.1.dist-info → streamlit-1.46.1.dist-info}/entry_points.txt +0 -0
- {streamlit-1.45.1.dist-info → streamlit-1.46.1.dist-info}/top_level.txt +0 -0
streamlit/runtime/caching/cache_resource_api.py

@@ -69,7 +69,7 @@ def _equal_validate_funcs(a: ValidateFunc | None, b: ValidateFunc | None) -> bool:
 class ResourceCaches(CacheStatsProvider):
     """Manages all ResourceCache instances."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._caches_lock = threading.Lock()
         self._function_caches: dict[str, ResourceCache] = {}
 
@@ -151,7 +151,7 @@ class CachedResourceFuncInfo(CachedFuncInfo):
         ttl: float | timedelta | str | None,
         validate: ValidateFunc | None,
         hash_funcs: HashFuncsDict | None = None,
-    ):
+    ) -> None:
         super().__init__(
             func,
             show_spinner=show_spinner,
@@ -189,7 +189,7 @@ class CacheResourceAPI:
     and st.cache_resource.clear().
     """
 
-    def __init__(self, decorator_metric_name: str):
+    def __init__(self, decorator_metric_name: str) -> None:
         """Create a CacheResourceAPI instance.
 
         Parameters
@@ -234,7 +234,7 @@ class CacheResourceAPI:
         validate: ValidateFunc | None = None,
         experimental_allow_widgets: bool = False,
         hash_funcs: HashFuncsDict | None = None,
-    ):
+    ) -> F | Callable[[F], F]:
         return self._decorator(
             func,
             ttl=ttl,
@@ -255,7 +255,7 @@ class CacheResourceAPI:
         validate: ValidateFunc | None,
         experimental_allow_widgets: bool,
         hash_funcs: HashFuncsDict | None = None,
-    ):
+    ) -> F | Callable[[F], F]:
         """Decorator to cache functions that return global resources (e.g. database connections, ML models).
 
         Cached objects are shared across all users, sessions, and reruns. They
@@ -417,9 +417,9 @@ class CacheResourceAPI:
         # Support passing the params via function decorator, e.g.
         # @st.cache_resource(show_spinner=False)
         if func is None:
-            return lambda f: make_cached_func_wrapper(
+            return lambda f: make_cached_func_wrapper(  # type: ignore
                 CachedResourceFuncInfo(
-                    func=f,
+                    func=f,  # type: ignore
                     show_spinner=show_spinner,
                     max_entries=max_entries,
                     ttl=ttl,
@@ -455,7 +455,7 @@ class ResourceCache(Cache):
         ttl_seconds: float,
         validate: ValidateFunc | None,
         display_name: str,
-    ):
+    ) -> None:
         super().__init__()
         self.key = key
         self.display_name = display_name
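The new `F | Callable[[F], F]` return annotations above reflect the two ways the decorator is invoked: bare, where it receives the function directly, and parameterized, where it returns a decorator. A minimal usage sketch (the decorated functions below are illustrative placeholders, not part of the package):

```python
import streamlit as st


@st.cache_resource  # bare form: receives the function directly, returns the wrapped F
def get_model() -> dict:
    return {"weights": [0.1, 0.2, 0.3]}


@st.cache_resource(ttl=3600, show_spinner=False)  # parameterized form: returns Callable[[F], F]
def get_connection(url: str) -> str:
    return f"connected to {url}"
```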
streamlit/runtime/caching/cache_utils.py

@@ -65,7 +65,7 @@ TTLCACHE_TIMER = time.monotonic
 class Cache:
     """Function cache interface. Caches persist across script runs."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._value_locks: dict[str, threading.Lock] = defaultdict(threading.Lock)
         self._value_locks_lock = threading.Lock()
 
@@ -100,7 +100,7 @@ class Cache:
         with self._value_locks_lock:
             return self._value_locks[value_key]
 
-    def clear(self, key: str | None = None):
+    def clear(self, key: str | None = None) -> None:
         """Clear values from this cache.
         If no argument is passed, all items are cleared from the cache.
         A key can be passed to clear that key from the cache only.
@@ -130,7 +130,7 @@ class CachedFuncInfo:
         func: FunctionType,
         show_spinner: bool | str,
         hash_funcs: HashFuncsDict | None,
-    ):
+    ) -> None:
         self.func = func
         self.show_spinner = show_spinner
         self.hash_funcs = hash_funcs
@@ -167,17 +167,17 @@ class BoundCachedFunc:
     decorated function is a class method.
     """
 
-    def __init__(self, cached_func: CachedFunc, instance: Any):
+    def __init__(self, cached_func: CachedFunc, instance: Any) -> None:
         self._cached_func = cached_func
         self._instance = instance
 
-    def __call__(self, *args, **kwargs) -> Any:
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
         return self._cached_func(self._instance, *args, **kwargs)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"<BoundCachedFunc: {self._cached_func._info.func} of {self._instance}>"
 
-    def clear(self, *args, **kwargs):
+    def clear(self, *args: Any, **kwargs: Any) -> None:
         if args or kwargs:
             # The instance is required as first parameter to allow
             # args to be correctly resolved to the parameter names:
@@ -189,21 +189,21 @@ class BoundCachedFunc:
 
 
 class CachedFunc:
-    def __init__(self, info: CachedFuncInfo):
+    def __init__(self, info: CachedFuncInfo) -> None:
         self._info = info
         self._function_key = _make_function_key(info.cache_type, info.func)
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"<CachedFunc: {self._info.func}>"
 
-    def __get__(self, instance, owner=None):
+    def __get__(self, instance: Any, owner: Any | None = None) -> Any:
         """CachedFunc implements descriptor protocol to support cache methods."""
         if instance is None:
             return self
 
         return functools.update_wrapper(BoundCachedFunc(self, instance), self)
 
-    def __call__(self, *args, **kwargs) -> Any:
+    def __call__(self, *args: Any, **kwargs: Any) -> Any:
         """The wrapper. We'll only call our underlying function on a cache miss."""
 
         spinner_message: str | None = None
@@ -346,7 +346,7 @@ class CachedFunc:
             return_value=computed_value, func=self._info.func
         )
 
-    def clear(self, *args, **kwargs):
+    def clear(self, *args: Any, **kwargs: Any) -> None:
         """Clear the cached function's associated cache.
 
         If no arguments are passed, Streamlit will clear all values cached for
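The `__get__` hunk above only adds annotations, but the descriptor pattern it annotates is what lets cached methods work on instances while keeping utility attributes reachable. A standalone plain-Python sketch of that pattern (names here are illustrative, not Streamlit's):

```python
from __future__ import annotations

import functools
from typing import Any


class Wrapper:
    """Callable wrapper that also implements the descriptor protocol."""

    def __init__(self, func) -> None:
        self._func = func
        self.calls = 0

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        self.calls += 1
        return self._func(*args, **kwargs)

    def __get__(self, instance: Any, owner: Any | None = None) -> Any:
        if instance is None:
            return self
        # Bind the wrapper to the instance, mirroring BoundCachedFunc above.
        return functools.update_wrapper(functools.partial(self, instance), self)


class Service:
    @Wrapper
    def double(self, x: int) -> int:
        return 2 * x


svc = Service()
print(svc.double(3))         # 6 -- the descriptor supplied `self` automatically
print(Service.double.calls)  # 1 -- utility attributes stay reachable on the class
```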
@@ -473,8 +473,9 @@ def _make_function_key(cache_type: CacheType, func: FunctionType) -> str:
 
     # Include the function's __module__ and __qualname__ strings in the hash.
     # This means that two identical functions in different modules
-    # will not share a hash
-    # functions in the same module
+    # will not share a hash.
+    # It also means that two identical *nested* functions in the same module
+    # *will* share a hash (see https://github.com/streamlit/streamlit/issues/11157).
     update_hash(
         (func.__module__, func.__qualname__),
         hasher=func_hasher,
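The new comment above describes a real limitation: the function key is derived from `__module__` and `__qualname__`, and closures produced by the same factory share both. A plain-Python illustration (no Streamlit required):

```python
def make_adder(offset: int):
    def adder(x: int) -> int:
        return x + offset

    return adder


a = make_adder(1)
b = make_adder(2)

# Both report the same identity that the cache's function key is built from.
print(a.__module__, a.__qualname__)  # e.g. __main__ make_adder.<locals>.adder
print(b.__module__, b.__qualname__)  # same values
assert (a.__module__, a.__qualname__) == (b.__module__, b.__qualname__)
```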
streamlit/runtime/caching/cached_message_replay.py

@@ -121,7 +121,7 @@ class CachedMessageReplayContext(threading.local):
     of this class across multiple threads.
     """
 
-    def __init__(self, cache_type: CacheType):
+    def __init__(self, cache_type: CacheType) -> None:
         self._cached_message_stack: list[list[MsgData]] = []
         self._seen_dg_stack: list[set[str]] = []
         self._most_recent_messages: list[MsgData] = []
@@ -132,7 +132,7 @@ class CachedMessageReplayContext(threading.local):
         return util.repr_(self)
 
     @contextlib.contextmanager
-    def calling_cached_function(self, func: FunctionType) -> Iterator[None]:
+    def calling_cached_function(self, func: FunctionType) -> Iterator[None]:  # noqa: ARG002
         """Context manager that should wrap the invocation of a cached function.
         It allows us to track any `st.foo` messages that are generated from inside the
         function for playback during cache retrieval.
@@ -169,8 +169,9 @@ class CachedMessageReplayContext(threading.local):
         executing cached functions, so they can be replayed any time the function's
         execution is skipped because they're in the cache.
         """
-        if not runtime.exists():
+        if not runtime.exists() or not in_cached_function.get():
             return
+
         if len(self._cached_message_stack) >= 1:
             id_to_save = self.select_dg_to_save(invoked_dg_id, used_dg_id)
 
@@ -200,6 +201,9 @@ class CachedMessageReplayContext(threading.local):
         used_dg_id: str,
         returned_dg_id: str,
     ) -> None:
+        if not in_cached_function.get():
+            return
+
         id_to_save = self.select_dg_to_save(invoked_dg_id, used_dg_id)
         for msgs in self._cached_message_stack:
             msgs.append(BlockMsgData(block_proto, id_to_save, returned_dg_id))
@@ -218,13 +222,15 @@ class CachedMessageReplayContext(threading.local):
         """
         if len(self._seen_dg_stack) > 0 and acting_on_id in self._seen_dg_stack[-1]:
             return acting_on_id
-
-        return invoked_id
+        return invoked_id
 
-    def
-        self,
+    def save_media_data(
+        self, media_data: bytes | str, mimetype: str, media_id: str
     ) -> None:
-
+        if not in_cached_function.get():
+            return
+
+        self._media_data.append(MediaMsgData(media_data, mimetype, media_id))
 
 
     def replay_cached_messages(
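For context on the `in_cached_function` guards added above: elements emitted inside a cached function are recorded while the function runs and replayed whenever the value is later served from the cache. A hedged sketch of that user-facing behavior (assumes it runs inside a Streamlit app; the function body is illustrative):

```python
import streamlit as st


@st.cache_data(ttl=600)
def load_report(n: int) -> list[int]:
    # Recorded on the first (uncached) run, replayed on later cache hits.
    st.write(f"Computing report for n={n} ...")
    return list(range(n))


st.dataframe(load_report(5))
```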
streamlit/runtime/caching/hashing.py

@@ -34,7 +34,7 @@ import weakref
 from enum import Enum
 from re import Pattern
 from types import MappingProxyType
-from typing import Any, Callable, Final, Union, cast
+from typing import TYPE_CHECKING, Any, Callable, Final, Union, cast
 
 from typing_extensions import TypeAlias
 
@@ -44,6 +44,10 @@ from streamlit.runtime.caching.cache_errors import UnhashableTypeError
 from streamlit.runtime.caching.cache_type import CacheType
 from streamlit.runtime.uploaded_file_manager import UploadedFile
 
+if TYPE_CHECKING:
+    import numpy.typing as npt
+    from PIL.Image import Image
+
 _LOGGER: Final = logger.get_logger(__name__)
 
 # If a dataframe has more than this many rows, we consider it large and hash a sample.
@@ -66,11 +70,11 @@ _CYCLE_PLACEHOLDER: Final = (
 class UserHashError(StreamlitAPIException):
     def __init__(
         self,
-        orig_exc,
-        object_to_hash,
-        hash_func,
+        orig_exc: BaseException,
+        object_to_hash: Any,
+        hash_func: Callable[[Any], Any],
         cache_type: CacheType | None = None,
-    ):
+    ) -> None:
         self.alternate_name = type(orig_exc).__name__
         self.hash_func = hash_func
         self.cache_type = cache_type
@@ -80,30 +84,33 @@ class UserHashError(StreamlitAPIException):
         super().__init__(msg)
         self.with_traceback(orig_exc.__traceback__)
 
-    def _get_message_from_func(
+    def _get_message_from_func(
+        self,
+        orig_exc: BaseException,
+        cached_func: Any,
+    ) -> str:
         args = self._get_error_message_args(orig_exc, cached_func)
 
         return (
-            """
-
+            f"""
+{args["orig_exception_desc"]}
 
-This error is likely due to a bug in
-user-defined hash function that was passed into the
-
+This error is likely due to a bug in {args["hash_func_name"]}, which is a
+user-defined hash function that was passed into the `{args["cache_primitive"]}` decorator of
+{args["object_desc"]}.
 
-
-
+{args["hash_func_name"]} failed when hashing an object of type
+`{args["failed_obj_type_str"]}`. If you don't know where that object is coming from,
 try looking at the hash chain below for an object that you do recognize, then
 pass that to `hash_funcs` instead:
 
 ```
-
+{args["hash_stack"]}
 ```
 
 If you think this is actually a Streamlit bug, please
 [file a bug report here](https://github.com/streamlit/streamlit/issues/new/choose).
 """
-            % args
         ).strip("\n")
 
     def _get_error_message_args(
@@ -117,11 +124,10 @@ If you think this is actually a Streamlit bug, please
 
         if hash_source is None:
             object_desc = "something"
+        elif hasattr(hash_source, "__name__"):
+            object_desc = f"`{hash_source.__name__}()`"
         else:
-            if hasattr(hash_source, "__name__"):
-                object_desc = f"`{hash_source.__name__}()`"
-            else:
-                object_desc = "a function"
+            object_desc = "a function"
 
         decorator_name = ""
         if self.cache_type is CacheType.RESOURCE:
@@ -129,10 +135,11 @@ If you think this is actually a Streamlit bug, please
         elif self.cache_type is CacheType.DATA:
             decorator_name = "@st.cache_data"
 
-        if hasattr(self.hash_func, "__name__"):
-            hash_func_name = f"`{self.hash_func.__name__}()`"
-        else:
-            hash_func_name = "a function"
+        hash_func_name = (
+            f"`{self.hash_func.__name__}()`"
+            if hasattr(self.hash_func, "__name__")
+            else "a function"
+        )
 
         return {
             "orig_exception_desc": str(orig_exc),
@@ -146,7 +153,7 @@ If you think this is actually a Streamlit bug, please
 
 def update_hash(
     val: Any,
-    hasher,
+    hasher: Any,
     cache_type: CacheType,
     hash_source: Callable[..., Any] | None = None,
    hash_funcs: HashFuncsDict | None = None,
@@ -174,7 +181,7 @@ class _HashStack:
     This causes the "in" to crash since it expects a boolean.
     """
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._stack: collections.OrderedDict[int, list[Any]] = collections.OrderedDict()
         # A function that we decorate with streamlit cache
         # primitive (st.cache_data or st.cache_resource).
@@ -183,19 +190,19 @@ class _HashStack:
     def __repr__(self) -> str:
         return util.repr_(self)
 
-    def push(self, val: Any):
+    def push(self, val: Any) -> None:
         self._stack[id(val)] = val
 
-    def pop(self):
+    def pop(self) -> None:
         self._stack.popitem()
 
-    def __contains__(self, val: Any):
+    def __contains__(self, val: Any) -> bool:
         return id(val) in self._stack
 
     def pretty_print(self) -> str:
         def to_str(v: Any) -> str:
             try:
-                return f"Object of type {type_util.get_fqn_type(v)}: {
+                return f"Object of type {type_util.get_fqn_type(v)}: {v}"
             except Exception:
                 return "<Unable to convert item to string>"
 
@@ -205,7 +212,7 @@ class _HashStacks:
 class _HashStacks:
     """Stacks of what has been hashed, with at most 1 stack per thread."""
 
-    def __init__(self):
+    def __init__(self) -> None:
         self._stacks: weakref.WeakKeyDictionary[threading.Thread, _HashStack] = (
             weakref.WeakKeyDictionary()
         )
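The `UserHashError` message above points users at `hash_funcs`, the documented escape hatch for arguments Streamlit cannot hash on its own. A short sketch (the `FileReference` class is a made-up example, not part of Streamlit):

```python
import streamlit as st


class FileReference:
    def __init__(self, filename: str) -> None:
        self.filename = filename


@st.cache_data(hash_funcs={FileReference: lambda ref: ref.filename})
def read_file(ref: FileReference) -> str:
    with open(ref.filename, encoding="utf-8") as f:
        return f.read()
```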
@@ -248,28 +255,20 @@ def _key(obj: Any | None) -> Any:
     if obj is None:
         return None
 
-    def is_simple(obj):
+    def is_simple(obj: Any) -> bool:
         return (
-            isinstance(obj, bytes)
-            or isinstance(obj, bytearray)
-            or isinstance(obj, str)
-            or isinstance(obj, float)
-            or isinstance(obj, int)
-            or isinstance(obj, bool)
-            or isinstance(obj, uuid.UUID)
+            isinstance(obj, (bytes, bytearray, str, float, int, bool, uuid.UUID))
             or obj is None
         )
 
     if is_simple(obj):
         return obj
 
-    if isinstance(obj, tuple):
-        if all(map(is_simple, obj)):
-            return obj
+    if isinstance(obj, tuple) and all(map(is_simple, obj)):
+        return obj
 
-    if isinstance(obj, list):
-        if all(map(is_simple, obj)):
-            return ("__l", tuple(obj))
+    if isinstance(obj, list) and all(map(is_simple, obj)):
+        return ("__l", tuple(obj))
 
     if inspect.isbuiltin(obj) or inspect.isroutine(obj) or inspect.iscode(obj):
         return id(obj)
@@ -280,7 +279,9 @@ def _key(obj: Any | None) -> Any:
 class _CacheFuncHasher:
     """A hasher that can hash objects with cycles."""
 
-    def __init__(self, cache_type: CacheType, hash_funcs: HashFuncsDict | None = None):
+    def __init__(
+        self, cache_type: CacheType, hash_funcs: HashFuncsDict | None = None
+    ) -> None:
         # Can't use types as the keys in the internal _hash_funcs because
         # we always remove user-written modules from memory when rerunning a
         # script in order to reload it and grab the latest code changes.
@@ -312,9 +313,8 @@ class _CacheFuncHasher:
         key = (tname, _key(obj))
 
         # Memoize if possible.
-        if key[1] is not NoResult:
-            if key in self._hashes:
-                return self._hashes[key]
+        if key[1] is not NoResult and key in self._hashes:
+            return self._hashes[key]
 
         # Break recursive cycles.
         if obj in hash_stacks.current:
@@ -340,7 +340,7 @@ class _CacheFuncHasher:
 
         return b
 
-    def update(self, hasher, obj: Any) -> None:
+    def update(self, hasher: Any, obj: Any) -> None:
         """Update the provided hasher with the hash of an object."""
         b = self.to_bytes(obj)
         hasher.update(b)
@@ -361,10 +361,10 @@ class _CacheFuncHasher:
             # deep, so we don't try to hash them at all.
             return self.to_bytes(id(obj))
 
-        elif isinstance(obj, bytes) or isinstance(obj, bytearray):
+        if isinstance(obj, (bytes, bytearray)):
             return obj
 
-        elif type_util.get_fqn_type(obj) in self._hash_funcs:
+        if type_util.get_fqn_type(obj) in self._hash_funcs:
             # Escape hatch for unsupported objects
             hash_func = self._hash_funcs[type_util.get_fqn_type(obj)]
             try:
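A simplified, standalone re-implementation of the `_key()` fast path and the tightened memoization guard shown above, to make the idea concrete (this is an illustration, not the actual Streamlit internals):

```python
import uuid
from typing import Any

_NO_KEY = object()  # stand-in for streamlit's NoResult sentinel
_SIMPLE_TYPES = (bytes, bytearray, str, float, int, bool, uuid.UUID)


def _is_simple(obj: Any) -> bool:
    return isinstance(obj, _SIMPLE_TYPES) or obj is None


def _key(obj: Any) -> Any:
    if _is_simple(obj):
        return obj
    if isinstance(obj, tuple) and all(map(_is_simple, obj)):
        return obj
    if isinstance(obj, list) and all(map(_is_simple, obj)):
        return ("__l", tuple(obj))  # tag lists so they never collide with tuples
    return _NO_KEY  # everything else needs a full structural hash


_digests: dict[Any, bytes] = {}
key = ("str", _key("hello"))
_digests[key] = b"digest-of-hello"
# Mirrors the fixed guard: memoize only when a usable key exists *and* it was seen before.
print(key[1] is not _NO_KEY and key in _digests)  # True
```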
@@ -375,57 +375,59 @@ class _CacheFuncHasher:
             ) from ex
             return self.to_bytes(output)
 
-        elif isinstance(obj, str):
+        if isinstance(obj, str):
             return obj.encode()
 
-        elif isinstance(obj, float):
+        if isinstance(obj, float):
             return _float_to_bytes(obj)
 
-        elif isinstance(obj, int):
+        if isinstance(obj, int):
             return _int_to_bytes(obj)
 
-        elif isinstance(obj, uuid.UUID):
+        if isinstance(obj, uuid.UUID):
             return obj.bytes
 
-        elif isinstance(obj, datetime.datetime):
+        if isinstance(obj, datetime.datetime):
             return obj.isoformat().encode()
 
-        elif isinstance(obj, (list, tuple)):
+        if isinstance(obj, (list, tuple)):
             for item in obj:
                 self.update(h, item)
             return h.digest()
 
-        elif isinstance(obj, dict):
+        if isinstance(obj, dict):
             for item in obj.items():
                 self.update(h, item)
             return h.digest()
 
-        elif obj is None:
+        if obj is None:
             return b"0"
 
-        elif obj is True:
+        if obj is True:
             return b"1"
 
-        elif obj is False:
+        if obj is False:
             return b"0"
 
-        elif dataclasses.is_dataclass(obj):
+        if not isinstance(obj, type) and dataclasses.is_dataclass(obj):
             return self.to_bytes(dataclasses.asdict(obj))
-        elif isinstance(obj, Enum):
+        if isinstance(obj, Enum):
             return str(obj).encode()
 
-        elif type_util.is_type(obj, "pandas.core.series.Series"):
+        if type_util.is_type(obj, "pandas.core.series.Series"):
             import pandas as pd
 
-            obj = cast("pd.Series", obj)
-            self.update(h, obj.size)
-            self.update(h, obj.dtype.name)
+            series_obj: pd.Series = cast("pd.Series", obj)
+            self.update(h, series_obj.size)
+            self.update(h, series_obj.dtype.name)
 
-            if len(obj) >= _PANDAS_ROWS_LARGE:
-                obj = obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
+            if len(series_obj) >= _PANDAS_ROWS_LARGE:
+                series_obj = series_obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
 
             try:
-                self.update(h, pd.util.hash_pandas_object(obj).to_numpy().tobytes())
+                self.update(
+                    h, pd.util.hash_pandas_object(series_obj).to_numpy().tobytes()
+                )
                 return h.digest()
             except TypeError:
                 _LOGGER.warning(
@@ -435,22 +437,22 @@ class _CacheFuncHasher:
 
                 # Use pickle if pandas cannot hash the object for example if
                 # it contains unhashable objects.
-                return b"%s" % pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
+                return b"%s" % pickle.dumps(series_obj, pickle.HIGHEST_PROTOCOL)
 
         elif type_util.is_type(obj, "pandas.core.frame.DataFrame"):
             import pandas as pd
 
-            obj = cast("pd.DataFrame", obj)
-            self.update(h, obj.shape)
+            df_obj: pd.DataFrame = cast("pd.DataFrame", obj)
+            self.update(h, df_obj.shape)
 
-            if len(obj) >= _PANDAS_ROWS_LARGE:
-                obj = obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
+            if len(df_obj) >= _PANDAS_ROWS_LARGE:
+                df_obj = df_obj.sample(n=_PANDAS_SAMPLE_SIZE, random_state=0)
             try:
                 column_hash_bytes = self.to_bytes(
-                    pd.util.hash_pandas_object(obj.dtypes)
+                    pd.util.hash_pandas_object(df_obj.dtypes)
                 )
                 self.update(h, column_hash_bytes)
-                values_hash_bytes = self.to_bytes(pd.util.hash_pandas_object(obj))
+                values_hash_bytes = self.to_bytes(pd.util.hash_pandas_object(df_obj))
                 self.update(h, values_hash_bytes)
                 return h.digest()
             except TypeError:
@@ -461,7 +463,7 @@ class _CacheFuncHasher:
 
                 # Use pickle if pandas cannot hash the object for example if
                 # it contains unhashable objects.
-                return b"%s" % pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
+                return b"%s" % pickle.dumps(df_obj, pickle.HIGHEST_PROTOCOL)
 
         elif type_util.is_type(obj, "polars.series.series.Series"):
             import polars as pl  # type: ignore[import-not-found]
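The Series/DataFrame branches above sample large data with a fixed seed before hashing, trading exactness for speed while keeping the digest deterministic. A standalone sketch of that strategy (thresholds and the MD5 choice are illustrative, not Streamlit's exact constants):

```python
import hashlib

import numpy as np
import pandas as pd

ROWS_LARGE = 100_000
SAMPLE_SIZE = 10_000


def digest_dataframe(df: pd.DataFrame) -> str:
    h = hashlib.new("md5")
    h.update(str(df.shape).encode())                                  # shape of the full frame
    h.update(pd.util.hash_pandas_object(df.dtypes).to_numpy().tobytes())
    if len(df) >= ROWS_LARGE:
        df = df.sample(n=SAMPLE_SIZE, random_state=0)                 # deterministic sample
    h.update(pd.util.hash_pandas_object(df).to_numpy().tobytes())
    return h.hexdigest()


df = pd.DataFrame({"a": np.arange(200_000), "b": np.ones(200_000)})
print(digest_dataframe(df) == digest_dataframe(df.copy()))  # True: stable digest
```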
@@ -514,39 +516,32 @@ class _CacheFuncHasher:
             # it contains unhashable objects.
             return b"%s" % pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
         elif type_util.is_type(obj, "numpy.ndarray"):
-
-
-
-            # - can be removed once we sunset support for Python 3.8
-            obj = cast("np.ndarray[Any, Any]", obj)
-            self.update(h, obj.shape)
-            self.update(h, str(obj.dtype))
+            np_obj: npt.NDArray[Any] = cast("npt.NDArray[Any]", obj)
+            self.update(h, np_obj.shape)
+            self.update(h, str(np_obj.dtype))
 
-            if obj.size >= _NP_SIZE_LARGE:
+            if np_obj.size >= _NP_SIZE_LARGE:
                 import numpy as np
 
                 state = np.random.RandomState(0)
-                obj = state.choice(obj.flat, size=_NP_SAMPLE_SIZE)
+                np_obj = state.choice(np_obj.flat, size=_NP_SAMPLE_SIZE)
 
-            self.update(h, obj.tobytes())
+            self.update(h, np_obj.tobytes())
             return h.digest()
         elif type_util.is_type(obj, "PIL.Image.Image"):
             import numpy as np
-            from PIL.Image import Image  # noqa: TC002
 
-            obj = cast("Image", obj)
+            pil_obj: Image = cast("Image", obj)
 
             # we don't just hash the results of obj.tobytes() because we want to use
             # the sampling logic for numpy data
-            np_array = np.frombuffer(obj.tobytes(), dtype="uint8")
+            np_array = np.frombuffer(pil_obj.tobytes(), dtype="uint8")
             return self.to_bytes(np_array)
 
         elif inspect.isbuiltin(obj):
             return bytes(obj.__name__.encode())
 
-        elif isinstance(obj, MappingProxyType) or isinstance(
-            obj, collections.abc.ItemsView
-        ):
+        elif isinstance(obj, (MappingProxyType, collections.abc.ItemsView)):
             return self.to_bytes(dict(obj))
 
         elif type_util.is_type(obj, "builtins.getset_descriptor"):
@@ -562,9 +557,8 @@ class _CacheFuncHasher:
             return h.digest()
 
         elif hasattr(obj, "name") and (
-            isinstance(obj, io.IOBase)
             # Handle temporary files used during testing
-            or isinstance(obj, tempfile._TemporaryFileWrapper)
+            isinstance(obj, (io.IOBase, tempfile._TemporaryFileWrapper))
         ):
             # Hash files as name + last modification date + offset.
             # NB: we're using hasattr("name") to differentiate between
@@ -580,7 +574,7 @@ class _CacheFuncHasher:
         elif isinstance(obj, Pattern):
             return self.to_bytes([obj.pattern, obj.flags])
 
-        elif isinstance(obj, io.StringIO) or isinstance(obj, io.BytesIO):
+        elif isinstance(obj, (io.StringIO, io.BytesIO)):
             # Hash in-memory StringIO/BytesIO by their full contents
             # and seek position.
             self.update(h, obj.tell())
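The last hunk hashes in-memory `StringIO`/`BytesIO` buffers by their full contents plus seek position. A standalone illustration of why the offset matters (a sketch, not Streamlit's internal helper):

```python
import hashlib
import io


def digest_buffer(buf: io.StringIO) -> str:
    h = hashlib.new("md5")
    h.update(str(buf.tell()).encode())   # seek position is part of the identity
    h.update(buf.getvalue().encode())    # full contents
    return h.hexdigest()


buf = io.StringIO("hello world")
before = digest_buffer(buf)
buf.read(5)                              # moves the offset, contents unchanged
print(before != digest_buffer(buf))      # True: same data, different cache identity
```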
streamlit/runtime/caching/legacy_cache_api.py

@@ -36,11 +36,11 @@ def cache(
     persist: bool = False,
     allow_output_mutation: bool = False,
     show_spinner: bool = True,
-    suppress_st_warning: bool = False,
+    suppress_st_warning: bool = False,  # noqa: ARG001
     hash_funcs: HashFuncsDict | None = None,
     max_entries: int | None = None,
     ttl: float | None = None,
-):
+) -> F:
     """Legacy caching decorator (deprecated).
 
     Legacy caching with ``st.cache`` has been removed from Streamlit. This is
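The legacy `st.cache` stub above exists only to point users at the replacement APIs. A migration sketch following the documented guidance (function bodies below are placeholders):

```python
import streamlit as st


@st.cache_data(ttl=600, max_entries=100)   # replaces @st.cache for serializable data
def load_rows(n: int) -> list[int]:
    return list(range(n))


@st.cache_resource                          # for shared, unserializable resources
def get_client() -> dict:                   # (roughly the old allow_output_mutation use cases)
    return {"session": "shared across reruns and users"}
```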
streamlit/runtime/caching/storage/cache_storage_protocol.py

@@ -68,7 +68,7 @@ class CacheStorageKeyNotFoundError(CacheStorageError):
     """Raised when the key is not found in the cache storage."""
 
 
-class InvalidCacheStorageContext(CacheStorageError):
+class InvalidCacheStorageContextError(CacheStorageError):
     """Raised if the cache storage manager is not able to work with
     provided CacheStorageContext.
     """
streamlit/runtime/caching/storage/dummy_cache_storage.py

@@ -40,7 +40,7 @@ class MemoryCacheStorageManager(CacheStorageManager):
 
 
 class DummyCacheStorage(CacheStorage):
-    def get(self, key: str) -> bytes:
+    def get(self, key: str) -> bytes:  # noqa: ARG002
         """
         Dummy gets the value for a given key,
         always raises an CacheStorageKeyNotFoundError.