streamlit-nightly 1.37.2.dev20240819__py2.py3-none-any.whl → 1.37.2.dev20240820__py2.py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- streamlit/elements/image.py +15 -25
- streamlit/elements/lib/policies.py +10 -9
- streamlit/runtime/caching/__init__.py +1 -11
- streamlit/runtime/caching/cache_data_api.py +11 -83
- streamlit/runtime/caching/cache_errors.py +13 -9
- streamlit/runtime/caching/cache_resource_api.py +9 -58
- streamlit/runtime/caching/cache_utils.py +7 -12
- streamlit/runtime/caching/cached_message_replay.py +29 -185
- streamlit/runtime/caching/legacy_cache_api.py +15 -11
- streamlit/runtime/scriptrunner_utils/script_run_context.py +9 -4
- streamlit/runtime/state/widgets.py +0 -5
- streamlit/static/asset-manifest.json +3 -3
- streamlit/static/index.html +1 -1
- streamlit/static/static/js/1307.74bce9ab.chunk.js +1 -0
- streamlit/static/static/js/{main.8c6fc86e.js → main.ff81c7a3.js} +2 -2
- {streamlit_nightly-1.37.2.dev20240819.dist-info → streamlit_nightly-1.37.2.dev20240820.dist-info}/METADATA +1 -1
- {streamlit_nightly-1.37.2.dev20240819.dist-info → streamlit_nightly-1.37.2.dev20240820.dist-info}/RECORD +22 -22
- {streamlit_nightly-1.37.2.dev20240819.dist-info → streamlit_nightly-1.37.2.dev20240820.dist-info}/WHEEL +1 -1
- streamlit/static/static/js/1307.74a443f7.chunk.js +0 -1
- /streamlit/static/static/js/{main.8c6fc86e.js.LICENSE.txt → main.ff81c7a3.js.LICENSE.txt} +0 -0
- {streamlit_nightly-1.37.2.dev20240819.data → streamlit_nightly-1.37.2.dev20240820.data}/scripts/streamlit.cmd +0 -0
- {streamlit_nightly-1.37.2.dev20240819.dist-info → streamlit_nightly-1.37.2.dev20240820.dist-info}/entry_points.txt +0 -0
- {streamlit_nightly-1.37.2.dev20240819.dist-info → streamlit_nightly-1.37.2.dev20240820.dist-info}/top_level.txt +0 -0
streamlit/elements/image.py
CHANGED
```diff
@@ -25,7 +25,7 @@ import io
 import os
 import re
 from enum import IntEnum
-from typing import TYPE_CHECKING, Final, List, Literal, Sequence, Union, cast
+from typing import TYPE_CHECKING, Final, Literal, Sequence, Union, cast
 
 from typing_extensions import TypeAlias
 
@@ -54,7 +54,7 @@ PILImage: TypeAlias = Union[
     "ImageFile.ImageFile", "Image.Image", "GifImagePlugin.GifImageFile"
 ]
 AtomicImage: TypeAlias = Union[PILImage, "npt.NDArray[Any]", io.BytesIO, str, bytes]
-ImageOrImageList: TypeAlias = Union[AtomicImage, List[AtomicImage]]
+ImageOrImageList: TypeAlias = Union[AtomicImage, Sequence[AtomicImage]]
 UseColumnWith: TypeAlias = Union[Literal["auto", "always", "never"], bool, None]
 Channels: TypeAlias = Literal["RGB", "BGR"]
 ImageFormat: TypeAlias = Literal["JPEG", "PNG", "GIF"]
@@ -178,14 +178,11 @@ class ImageMixin:
 
 
 def _image_may_have_alpha_channel(image: PILImage) -> bool:
-    if image.mode in ("RGBA", "LA", "P"):
-        return True
-    else:
-        return False
+    return image.mode in ("RGBA", "LA", "P")
 
 
 def _image_is_gif(image: PILImage) -> bool:
-    return bool(image.format == "GIF")
+    return image.format == "GIF"
 
 
 def _validate_image_format_string(
@@ -199,7 +196,7 @@ def _validate_image_format_string(
     "GIF" if the image is a GIF, and "JPEG" otherwise.
     """
     format = format.upper()
-    if format == "JPEG" or format == "PNG":
+    if format in {"JPEG", "PNG"}:
         return cast(ImageFormat, format)
 
     # We are forgiving on the spelling of JPEG
@@ -509,30 +506,23 @@ def marshall_images(
 
     # Turn single image and caption into one element list.
     images: Sequence[AtomicImage]
-    if isinstance(image, list):
-        images = image
+    if isinstance(image, (list, set, tuple)):
+        images = list(image)
     elif isinstance(image, np.ndarray) and len(cast(NumpyShape, image.shape)) == 4:
         images = _4d_to_list_3d(image)
     else:
-        images = [image]
+        images = [image]  # type: ignore
 
     if isinstance(caption, list):
         captions: Sequence[str | None] = caption
+    elif isinstance(caption, str):
+        captions = [caption]
+    elif isinstance(caption, np.ndarray) and len(cast(NumpyShape, caption.shape)) == 1:
+        captions = caption.tolist()
+    elif caption is None:
+        captions = [None] * len(images)
     else:
-        if isinstance(caption, str):
-            captions = [caption]
-        # You can pass in a 1-D Numpy array as captions.
-        elif (
-            isinstance(caption, np.ndarray)
-            and len(cast(NumpyShape, caption.shape)) == 1
-        ):
-            captions = caption.tolist()
-        # If there are no captions then make the captions list the same size
-        # as the images list.
-        elif caption is None:
-            captions = [None] * len(images)
-        else:
-            captions = [str(caption)]
+        captions = [str(caption)]
 
     assert isinstance(
         captions, list
```
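The `marshall_images` change widens accepted input from `list` to any `list`/`set`/`tuple` (matching the new `Sequence`-based `ImageOrImageList` alias) and flattens the nested caption branching into one `if`/`elif` chain. A minimal standalone sketch of that normalization logic (the function name and free-standing shape are mine, not streamlit's; assumes numpy is installed):

```python
from __future__ import annotations

from typing import Any, Sequence

import numpy as np


def normalize_captions(caption: Any, images: Sequence[Any]) -> list[str | None]:
    """Return exactly one caption (or None) per image, mirroring the new chain."""
    if isinstance(caption, list):
        return caption
    elif isinstance(caption, str):
        return [caption]
    elif isinstance(caption, np.ndarray) and len(caption.shape) == 1:
        return caption.tolist()  # a 1-D numpy array works as a caption list
    elif caption is None:
        return [None] * len(images)
    else:
        return [str(caption)]  # any other scalar is stringified


print(normalize_captions(None, ["a.png", "b.png"]))      # [None, None]
print(normalize_captions("hello", ["a.png"]))            # ['hello']
print(normalize_captions(np.array(["x", "y"]), [1, 2]))  # ['x', 'y']
```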
streamlit/elements/lib/policies.py
CHANGED
```diff
@@ -19,7 +19,10 @@ from typing import TYPE_CHECKING, Any, Final, Sequence
 from streamlit import config, errors, logger, runtime
 from streamlit.elements.form_utils import is_in_form
 from streamlit.errors import StreamlitAPIException, StreamlitAPIWarning
-from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
+from streamlit.runtime.scriptrunner_utils.script_run_context import (
+    get_script_run_ctx,
+    in_cached_function,
+)
 from streamlit.runtime.state import WidgetCallback, get_session_state
 
 if TYPE_CHECKING:
@@ -114,14 +117,12 @@ def check_cache_replay_rules() -> None:
     If there are other similar checks in the future, we could extend this
     function to check for those as well. And rename it to check_widget_usage_rules.
     """
-    if runtime.exists():
-        ctx = get_script_run_ctx()
-        if ctx and ctx.disallow_cached_widget_usage:
-            from streamlit import exception
-
-            # We use an exception here to show a proper stack trace
-            # that indicates to the user where the issue is.
-            exception(CachedWidgetWarning())
+    if in_cached_function.get():
+        from streamlit import exception
+
+        # We use an exception here to show a proper stack trace
+        # that indicates to the user where the issue is.
+        exception(CachedWidgetWarning())
 
 
 _fragment_writes_widget_to_outside_error = (
```
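`check_cache_replay_rules` now keys off `in_cached_function` instead of a flag on the `ScriptRunCtx`. Judging by the import path, this is a module-level flag (plausibly a `ContextVar`) that the caching machinery toggles while a cached function body runs; its actual definition lives in `script_run_context.py`, which is outside this excerpt. A rough sketch of that pattern, with all names hypothetical:

```python
import contextlib
from contextvars import ContextVar

# Hypothetical stand-in for streamlit's flag; the real definition is in
# streamlit/runtime/scriptrunner_utils/script_run_context.py.
in_cached_function: ContextVar[bool] = ContextVar("in_cached_function", default=False)


@contextlib.contextmanager
def calling_cached_function():
    token = in_cached_function.set(True)  # flag on while the cached body runs
    try:
        yield
    finally:
        in_cached_function.reset(token)   # restore the prior value, even on error


def check_widget_usage():
    if in_cached_function.get():
        print("warning: widgets inside cached functions are unsupported")


with calling_cached_function():
    check_widget_usage()  # fires the warning
check_widget_usage()      # silent outside the cached body
```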
streamlit/runtime/caching/__init__.py
CHANGED
```diff
@@ -14,7 +14,7 @@
 
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
 
 from streamlit.runtime.caching.cache_data_api import (
     CACHE_DATA_MESSAGE_REPLAY_CTX,
@@ -33,7 +33,6 @@ if TYPE_CHECKING:
     from google.protobuf.message import Message
 
     from streamlit.proto.Block_pb2 import Block
-    from streamlit.runtime.state.common import WidgetMetadata
 
 
 def save_element_message(
@@ -73,14 +72,6 @@ def save_block_message(
     )
 
 
-def save_widget_metadata(metadata: WidgetMetadata[Any]) -> None:
-    """Save a widget's metadata to a thread-local callstack, so the widget
-    can be registered again when that widget is replayed.
-    """
-    CACHE_DATA_MESSAGE_REPLAY_CTX.save_widget_metadata(metadata)
-    CACHE_RESOURCE_MESSAGE_REPLAY_CTX.save_widget_metadata(metadata)
-
-
 def save_media_data(image_data: bytes | str, mimetype: str, image_id: str) -> None:
     CACHE_DATA_MESSAGE_REPLAY_CTX.save_image_data(image_data, mimetype, image_id)
     CACHE_RESOURCE_MESSAGE_REPLAY_CTX.save_image_data(image_data, mimetype, image_id)
@@ -99,7 +90,6 @@ __all__ = [
     "CACHE_DOCS_URL",
     "save_element_message",
     "save_block_message",
-    "save_widget_metadata",
     "save_media_data",
     "get_data_cache_stats_provider",
     "get_resource_cache_stats_provider",
```
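For users of the public API, this module only loses the internal `save_widget_metadata` hook; the caching decorators re-exported here keep their documented behavior, minus the retired parameter. A typical call that keeps working as-is (assuming pandas is installed), with `experimental_allow_widgets=...` simply dropped if it was present:

```python
import pandas as pd
import streamlit as st


@st.cache_data(ttl="1h", max_entries=100, show_spinner="Loading table...")
def load_table(path: str) -> pd.DataFrame:
    return pd.read_csv(path)
```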
streamlit/runtime/caching/cache_data_api.py
CHANGED
```diff
@@ -47,9 +47,7 @@ from streamlit.runtime.caching.cache_utils import (
 from streamlit.runtime.caching.cached_message_replay import (
     CachedMessageReplayContext,
     CachedResult,
-    ElementMsgData,
     MsgData,
-    MultiCacheResults,
     show_widget_replay_deprecation,
 )
 from streamlit.runtime.caching.storage import (
@@ -66,7 +64,6 @@ from streamlit.runtime.caching.storage.dummy_cache_storage import (
     MemoryCacheStorageManager,
 )
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.stats import CacheStat, CacheStatsProvider, group_stats
 from streamlit.time_util import time_to_seconds
 
@@ -93,13 +90,11 @@ class CachedDataFuncInfo(CachedFuncInfo):
         persist: CachePersistType,
         max_entries: int | None,
         ttl: float | timedelta | str | None,
-        allow_widgets: bool,
         hash_funcs: HashFuncsDict | None = None,
     ):
         super().__init__(
             func,
             show_spinner=show_spinner,
-            allow_widgets=allow_widgets,
             hash_funcs=hash_funcs,
         )
         self.persist = persist
@@ -128,7 +123,6 @@ class CachedDataFuncInfo(CachedFuncInfo):
             max_entries=self.max_entries,
             ttl=self.ttl,
             display_name=self.display_name,
-            allow_widgets=self.allow_widgets,
         )
 
     def validate_params(self) -> None:
@@ -160,7 +154,6 @@ class DataCaches(CacheStatsProvider):
         max_entries: int | None,
         ttl: int | float | timedelta | str | None,
         display_name: str,
-        allow_widgets: bool,
     ) -> DataCache:
         """Return the mem cache for the given key.
 
@@ -220,7 +213,6 @@ class DataCaches(CacheStatsProvider):
             max_entries=max_entries,
             ttl_seconds=ttl_seconds,
             display_name=display_name,
-            allow_widgets=allow_widgets,
         )
         self._function_caches[key] = cache
         return cache
@@ -443,9 +435,11 @@ class CacheDataAPI:
 
         experimental_allow_widgets : bool
             Allow widgets to be used in the cached function. Defaults to False.
-            Support for widgets in cached functions is currently experimental.
-            Setting this parameter to True may lead to excessive memory use since the
-            widget value is treated as an additional input parameter to the cache.
+
+            .. deprecated::
+                The cached widget replay functionality was removed in 1.38. Please
+                remove the ``experimental_allow_widgets`` parameter from your
+                caching decorators.
 
         hash_funcs : dict or None
             Mapping of types or fully qualified names to hash functions.
@@ -455,10 +449,6 @@ class CacheDataAPI:
             the provided function to generate a hash for it. See below for an example
            of how this can be used.
 
-            .. deprecated::
-                ``experimental_allow_widgets`` is deprecated and will be removed in
-                a later version.
-
         Example
         -------
         >>> import streamlit as st
@@ -574,7 +564,6 @@ class CacheDataAPI:
                 show_spinner=show_spinner,
                 max_entries=max_entries,
                 ttl=ttl,
-                allow_widgets=experimental_allow_widgets,
                 hash_funcs=hash_funcs,
             )
         )
@@ -589,7 +578,6 @@ class CacheDataAPI:
                 show_spinner=show_spinner,
                 max_entries=max_entries,
                 ttl=ttl,
-                allow_widgets=experimental_allow_widgets,
                 hash_funcs=hash_funcs,
             )
         )
@@ -611,7 +599,6 @@ class DataCache(Cache):
         max_entries: int | None,
         ttl_seconds: float | None,
         display_name: str,
-        allow_widgets: bool = False,
     ):
         super().__init__()
         self.key = key
@@ -620,7 +607,6 @@ class DataCache(Cache):
         self.ttl_seconds = ttl_seconds
         self.max_entries = max_entries
         self.persist = persist
-        self.allow_widgets = allow_widgets
 
     def get_stats(self) -> list[CacheStat]:
         if isinstance(self.storage, CacheStatsProvider):
@@ -641,21 +627,12 @@ class DataCache(Cache):
 
         try:
             entry = pickle.loads(pickled_entry)
-            if not isinstance(entry, MultiCacheResults):
+            if not isinstance(entry, CachedResult):
                 # Loaded an old cache file format, remove it and let the caller
                 # rerun the function.
                 self.storage.delete(key)
                 raise CacheKeyNotFoundError()
-
-            ctx = get_script_run_ctx()
-            if not ctx:
-                raise CacheKeyNotFoundError()
-
-            widget_key = entry.get_current_widget_key(ctx, CacheType.DATA)
-            if widget_key in entry.results:
-                return entry.results[widget_key]
-            else:
-                raise CacheKeyNotFoundError()
+            return entry
         except pickle.UnpicklingError as exc:
             raise CacheError(f"Failed to unpickle {key}") from exc
 
@@ -664,43 +641,13 @@ class DataCache(Cache):
         """Write a value and associated messages to the cache.
         The value must be pickleable.
         """
-        ctx = get_script_run_ctx()
-        if ctx is None:
-            return
-
-        main_id = st._main.id
-        sidebar_id = st.sidebar.id
-
-        if self.allow_widgets:
-            widgets = {
-                msg.widget_metadata.widget_id
-                for msg in messages
-                if isinstance(msg, ElementMsgData) and msg.widget_metadata is not None
-            }
-        else:
-            widgets = set()
-
-        multi_cache_results: MultiCacheResults | None = None
-
-        # Try to find in cache storage, then falling back to a new result instance
-        try:
-            multi_cache_results = self._read_multi_results_from_storage(key)
-        except (CacheKeyNotFoundError, pickle.UnpicklingError):
-            pass
-
-        if multi_cache_results is None:
-            multi_cache_results = MultiCacheResults(widget_ids=widgets, results={})
-        multi_cache_results.widget_ids.update(widgets)
-        widget_key = multi_cache_results.get_current_widget_key(ctx, CacheType.DATA)
-
-        result = CachedResult(value, messages, main_id, sidebar_id)
-        multi_cache_results.results[widget_key] = result
-
         try:
-            pickled_entry = pickle.dumps(multi_cache_results)
+            main_id = st._main.id
+            sidebar_id = st.sidebar.id
+            entry = CachedResult(value, messages, main_id, sidebar_id)
+            pickled_entry = pickle.dumps(entry)
         except (pickle.PicklingError, TypeError) as exc:
             raise CacheError(f"Failed to pickle {key}") from exc
-
         self.storage.set(key, pickled_entry)
 
     def _clear(self, key: str | None = None) -> None:
@@ -708,22 +655,3 @@ class DataCache(Cache):
             self.storage.clear()
         else:
             self.storage.delete(key)
-
-    def _read_multi_results_from_storage(self, key: str) -> MultiCacheResults:
-        """Look up the results from storage and ensure it has the right type.
-
-        Raises a `CacheKeyNotFoundError` if the key has no entry, or if the
-        entry is malformed.
-        """
-        try:
-            pickled = self.storage.get(key)
-        except CacheStorageKeyNotFoundError as e:
-            raise CacheKeyNotFoundError(str(e)) from e
-
-        maybe_results = pickle.loads(pickled)
-
-        if isinstance(maybe_results, MultiCacheResults):
-            return maybe_results
-        else:
-            self.storage.delete(key)
-            raise CacheKeyNotFoundError()
```
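The net effect on `DataCache` is that each cache key now maps to a single pickled `CachedResult` instead of a widget-keyed `MultiCacheResults` bundle. A condensed sketch of the new read/write contract, with a plain dict standing in for the `CacheStorage` backend and simplified stand-in types:

```python
import pickle
from dataclasses import dataclass, field
from typing import Any


class CacheKeyNotFoundError(Exception):
    """Signals a cache miss to the caller (stand-in for streamlit's error)."""


@dataclass
class CachedResult:  # simplified stand-in for streamlit's CachedResult
    value: Any
    messages: list = field(default_factory=list)


storage: dict[str, bytes] = {}  # stand-in for the CacheStorage backend


def write_result(key: str, value: Any) -> None:
    storage[key] = pickle.dumps(CachedResult(value))


def read_result(key: str) -> CachedResult:
    if key not in storage:
        raise CacheKeyNotFoundError()
    entry = pickle.loads(storage[key])
    if not isinstance(entry, CachedResult):
        # Old cache file format: drop it and report a miss so the caller reruns.
        del storage[key]
        raise CacheKeyNotFoundError()
    return entry


write_result("k", 42)
print(read_result("k").value)  # 42
```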
streamlit/runtime/caching/cache_errors.py
CHANGED
```diff
@@ -29,7 +29,7 @@ CACHE_DOCS_URL = "https://docs.streamlit.io/develop/concepts/architecture/caching"
 def get_cached_func_name_md(func: Any) -> str:
     """Get markdown representation of the function name."""
     if hasattr(func, "__name__"):
-        return "`" + func.__name__ + "`"
+        return f"`{func.__name__}()`"
     elif hasattr(type(func), "__name__"):
         return f"`{type(func).__name__}`"
     return f"`{type(func)}`"
@@ -105,9 +105,10 @@ class CacheReplayClosureError(StreamlitAPIException):
 
         msg = (
             f"""
-While running {func_name}, a streamlit element is called on some layout block created outside the function.
-This is incompatible with replaying the cached effect of that element, because the
-the referenced block might not exist when the replay happens.
+While running {func_name}, a streamlit element is called on some layout block
+created outside the function. This is incompatible with replaying the cached
+effect of that element, because the the referenced block might not exist when
+the replay happens.
 
 How to fix this:
 * Move the creation of $THING inside {func_name}.
@@ -124,11 +125,14 @@ class UnserializableReturnValueError(MarkdownFormattedException):
         MarkdownFormattedException.__init__(
             self,
             f"""
-            Cannot serialize the return value (of type {get_return_value_type(return_value)}) in {get_cached_func_name_md(func)}.
-            `st.cache_data` uses [pickle](https://docs.python.org/3/library/pickle.html) to serialize the function's return value and safely store it in the cache without mutating the original object.
-            Please convert the return value to a pickle-serializable type.
-            If you want to cache unserializable objects such as database connections or Tensorflow
-            sessions, use `st.cache_resource` instead (see [our docs]({CACHE_DOCS_URL}) for differences).""",
+            Cannot serialize the return value (of type {get_return_value_type(return_value)})
+            in {get_cached_func_name_md(func)}. `st.cache_data` uses
+            [pickle](https://docs.python.org/3/library/pickle.html) to serialize the
+            function's return value and safely store it in the cache
+            without mutating the original object. Please convert the return value to a
+            pickle-serializable type. If you want to cache unserializable objects such
+            as database connections or Tensorflow sessions, use `st.cache_resource`
+            instead (see [our docs]({CACHE_DOCS_URL}) for differences).""",
         )
 
 
```
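The `get_cached_func_name_md` tweak swaps string concatenation for an f-string and renders the name as a call. Its behavior, reproduced for reference:

```python
from typing import Any


def get_cached_func_name_md(func: Any) -> str:
    """Markdown representation of the function name (new version from the diff)."""
    if hasattr(func, "__name__"):
        return f"`{func.__name__}()`"
    elif hasattr(type(func), "__name__"):
        return f"`{type(func).__name__}`"
    return f"`{type(func)}`"


def load_data(): ...


print(get_cached_func_name_md(load_data))  # `load_data()`
print(get_cached_func_name_md(42))         # `int`
```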
streamlit/runtime/caching/cache_resource_api.py
CHANGED
```diff
@@ -37,13 +37,10 @@ from streamlit.runtime.caching.cache_utils import (
 from streamlit.runtime.caching.cached_message_replay import (
     CachedMessageReplayContext,
     CachedResult,
-    ElementMsgData,
     MsgData,
-    MultiCacheResults,
     show_widget_replay_deprecation,
 )
 from streamlit.runtime.metrics_util import gather_metrics
-from streamlit.runtime.scriptrunner_utils.script_run_context import get_script_run_ctx
 from streamlit.runtime.stats import CacheStat, CacheStatsProvider, group_stats
 from streamlit.time_util import time_to_seconds
 
@@ -83,7 +80,6 @@ class ResourceCaches(CacheStatsProvider):
         max_entries: int | float | None,
         ttl: float | timedelta | str | None,
         validate: ValidateFunc | None,
-        allow_widgets: bool,
     ) -> ResourceCache:
         """Return the mem cache for the given key.
 
@@ -114,7 +110,6 @@ class ResourceCaches(CacheStatsProvider):
             max_entries=max_entries,
             ttl_seconds=ttl_seconds,
             validate=validate,
-            allow_widgets=allow_widgets,
         )
         self._function_caches[key] = cache
         return cache
@@ -155,13 +150,11 @@ class CachedResourceFuncInfo(CachedFuncInfo):
         max_entries: int | None,
         ttl: float | timedelta | str | None,
         validate: ValidateFunc | None,
-        allow_widgets: bool,
         hash_funcs: HashFuncsDict | None = None,
     ):
         super().__init__(
             func,
             show_spinner=show_spinner,
-            allow_widgets=allow_widgets,
             hash_funcs=hash_funcs,
         )
         self.max_entries = max_entries
@@ -188,7 +181,6 @@ class CachedResourceFuncInfo(CachedFuncInfo):
             max_entries=self.max_entries,
             ttl=self.ttl,
             validate=self.validate,
-            allow_widgets=self.allow_widgets,
         )
 
 
@@ -315,9 +307,11 @@ class CacheResourceAPI:
 
         experimental_allow_widgets : bool
             Allow widgets to be used in the cached function. Defaults to False.
-            Support for widgets in cached functions is currently experimental.
-            Setting this parameter to True may lead to excessive memory use since the
-            widget value is treated as an additional input parameter to the cache.
+
+            .. deprecated::
+                The cached widget replay functionality was removed in 1.38. Please
+                remove the ``experimental_allow_widgets`` parameter from your
+                caching decorators.
 
         hash_funcs : dict or None
             Mapping of types or fully qualified names to hash functions.
@@ -327,10 +321,6 @@ class CacheResourceAPI:
             the provided function to generate a hash for it. See below for an example
             of how this can be used.
 
-            .. deprecated::
-                ``experimental_allow_widgets`` is deprecated and will be removed in
-                a later version.
-
         Example
         -------
         >>> import streamlit as st
@@ -426,7 +416,6 @@ class CacheResourceAPI:
             max_entries=max_entries,
             ttl=ttl,
             validate=validate,
-            allow_widgets=experimental_allow_widgets,
             hash_funcs=hash_funcs,
         )
     )
@@ -438,7 +427,6 @@ class CacheResourceAPI:
             max_entries=max_entries,
             ttl=ttl,
             validate=validate,
-            allow_widgets=experimental_allow_widgets,
             hash_funcs=hash_funcs,
         )
     )
@@ -459,17 +447,15 @@ class ResourceCache(Cache):
         ttl_seconds: float,
         validate: ValidateFunc | None,
         display_name: str,
-        allow_widgets: bool,
     ):
         super().__init__()
         self.key = key
         self.display_name = display_name
-        self._mem_cache: TTLCache[str, MultiCacheResults] = TTLCache(
+        self._mem_cache: TTLCache[str, CachedResult] = TTLCache(
             maxsize=max_entries, ttl=ttl_seconds, timer=cache_utils.TTLCACHE_TIMER
         )
         self._mem_cache_lock = threading.Lock()
         self.validate = validate
-        self.allow_widgets = allow_widgets
 
     @property
     def max_entries(self) -> float:
@@ -488,24 +474,11 @@ class ResourceCache(Cache):
             # key does not exist in cache.
             raise CacheKeyNotFoundError()
 
-        multi_results = self._mem_cache[key]
-
-        ctx = get_script_run_ctx()
-        if not ctx:
-            # ScriptRunCtx does not exist (we're probably running in "raw" mode).
-            raise CacheKeyNotFoundError()
-
-        widget_key = multi_results.get_current_widget_key(ctx, CacheType.RESOURCE)
-        if widget_key not in multi_results.results:
-            # widget_key does not exist in cache (this combination of widgets hasn't been
-            # seen for the value_key yet).
-            raise CacheKeyNotFoundError()
-
-        result = multi_results.results[widget_key]
+        result = self._mem_cache[key]
 
         if self.validate is not None and not self.validate(result.value):
             # Validate failed: delete the entry and raise an error.
-            del multi_results.results[widget_key]
+            del self._mem_cache[key]
             raise CacheKeyNotFoundError()
 
         return result
@@ -513,33 +486,11 @@ class ResourceCache(Cache):
     @gather_metrics("_cache_resource_object")
     def write_result(self, key: str, value: Any, messages: list[MsgData]) -> None:
         """Write a value and associated messages to the cache."""
-        ctx = get_script_run_ctx()
-        if ctx is None:
-            return
-
         main_id = st._main.id
         sidebar_id = st.sidebar.id
-        if self.allow_widgets:
-            widgets = {
-                msg.widget_metadata.widget_id
-                for msg in messages
-                if isinstance(msg, ElementMsgData) and msg.widget_metadata is not None
-            }
-        else:
-            widgets = set()
 
         with self._mem_cache_lock:
-            try:
-                multi_results = self._mem_cache[key]
-            except KeyError:
-                multi_results = MultiCacheResults(widget_ids=widgets, results={})
-
-            multi_results.widget_ids.update(widgets)
-            widget_key = multi_results.get_current_widget_key(ctx, CacheType.RESOURCE)
-
-            result = CachedResult(value, messages, main_id, sidebar_id)
-            multi_results.results[widget_key] = result
-            self._mem_cache[key] = multi_results
+            self._mem_cache[key] = CachedResult(value, messages, main_id, sidebar_id)
 
     def _clear(self, key: str | None = None) -> None:
         with self._mem_cache_lock:
```
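`ResourceCache` gets the same flattening on the in-memory side: the `TTLCache` now maps a key directly to a `CachedResult`, and a failed `validate` evicts that key outright. A sketch of the validate-on-read behavior using `cachetools` (the library behind the `TTLCache` type here); the helper below is mine, not streamlit's:

```python
import time
from typing import Any, Callable, Optional

from cachetools import TTLCache


class CacheKeyNotFoundError(Exception):
    pass


def read_result(
    mem_cache: TTLCache,
    key: str,
    validate: Optional[Callable[[Any], bool]] = None,
) -> Any:
    if key not in mem_cache:  # missing or TTL-expired
        raise CacheKeyNotFoundError()
    result = mem_cache[key]
    if validate is not None and not validate(result):
        # Validation failed: evict and report a miss so the resource is rebuilt.
        del mem_cache[key]
        raise CacheKeyNotFoundError()
    return result


cache: TTLCache = TTLCache(maxsize=16, ttl=60, timer=time.monotonic)
cache["conn"] = "db-connection"
print(read_result(cache, "conn", validate=lambda c: c.startswith("db-")))
```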
streamlit/runtime/caching/cache_utils.py
CHANGED
```diff
@@ -16,6 +16,7 @@
 
 from __future__ import annotations
 
+import contextlib
 import functools
 import hashlib
 import inspect
@@ -125,12 +126,10 @@ class CachedFuncInfo:
         self,
         func: FunctionType,
         show_spinner: bool | str,
-        allow_widgets: bool,
         hash_funcs: HashFuncsDict | None,
     ):
         self.func = func
         self.show_spinner = show_spinner
-        self.allow_widgets = allow_widgets
         self.hash_funcs = hash_funcs
 
     @property
@@ -230,11 +229,9 @@ class CachedFunc:
             hash_funcs=self._info.hash_funcs,
         )
 
-        try:
+        with contextlib.suppress(CacheKeyNotFoundError):
             cached_result = cache.read_result(value_key)
             return self._handle_cache_hit(cached_result)
-        except CacheKeyNotFoundError:
-            pass
         return self._handle_cache_miss(cache, value_key, func_args, func_kwargs)
 
     def _handle_cache_hit(self, result: CachedResult) -> Any:
@@ -279,17 +276,14 @@
         # We've acquired the lock - but another thread may have acquired it first
         # and already computed the value. So we need to test for a cache hit again,
         # before computing.
-        try:
+        with contextlib.suppress(CacheKeyNotFoundError):
             cached_result = cache.read_result(value_key)
             # Another thread computed the value before us. Early exit!
             return self._handle_cache_hit(cached_result)
 
-        except CacheKeyNotFoundError:
-            pass
-
         # We acquired the lock before any other thread. Compute the value!
         with self._info.cached_message_replay_ctx.calling_cached_function(
-            self._info.func, self._info.allow_widgets
+            self._info.func
         ):
             computed_value = self._info.func(*func_args, **func_kwargs)
 
@@ -329,8 +323,10 @@
 
         Parameters
         ----------
+
         *args: Any
             Arguments of the cached functions.
+
         **kwargs: Any
             Keyword arguments of the cached function.
 
@@ -471,8 +467,7 @@ def _make_function_key(cache_type: CacheType, func: FunctionType) -> str:
         source_code, hasher=func_hasher, cache_type=cache_type, hash_source=func
     )
 
-    cache_key = func_hasher.hexdigest()
-    return cache_key
+    return func_hasher.hexdigest()
 
 
 def _get_positional_arg_name(func: FunctionType, arg_index: int) -> str | None:
```
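The repeated `try/except CacheKeyNotFoundError: pass` blocks collapse into `contextlib.suppress`, which states the "return on hit, fall through on miss" shape more directly. The two forms are equivalent, shown here in isolation with a stub that always misses:

```python
import contextlib


class CacheKeyNotFoundError(Exception):
    pass


def read_result(key: str) -> str:
    raise CacheKeyNotFoundError()  # always a miss, for demonstration


def lookup(key: str) -> str:
    # Before: try/except around the read, with an empty `except ...: pass`.
    try:
        return f"hit: {read_result(key)}"
    except CacheKeyNotFoundError:
        pass
    return "miss"


def lookup_suppress(key: str) -> str:
    # After: contextlib.suppress expresses the same fall-through intent.
    with contextlib.suppress(CacheKeyNotFoundError):
        return f"hit: {read_result(key)}"
    return "miss"


assert lookup("k") == lookup_suppress("k") == "miss"
```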
|