streamlit-nightly 1.44.1.dev20250327__py3-none-any.whl → 1.44.2.dev20250403__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (95)
  1. streamlit/commands/execution_control.py +3 -0
  2. streamlit/commands/page_config.py +1 -1
  3. streamlit/config.py +0 -26
  4. streamlit/elements/write.py +8 -0
  5. streamlit/proto/ClientState_pb2.py +2 -2
  6. streamlit/proto/ClientState_pb2.pyi +11 -1
  7. streamlit/proto/ForwardMsg_pb2.pyi +4 -7
  8. streamlit/runtime/app_session.py +5 -4
  9. streamlit/runtime/context.py +14 -0
  10. streamlit/runtime/forward_msg_cache.py +35 -230
  11. streamlit/runtime/forward_msg_queue.py +41 -23
  12. streamlit/runtime/runtime.py +2 -60
  13. streamlit/runtime/runtime_util.py +15 -14
  14. streamlit/runtime/scriptrunner/script_runner.py +9 -1
  15. streamlit/runtime/scriptrunner_utils/script_requests.py +3 -0
  16. streamlit/runtime/scriptrunner_utils/script_run_context.py +25 -3
  17. streamlit/runtime/session_manager.py +5 -6
  18. streamlit/static/index.html +1 -1
  19. streamlit/static/static/js/{FileDownload.esm.jX-9l2Ep.js → FileDownload.esm.B3kD8CdX.js} +1 -1
  20. streamlit/static/static/js/{FileHelper.aCeQQwv9.js → FileHelper.CZMZF2jC.js} +1 -1
  21. streamlit/static/static/js/{FormClearHelper.CWUgHOqb.js → FormClearHelper.BjztopwY.js} +1 -1
  22. streamlit/static/static/js/{Hooks.z6bpnOa4.js → Hooks.B2w-p718.js} +1 -1
  23. streamlit/static/static/js/{InputInstructions.CxNXqmaa.js → InputInstructions.CuF13wx9.js} +1 -1
  24. streamlit/static/static/js/{ProgressBar.DeJx_v03.js → ProgressBar.BbIgSlVa.js} +1 -1
  25. streamlit/static/static/js/{RenderInPortalIfExists.BzVEnQEP.js → RenderInPortalIfExists.yENMDBLa.js} +1 -1
  26. streamlit/static/static/js/{Toolbar.Buaxb3gQ.js → Toolbar.DJ7G2mIO.js} +1 -1
  27. streamlit/static/static/js/{base-input.B02pchZb.js → base-input.DIlOlDo4.js} +1 -1
  28. streamlit/static/static/js/{checkbox.BNevNWhL.js → checkbox.BQjuWiaH.js} +1 -1
  29. streamlit/static/static/js/{createSuper.HF1JI-bK.js → createSuper.DsCxV-v8.js} +1 -1
  30. streamlit/static/static/js/{data-grid-overlay-editor.DHpEpsQ_.js → data-grid-overlay-editor.D4K6HcP0.js} +1 -1
  31. streamlit/static/static/js/{downloader.B32k91dq.js → downloader.Lq-fJZlW.js} +1 -1
  32. streamlit/static/static/js/{es6.j4L3xv_m.js → es6.BE-9QZmp.js} +2 -2
  33. streamlit/static/static/js/{iframeResizer.contentWindow.DQOV--zq.js → iframeResizer.contentWindow.CVN6zezL.js} +1 -1
  34. streamlit/static/static/js/{index.BTG2J5Pk.js → index.0HztFnVa.js} +1 -1
  35. streamlit/static/static/js/{index.ChvqDLgw.js → index.5819-zgt.js} +1 -1
  36. streamlit/static/static/js/{index.DaJw5fna.js → index.B-KH-iJ1.js} +1 -1
  37. streamlit/static/static/js/{index.1tdxODWC.js → index.B0f5Sqj9.js} +1 -1
  38. streamlit/static/static/js/{index.CKYXxi_d.js → index.B1gZ98Ih.js} +1 -1
  39. streamlit/static/static/js/{index.DTDyF8nE.js → index.B2XNA40L.js} +1 -1
  40. streamlit/static/static/js/index.B7RGQCfs.js +1 -0
  41. streamlit/static/static/js/{index.CTwaWONb.js → index.BB3TfMGO.js} +1 -1
  42. streamlit/static/static/js/{index.DsRxnb2z.js → index.BDovkxCx.js} +1 -1
  43. streamlit/static/static/js/{index.CpDe9l-f.js → index.BMWHViZF.js} +1 -1
  44. streamlit/static/static/js/{index.R8Go3XlF.js → index.BM_qS2Z6.js} +1 -1
  45. streamlit/static/static/js/{index.BT-PT2u0.js → index.BYWoEwfP.js} +1 -1
  46. streamlit/static/static/js/{index.BDIF1v3E.js → index.Bs_nrSu4.js} +1 -1
  47. streamlit/static/static/js/{index.Cq_L2WtW.js → index.BwOqVS7B.js} +1 -1
  48. streamlit/static/static/js/{index.CExICAHy.js → index.BysxR7v7.js} +1 -1
  49. streamlit/static/static/js/{index.CTgHTp02.js → index.C6uLzsGN.js} +1 -1
  50. streamlit/static/static/js/{index.DbqewZ6W.js → index.CDspsVkR.js} +1 -1
  51. streamlit/static/static/js/{index.C1B9TyzK.js → index.CE-qPTwY.js} +1 -1
  52. streamlit/static/static/js/{index.BnYJb__c.js → index.CFnDv8Ab.js} +1 -1
  53. streamlit/static/static/js/{index.CcMFXZBL.js → index.CQYXOt0H.js} +1 -1
  54. streamlit/static/static/js/{index.B2-yUxP6.js → index.CU_SzKbN.js} +1 -1
  55. streamlit/static/static/js/{index.C-GJaT09.js → index.CWOluRqa.js} +1 -1
  56. streamlit/static/static/js/{index.V3D0L00K.js → index.CbDE21BB.js} +1 -1
  57. streamlit/static/static/js/{index.DNURUtUa.js → index.ChX_XN9v.js} +1 -1
  58. streamlit/static/static/js/index.Cn3q8ZVn.js +2 -0
  59. streamlit/static/static/js/{index.DBEif7dq.js → index.CzzEyXiC.js} +3 -3
  60. streamlit/static/static/js/{index.B28jf8c_.js → index.DIxzN4oM.js} +51 -51
  61. streamlit/static/static/js/index.DSsFpl6V.js +3 -0
  62. streamlit/static/static/js/{index.ClfebD_T.js → index.DmZP-_fu.js} +1 -1
  63. streamlit/static/static/js/{index.Nb8G9oM-.js → index.Dw1CjpSH.js} +1 -1
  64. streamlit/static/static/js/index.IydYP9PK.js +1 -0
  65. streamlit/static/static/js/{index.Uid-bSyh.js → index.TnCoD1oP.js} +1 -1
  66. streamlit/static/static/js/{index.BRDvEQpe.js → index.WW1pXc2g.js} +1 -1
  67. streamlit/static/static/js/{index.qkhdJyyt.js → index.eNGIEtgK.js} +7 -7
  68. streamlit/static/static/js/{index.BBHrAwbG.js → index.m_aVBz2b.js} +1 -1
  69. streamlit/static/static/js/{index.BeuGcxG8.js → index.rPwcP7b8.js} +1 -1
  70. streamlit/static/static/js/{index.m0rRkw04.js → index.u-RVYMyk.js} +1 -1
  71. streamlit/static/static/js/{input.DogdK8Cg.js → input.CVw5KI0x.js} +2 -2
  72. streamlit/static/static/js/{memory.B_1d0kyG.js → memory.CnrS2dOM.js} +1 -1
  73. streamlit/static/static/js/{mergeWith.9h0p6sC_.js → mergeWith.DUDC520V.js} +1 -1
  74. streamlit/static/static/js/{number-overlay-editor.yRe6Yodu.js → number-overlay-editor.Ccw3Yu2p.js} +1 -1
  75. streamlit/static/static/js/{possibleConstructorReturn.C73_6grg.js → possibleConstructorReturn.wVJHSwqv.js} +1 -1
  76. streamlit/static/static/js/{sandbox.2u3nOS5d.js → sandbox.CpAztCDM.js} +1 -1
  77. streamlit/static/static/js/{textarea.DFCEFjUj.js → textarea.CFjEVTUg.js} +2 -2
  78. streamlit/static/static/js/{timepicker.GuNna1EN.js → timepicker.DMKQZM-V.js} +1 -1
  79. streamlit/static/static/js/{toConsumableArray.DARzcvE5.js → toConsumableArray.KVlxKIgo.js} +1 -1
  80. streamlit/static/static/js/{uniqueId.fceb1ayN.js → uniqueId.kOCufBPt.js} +1 -1
  81. streamlit/static/static/js/{useBasicWidgetState.D6255-xX.js → useBasicWidgetState.Bs_D3hOD.js} +1 -1
  82. streamlit/static/static/js/{useOnInputChange.BjnOKne4.js → useOnInputChange.BH-JyYhR.js} +1 -1
  83. streamlit/static/static/js/{withFullScreenWrapper.B7h9p1kI.js → withFullScreenWrapper.vQYihw6n.js} +1 -1
  84. streamlit/web/server/routes.py +1 -59
  85. streamlit/web/server/server.py +7 -8
  86. {streamlit_nightly-1.44.1.dev20250327.dist-info → streamlit_nightly-1.44.2.dev20250403.dist-info}/METADATA +1 -1
  87. {streamlit_nightly-1.44.1.dev20250327.dist-info → streamlit_nightly-1.44.2.dev20250403.dist-info}/RECORD +91 -91
  88. streamlit/static/static/js/index.BUz0sS-V.js +0 -1
  89. streamlit/static/static/js/index.CDMGlkYx.js +0 -3
  90. streamlit/static/static/js/index.CPMy5pwd.js +0 -1
  91. streamlit/static/static/js/index.DfvKnm4Q.js +0 -2
  92. {streamlit_nightly-1.44.1.dev20250327.data → streamlit_nightly-1.44.2.dev20250403.data}/scripts/streamlit.cmd +0 -0
  93. {streamlit_nightly-1.44.1.dev20250327.dist-info → streamlit_nightly-1.44.2.dev20250403.dist-info}/WHEEL +0 -0
  94. {streamlit_nightly-1.44.1.dev20250327.dist-info → streamlit_nightly-1.44.2.dev20250403.dist-info}/entry_points.txt +0 -0
  95. {streamlit_nightly-1.44.1.dev20250327.dist-info → streamlit_nightly-1.44.2.dev20250403.dist-info}/top_level.txt +0 -0
streamlit/commands/execution_control.py CHANGED
@@ -140,6 +140,7 @@ def rerun( # type: ignore[misc]
     if ctx and ctx.script_requests:
         query_string = ctx.query_string
         page_script_hash = ctx.page_script_hash
+        cached_message_hashes = ctx.cached_message_hashes

         ctx.script_requests.request_rerun(
             RerunData(
@@ -147,6 +148,7 @@ def rerun( # type: ignore[misc]
                 page_script_hash=page_script_hash,
                 fragment_id_queue=_new_fragment_id_queue(ctx, scope),
                 is_fragment_scoped_rerun=scope == "fragment",
+                cached_message_hashes=cached_message_hashes,
             )
         )
         # Force a yield point so the runner can do the rerun
@@ -232,6 +234,7 @@ def switch_page(page: str | Path | StreamlitPage) -> NoReturn: # type: ignore[m
         RerunData(
             query_string=ctx.query_string,
             page_script_hash=page_script_hash,
+            cached_message_hashes=ctx.cached_message_hashes,
         )
     )
     # Force a yield point so the runner can do the rerun
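Both rerun entry points now forward the set of message hashes that the browser reports as cached, so the next script run can emit lightweight ref_hash messages instead of resending full payloads. A minimal sketch of that data flow, using a simplified stand-in for the real RerunData dataclass (the actual class lives in streamlit/runtime/scriptrunner_utils/script_requests.py and carries more fields):

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass(frozen=True)
class RerunDataSketch:
    """Simplified stand-in for RerunData; only the fields relevant here."""

    query_string: str = ""
    page_script_hash: str = ""
    # Hashes of ForwardMsgs the browser already holds in its cache.
    cached_message_hashes: set[str] = field(default_factory=set)


def build_rerun_data_sketch(ctx) -> RerunDataSketch:
    # Mirrors the change above: carry the frontend cache state into the rerun
    # request so the server can later send references instead of full messages.
    return RerunDataSketch(
        query_string=ctx.query_string,
        page_script_hash=ctx.page_script_hash,
        cached_message_hashes=ctx.cached_message_hashes,
    )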
streamlit/commands/page_config.py CHANGED
@@ -102,7 +102,7 @@ def _get_favicon_string(page_icon: PageIcon) -> str:

     # If page_icon is an emoji, return it as is.
     if isinstance(page_icon, str) and is_emoji(page_icon):
-        return page_icon
+        return f"emoji:{page_icon}"

     if isinstance(page_icon, str) and page_icon.startswith(":material"):
         return validate_material_icon(page_icon)
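For app authors nothing changes here; the emoji is still passed as a plain string, and only the internal favicon string sent to the frontend gains an emoji: prefix:

import streamlit as st

# User code is unchanged. Internally, _get_favicon_string() now forwards
# the icon as "emoji:🔥" so the frontend can tell an emoji apart from an
# image URL or a ":material/..." icon name.
st.set_page_config(page_title="Demo", page_icon="🔥")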
streamlit/config.py CHANGED
@@ -444,32 +444,6 @@ _create_option(
     type_=int,
 )

-_create_option(
-    "global.storeCachedForwardMessagesInMemory",
-    description="""
-        If True, store cached ForwardMsgs in backend memory. This is an
-        internal flag to validate a potential removal of the in-memory
-        forward message cache.
-        """,
-    visibility="hidden",
-    default_val=True,
-    type_=bool,
-)
-
-_create_option(
-    "global.includeFragmentRunsInForwardMessageCacheCount",
-    description="""
-        If True, the server will include fragment runs in the count for the
-        forward message cache. The implication is that apps with fragments may
-        see messages being removed from the cache faster. This aligns the server
-        count with the frontend count. This is a temporary fix while we assess the
-        design of the cache.
-        """,
-    visibility="hidden",
-    default_val=False,
-    type_=bool,
-)
-

 # Config Section: Logger #
 _create_section("logger", "Settings to customize Streamlit log messages.")
streamlit/elements/write.py CHANGED
@@ -407,6 +407,14 @@ class WriteMixin:
             kwargs,
         )

+        if len(args) == 1 and isinstance(args[0], str):
+            # Optimization: If there is only one arg, and it's a string,
+            # we can just call markdown directly and skip the buffer logic.
+            # This also prevents unnecessary usage of `st.empty()`.
+            # This covers > 80% of all `st.write` uses.
+            self.dg.markdown(args[0], unsafe_allow_html=unsafe_allow_html)
+            return
+
         string_buffer: list[str] = []

         # This bans some valid cases like: e = st.empty(); e.write("a", "b").
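The fast path only applies to a single string argument; everything else still goes through the existing buffering logic. Typical st.write usage illustrating both branches:

import streamlit as st

# Single string argument: now forwarded straight to st.markdown(), so no
# st.empty() placeholder or string buffer is created for this call.
st.write("Hello **world**")

# Multiple arguments (or non-string values) still use the buffer logic,
# which batches adjacent strings and may create an st.empty() container.
st.write("The answer is", 42)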
streamlit/proto/ClientState_pb2.py CHANGED
@@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default()
 from streamlit.proto import WidgetStates_pb2 as streamlit_dot_proto_dot_WidgetStates__pb2

- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!streamlit/proto/ClientState.proto\x1a\"streamlit/proto/WidgetStates.proto\"\x9d\x01\n\x0b\x43ontextInfo\x12\x15\n\x08timezone\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x0ftimezone_offset\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x13\n\x06locale\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x10\n\x03url\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x0b\n\t_timezoneB\x12\n\x10_timezone_offsetB\t\n\x07_localeB\x06\n\x04_url\"\xc6\x01\n\x0b\x43lientState\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12$\n\rwidget_states\x18\x02 \x01(\x0b\x32\r.WidgetStates\x12\x18\n\x10page_script_hash\x18\x03 \x01(\t\x12\x11\n\tpage_name\x18\x04 \x01(\t\x12\x13\n\x0b\x66ragment_id\x18\x05 \x01(\t\x12\x15\n\ris_auto_rerun\x18\x06 \x01(\x08\x12\"\n\x0c\x63ontext_info\x18\x08 \x01(\x0b\x32\x0c.ContextInfoB0\n\x1c\x63om.snowflake.apps.streamlitB\x10\x43lientStateProtob\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n!streamlit/proto/ClientState.proto\x1a\"streamlit/proto/WidgetStates.proto\"\x9d\x01\n\x0b\x43ontextInfo\x12\x15\n\x08timezone\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x0ftimezone_offset\x18\x02 \x01(\x05H\x01\x88\x01\x01\x12\x13\n\x06locale\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x10\n\x03url\x18\x04 \x01(\tH\x03\x88\x01\x01\x42\x0b\n\t_timezoneB\x12\n\x10_timezone_offsetB\t\n\x07_localeB\x06\n\x04_url\"\xe5\x01\n\x0b\x43lientState\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12$\n\rwidget_states\x18\x02 \x01(\x0b\x32\r.WidgetStates\x12\x18\n\x10page_script_hash\x18\x03 \x01(\t\x12\x11\n\tpage_name\x18\x04 \x01(\t\x12\x13\n\x0b\x66ragment_id\x18\x05 \x01(\t\x12\x15\n\ris_auto_rerun\x18\x06 \x01(\x08\x12\x1d\n\x15\x63\x61\x63hed_message_hashes\x18\x07 \x03(\t\x12\"\n\x0c\x63ontext_info\x18\x08 \x01(\x0b\x32\x0c.ContextInfoB0\n\x1c\x63om.snowflake.apps.streamlitB\x10\x43lientStateProtob\x06proto3')

 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -26,5 +26,5 @@ if not _descriptor._USE_C_DESCRIPTORS:
   _globals['_CONTEXTINFO']._serialized_start=74
   _globals['_CONTEXTINFO']._serialized_end=231
   _globals['_CLIENTSTATE']._serialized_start=234
-  _globals['_CLIENTSTATE']._serialized_end=432
+  _globals['_CLIENTSTATE']._serialized_end=463
 # @@protoc_insertion_point(module_scope)
streamlit/proto/ClientState_pb2.pyi CHANGED
@@ -18,7 +18,9 @@ limitations under the License.
 """

 import builtins
+import collections.abc
 import google.protobuf.descriptor
+import google.protobuf.internal.containers
 import google.protobuf.message
 import streamlit.proto.WidgetStates_pb2
 import typing
@@ -68,6 +70,7 @@ class ClientState(google.protobuf.message.Message):
     PAGE_NAME_FIELD_NUMBER: builtins.int
     FRAGMENT_ID_FIELD_NUMBER: builtins.int
     IS_AUTO_RERUN_FIELD_NUMBER: builtins.int
+    CACHED_MESSAGE_HASHES_FIELD_NUMBER: builtins.int
     CONTEXT_INFO_FIELD_NUMBER: builtins.int
     query_string: builtins.str
     page_script_hash: builtins.str
@@ -76,6 +79,12 @@ class ClientState(google.protobuf.message.Message):
     is_auto_rerun: builtins.bool
     @property
     def widget_states(self) -> streamlit.proto.WidgetStates_pb2.WidgetStates: ...
+    @property
+    def cached_message_hashes(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """List of hashes of messages that are currently cached in
+        the frontend forward message cache.
+        """
+
     @property
     def context_info(self) -> global___ContextInfo: ...
     def __init__(
@@ -87,9 +96,10 @@ class ClientState(google.protobuf.message.Message):
         page_name: builtins.str = ...,
         fragment_id: builtins.str = ...,
         is_auto_rerun: builtins.bool = ...,
+        cached_message_hashes: collections.abc.Iterable[builtins.str] | None = ...,
         context_info: global___ContextInfo | None = ...,
     ) -> None: ...
     def HasField(self, field_name: typing.Literal["context_info", b"context_info", "widget_states", b"widget_states"]) -> builtins.bool: ...
-    def ClearField(self, field_name: typing.Literal["context_info", b"context_info", "fragment_id", b"fragment_id", "is_auto_rerun", b"is_auto_rerun", "page_name", b"page_name", "page_script_hash", b"page_script_hash", "query_string", b"query_string", "widget_states", b"widget_states"]) -> None: ...
+    def ClearField(self, field_name: typing.Literal["cached_message_hashes", b"cached_message_hashes", "context_info", b"context_info", "fragment_id", b"fragment_id", "is_auto_rerun", b"is_auto_rerun", "page_name", b"page_name", "page_script_hash", b"page_script_hash", "query_string", b"query_string", "widget_states", b"widget_states"]) -> None: ...

 global___ClientState = ClientState
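With the regenerated stubs, cached_message_hashes behaves like any other repeated string field on ClientState. A small round-trip sketch (assuming this wheel is installed; the hash values are hypothetical placeholders):

from streamlit.proto.ClientState_pb2 import ClientState

# The frontend reports which ForwardMsg hashes it still holds in its cache.
state = ClientState(
    query_string="foo=bar",
    cached_message_hashes=["3f2a...", "9c1b..."],  # hypothetical hashes
)

data = state.SerializeToString()
roundtrip = ClientState.FromString(data)
assert list(roundtrip.cached_message_hashes) == ["3f2a...", "9c1b..."]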
streamlit/proto/ForwardMsg_pb2.pyi CHANGED
@@ -107,10 +107,9 @@ class ForwardMsg(google.protobuf.message.Message):
     """A hash that uniquely identifies this ForwardMsg, for caching."""
     script_finished: global___ForwardMsg.ScriptFinishedStatus.ValueType
     ref_hash: builtins.str
-    """A reference to a ForwardMsg that has already been delivered.
-    The client should substitute the message with the given hash
-    for this one. If the client does not have the referenced message
-    in its cache, it can retrieve it from the server.
+    """A reference to a ForwardMsg that has already been delivered
+    and cached in the frontend. The client should substitute the message
+    with the given hash for this one.
     """
     debug_last_backmsg_id: builtins.str
     """The ID of the last BackMsg that we received before sending this
@@ -214,9 +213,7 @@ class ForwardMsgMetadata(google.protobuf.message.Message):
     ELEMENT_DIMENSION_SPEC_FIELD_NUMBER: builtins.int
     ACTIVE_SCRIPT_HASH_FIELD_NUMBER: builtins.int
     cacheable: builtins.bool
-    """If this is set, the server will have cached this message,
-    and a client that receives it should do the same.
-    """
+    """Marks a message as cacheable for the frontend."""
     active_script_hash: builtins.str
     """active_script_hash the forward message is associated from.
     For multipage apps v1, this will always be the page file running
streamlit/runtime/app_session.py CHANGED
@@ -383,12 +383,13 @@ class AppSession:
             self._client_state.context_info.CopyFrom(client_state.context_info)

             rerun_data = RerunData(
-                client_state.query_string,
-                client_state.widget_states,
-                client_state.page_script_hash,
-                client_state.page_name,
+                query_string=client_state.query_string,
+                widget_states=client_state.widget_states,
+                page_script_hash=client_state.page_script_hash,
+                page_name=client_state.page_name,
                 fragment_id=fragment_id if fragment_id else None,
                 is_auto_rerun=client_state.is_auto_rerun,
+                cached_message_hashes=set(client_state.cached_message_hashes),
                 context_info=client_state.context_info,
             )
         else:
streamlit/runtime/context.py CHANGED
@@ -307,3 +307,17 @@ class ContextProxy:
         if ctx is None or ctx.context_info is None:
             return None
         return ctx.context_info.url
+
+    @property
+    @gather_metrics("context.ip_address")
+    def ip_address(self) -> str | None:
+        """The read-only IP address of the user's connection.
+        This should not be used for security measures as it can be easily spoofed.
+        """
+        session_client_request = _get_request()
+        if session_client_request is not None:
+            remote_ip = session_client_request.remote_ip
+            if remote_ip == "::1" or remote_ip == "127.0.0.1":
+                return None
+            return remote_ip
+        return None
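Usage from app code; per the docstring, the value is None for localhost connections and can be spoofed, so it should not drive security decisions:

import streamlit as st

ip = st.context.ip_address
if ip is None:
    st.write("No remote IP available (e.g. running on localhost).")
else:
    st.write(f"Connected from {ip}")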
streamlit/runtime/forward_msg_cache.py CHANGED
@@ -14,23 +14,16 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Final
-from weakref import WeakKeyDictionary
+from typing import Final

 from streamlit import config, util
 from streamlit.logger import get_logger
 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
-from streamlit.runtime.stats import CacheStat, CacheStatsProvider, group_stats
-
-if TYPE_CHECKING:
-    from collections.abc import MutableMapping
-
-    from streamlit.runtime.app_session import AppSession

 _LOGGER: Final = get_logger(__name__)


-def populate_hash_if_needed(msg: ForwardMsg) -> str:
+def populate_hash_if_needed(msg: ForwardMsg) -> None:
     """Computes and assigns the unique hash for a ForwardMsg.

     If the ForwardMsg already has a hash, this is a no-op.
@@ -39,27 +32,38 @@ def populate_hash_if_needed(msg: ForwardMsg) -> str:
     ----------
     msg : ForwardMsg

-    Returns
-    -------
-    string
-        The message's hash, returned here for convenience. (The hash
-        will also be assigned to the ForwardMsg; callers do not need
-        to do this.)
-
     """
-    if msg.hash == "":
+    if msg.hash == "" and msg.WhichOneof("type") not in {"ref_hash", "initialize"}:
         # Move the message's metadata aside. It's not part of the
         # hash calculation.
         metadata = msg.metadata
         msg.ClearField("metadata")

+        # Serialize the message to bytes using the deterministic serializer to
+        # ensure consistent hashing.
+        serialized_msg = msg.SerializeToString(deterministic=True)
+
+        # TODO(lukasmasuch): Evaluate more optimized hashing for larger messages:
+        # - Add the type element type and number of bytes to the hash.
+        # - Only hash the first N bytes of the message.
+
         # MD5 is good enough for what we need, which is uniqueness.
-        msg.hash = util.calc_md5(msg.SerializeToString())
+        msg.hash = util.calc_md5(serialized_msg)

         # Restore metadata.
         msg.metadata.CopyFrom(metadata)

-    return msg.hash
+        # Set cacheable flag if above the min cached size and if its a `new_element`
+        # delta. We only cache new_element and add_block deltas since container's
+        # are not expected to be larger than a few KB and have other side-effects
+        # to consider if cached. But `add_block` deltas should still get a hash.
+        # In case we ever allow other delta types to be cached, we should
+        # also need to adapt the composable logic in forward_msg_queue.
+        msg.metadata.cacheable = (
+            len(serialized_msg) >= int(config.get_option("global.minCachedMessageSize"))
+            and msg.WhichOneof("type") == "delta"
+            and msg.delta.WhichOneof("type") == "new_element"
+        )


 def create_reference_msg(msg: ForwardMsg) -> ForwardMsg:
@@ -80,217 +84,18 @@ def create_reference_msg(msg: ForwardMsg) -> ForwardMsg:
         ref_hash field.

     """
+    if not msg.hash:
+        _LOGGER.warning(
+            "Failed to create a reference message for a ForwardMsg since the "
+            "message does not have a hash. This is not expected to happen, "
+            "please report this as a bug. Falling back to the original message."
+        )
+        # Fallback to the original message if the hash is not set.
+        # This is not expected to happen.
+        return msg
+
     ref_msg = ForwardMsg()
-    ref_msg.ref_hash = populate_hash_if_needed(msg)
+    ref_msg.ref_hash = msg.hash
     ref_msg.metadata.CopyFrom(msg.metadata)
+    ref_msg.metadata.cacheable = False
     return ref_msg
-
-
-class ForwardMsgCache(CacheStatsProvider):
-    """A cache of ForwardMsgs.
-
-    Large ForwardMsgs (e.g. those containing big DataFrame payloads) are
-    stored in this cache. The server can choose to send a ForwardMsg's hash,
-    rather than the message itself, to a client. Clients can then
-    request messages from this cache via another endpoint.
-
-    This cache is *not* thread safe. It's intended to only be accessed by
-    the server thread.
-
-    """
-
-    class Entry:
-        """Cache entry.
-
-        Stores the cached message, and the set of AppSessions
-        that we've sent the cached message to.
-
-        """
-
-        def __init__(self, msg: ForwardMsg | None):
-            self.msg = msg
-            self._session_script_run_counts: MutableMapping[AppSession, int] = (
-                WeakKeyDictionary()
-            )
-
-        def __repr__(self) -> str:
-            return util.repr_(self)
-
-        def add_session_ref(self, session: AppSession, script_run_count: int) -> None:
-            """Adds a reference to a AppSession that has referenced
-            this Entry's message.
-
-            Parameters
-            ----------
-            session : AppSession
-            script_run_count : int
-                The session's run count at the time of the call
-
-            """
-            prev_run_count = self._session_script_run_counts.get(session, 0)
-            if script_run_count < prev_run_count:
-                _LOGGER.error(
-                    "New script_run_count (%s) is < prev_run_count (%s). "
-                    "This should never happen!",
-                    script_run_count,
-                    prev_run_count,
-                )
-                script_run_count = prev_run_count
-            self._session_script_run_counts[session] = script_run_count
-
-        def has_session_ref(self, session: AppSession) -> bool:
-            return session in self._session_script_run_counts
-
-        def get_session_ref_age(
-            self, session: AppSession, script_run_count: int
-        ) -> int:
-            """The age of the given session's reference to the Entry,
-            given a new script_run_count.
-
-            """
-            return script_run_count - self._session_script_run_counts[session]
-
-        def remove_session_ref(self, session: AppSession) -> None:
-            del self._session_script_run_counts[session]
-
-        def has_refs(self) -> bool:
-            """True if this Entry has references from any AppSession.
-
-            If not, it can be removed from the cache.
-            """
-            return len(self._session_script_run_counts) > 0
-
-    def __init__(self):
-        self._entries: dict[str, ForwardMsgCache.Entry] = {}
-
-    def __repr__(self) -> str:
-        return util.repr_(self)
-
-    def add_message(
-        self, msg: ForwardMsg, session: AppSession, script_run_count: int
-    ) -> None:
-        """Add a ForwardMsg to the cache.
-
-        The cache will also record a reference to the given AppSession,
-        so that it can track which sessions have already received
-        each given ForwardMsg.
-
-        Parameters
-        ----------
-        msg : ForwardMsg
-        session : AppSession
-        script_run_count : int
-            The number of times the session's script has run
-
-        """
-        populate_hash_if_needed(msg)
-        entry = self._entries.get(msg.hash, None)
-        if entry is None:
-            if config.get_option("global.storeCachedForwardMessagesInMemory"):
-                entry = ForwardMsgCache.Entry(msg)
-            else:
-                entry = ForwardMsgCache.Entry(None)
-            self._entries[msg.hash] = entry
-        entry.add_session_ref(session, script_run_count)
-
-    def get_message(self, hash: str) -> ForwardMsg | None:
-        """Return the message with the given ID if it exists in the cache.
-
-        Parameters
-        ----------
-        hash : str
-            The id of the message to retrieve.
-
-        Returns
-        -------
-        ForwardMsg | None
-
-        """
-        entry = self._entries.get(hash, None)
-        return entry.msg if entry else None
-
-    def has_message_reference(
-        self, msg: ForwardMsg, session: AppSession, script_run_count: int
-    ) -> bool:
-        """Return True if a session has a reference to a message."""
-        populate_hash_if_needed(msg)
-
-        entry = self._entries.get(msg.hash, None)
-        if entry is None or not entry.has_session_ref(session):
-            return False
-
-        # Ensure we're not expired
-        age = entry.get_session_ref_age(session, script_run_count)
-        return age <= int(config.get_option("global.maxCachedMessageAge"))
-
-    def remove_refs_for_session(self, session: AppSession) -> None:
-        """Remove refs for all entries for the given session.
-
-        This should be called when an AppSession is disconnected or closed.
-
-        Parameters
-        ----------
-        session : AppSession
-        """
-
-        # Operate on a copy of our entries dict.
-        # We may be deleting from it.
-        for msg_hash, entry in self._entries.copy().items():
-            if entry.has_session_ref(session):
-                entry.remove_session_ref(session)
-
-                if not entry.has_refs():
-                    # The entry has no more references. Remove it from
-                    # the cache completely.
-                    del self._entries[msg_hash]
-
-    def remove_expired_entries_for_session(
-        self, session: AppSession, script_run_count: int
-    ) -> None:
-        """Remove any cached messages that have expired from the given session.
-
-        This should be called each time a AppSession finishes executing.
-
-        Parameters
-        ----------
-        session : AppSession
-        script_run_count : int
-            The number of times the session's script has run
-
-        """
-        max_age = config.get_option("global.maxCachedMessageAge")
-
-        # Operate on a copy of our entries dict.
-        # We may be deleting from it.
-        for msg_hash, entry in self._entries.copy().items():
-            if not entry.has_session_ref(session):
-                continue
-
-            age = entry.get_session_ref_age(session, script_run_count)
-            if age > max_age:
-                _LOGGER.debug(
-                    "Removing expired entry [session=%s, hash=%s, age=%s]",
-                    id(session),
-                    msg_hash,
-                    age,
-                )
-                entry.remove_session_ref(session)
-                if not entry.has_refs():
-                    # The entry has no more references. Remove it from
-                    # the cache completely.
-                    del self._entries[msg_hash]
-
-    def clear(self) -> None:
-        """Remove all entries from the cache."""
-        self._entries.clear()
-
-    def get_stats(self) -> list[CacheStat]:
-        stats: list[CacheStat] = [
-            CacheStat(
-                category_name="ForwardMessageCache",
-                cache_name="",
-                byte_length=entry.msg.ByteSize() if entry.msg is not None else 0,
-            )
-            for _, entry in self._entries.items()
-        ]
-        return group_stats(stats)
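After this change the module only hashes messages and builds ref_hash stand-ins; the server no longer keeps its own ForwardMsg cache (the frontend caches messages and reports their hashes back via cached_message_hashes). A rough sketch of how the two remaining helpers fit together, assuming this wheel is installed (global.minCachedMessageSize is the existing hidden config option referenced above):

from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
from streamlit.runtime.forward_msg_cache import (
    create_reference_msg,
    populate_hash_if_needed,
)

# Build a delta message carrying a reasonably large markdown element.
msg = ForwardMsg()
msg.delta.new_element.markdown.body = "lorem ipsum " * 2000
msg.metadata.delta_path[:] = [0, 0]

# Assigns msg.hash (MD5 of the deterministic serialization) and flips
# msg.metadata.cacheable on when the message is a new_element delta of at
# least global.minCachedMessageSize bytes.
populate_hash_if_needed(msg)

if msg.metadata.cacheable:
    # If the client listed msg.hash in cached_message_hashes, the server can
    # send this lightweight reference instead of the full payload.
    ref = create_reference_msg(msg)
    assert ref.ref_hash == msg.hash and not ref.metadata.cacheable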
streamlit/runtime/forward_msg_queue.py CHANGED
@@ -14,13 +14,10 @@

 from __future__ import annotations

-from typing import TYPE_CHECKING, Any, Callable
+from typing import Any, Callable

 from streamlit.proto.ForwardMsg_pb2 import ForwardMsg

-if TYPE_CHECKING:
-    from streamlit.proto.Delta_pb2 import Delta
-

 class ForwardMsgQueue:
     """Accumulates a session's outgoing ForwardMsgs.
@@ -79,16 +76,27 @@ class ForwardMsgQueue:
         # the app - we attempt to combine this new Delta into the old
         # one. This is an optimization that prevents redundant Deltas
         # from being sent to the frontend.
+        # One common case where this happens is with `st.write` since
+        # it uses a trick with `st.empty` to handle lists of args.
+        # Note: its not guaranteed that the optimization is always applied
+        # since the queue can be flushed to the browser at any time.
+        # For example:
+        # queue 1:
+        #   empty [0, 0] <- skipped
+        #   markdown [0, 0]
+        #   empty [1, 0] <- send to frontend
+        #
+        # queue 2:
+        #   markdown [1, 0]
+        # ...
+
         delta_key = tuple(msg.metadata.delta_path)
         if delta_key in self._delta_index_map:
             index = self._delta_index_map[delta_key]
             old_msg = self._queue[index]
-            composed_delta = _maybe_compose_deltas(old_msg.delta, msg.delta)
-            if composed_delta is not None:
-                new_msg = ForwardMsg()
-                new_msg.delta.CopyFrom(composed_delta)
-                new_msg.metadata.CopyFrom(msg.metadata)
-                self._queue[index] = new_msg
+            composed_msg = _maybe_compose_delta_msgs(old_msg, msg)
+            if composed_msg is not None:
+                self._queue[index] = composed_msg
                 return

         # No composition occurred. Append this message to the queue, and
@@ -163,6 +171,11 @@ class ForwardMsgQueue:

 def _is_composable_message(msg: ForwardMsg) -> bool:
     """True if the ForwardMsg is potentially composable with other ForwardMsgs."""
+    if msg.HasField("ref_hash"):
+        # reference messages (cached in frontend) are always composable.
+        # Only new_element deltas can be reference messages.
+        return True
+
     if not msg.HasField("delta"):
         # Non-delta messages are never composable.
         return False
@@ -174,17 +187,20 @@ def _is_composable_message(msg: ForwardMsg) -> bool:
     return delta_type != "add_rows" and delta_type != "arrow_add_rows"


-def _maybe_compose_deltas(old_delta: Delta, new_delta: Delta) -> Delta | None:
-    """Combines new_delta onto old_delta if possible.
+def _maybe_compose_delta_msgs(
+    old_msg: ForwardMsg, new_msg: ForwardMsg
+) -> ForwardMsg | None:
+    """Optimization logic that composes new_msg onto old_msg if possible.

-    If the combination takes place, the function returns a new Delta that
-    should replace old_delta in the queue.
+    If the combination takes place, the function returns a new ForwardMsg that
+    should replace old_msg in the queue. This basically means that the old_msg
+    is not send to the browser since its considered unnecessary.

-    If the new_delta is incompatible with old_delta, the function returns None.
-    In this case, the new_delta should just be appended to the queue as normal.
+    If the new_msg is incompatible with old_msg, the function returns None.
+    In this case, the new_msg should just be appended to the queue as normal.
     """
-    old_delta_type = old_delta.WhichOneof("type")
-    if old_delta_type == "add_block":
+
+    if old_msg.HasField("delta") and old_msg.delta.WhichOneof("type") == "add_block":
         # We never replace add_block deltas, because blocks can have
         # other dependent deltas later in the queue. For example:
         #
@@ -200,12 +216,14 @@ def _maybe_compose_deltas(old_delta: Delta, new_delta: Delta) -> Delta | None:
         # now just an element, and not a block.
         return None

-    new_delta_type = new_delta.WhichOneof("type")
-    if new_delta_type == "new_element":
-        return new_delta
+    if new_msg.HasField("ref_hash"):
+        # ref_hash messages are always composable.
+        # Only new_element deltas can be reference messages.
+        return new_msg

-    if new_delta_type == "add_block":
-        return new_delta
+    new_delta_type = new_msg.delta.WhichOneof("type")
+    if new_delta_type == "new_element" or new_delta_type == "add_block":
+        return new_msg

     return None
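Because composition now works on whole ForwardMsgs, a frontend-cached ref_hash message can replace an earlier delta at the same delta_path just like a full new_element delta would. A small sketch using ForwardMsgQueue's enqueue/flush methods (an internal API, shown here only for illustration; the hash value is a hypothetical placeholder):

from streamlit.proto.ForwardMsg_pb2 import ForwardMsg
from streamlit.runtime.forward_msg_queue import ForwardMsgQueue

queue = ForwardMsgQueue()

# First delta written to the element at delta_path [0, 0].
first = ForwardMsg()
first.delta.new_element.markdown.body = "draft"
first.metadata.delta_path[:] = [0, 0]
queue.enqueue(first)

# A later message for the same delta_path: a reference to a ForwardMsg the
# frontend already has cached. ref_hash messages are composable, so it
# replaces the earlier delta instead of being appended to the queue.
ref = ForwardMsg()
ref.ref_hash = "abc123"  # hypothetical hash of the cached message
ref.metadata.delta_path[:] = [0, 0]
queue.enqueue(ref)

flushed = queue.flush()
assert len(flushed) == 1 and flushed[0].ref_hash == "abc123"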