matrix_synapse-1.143.0rc2-cp310-abi3-manylinux_2_28_aarch64.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
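As a quick cross-check, the listing below can be reproduced locally from the wheel itself, since a wheel is simply a ZIP archive. The following is a minimal sketch using Python's standard-library zipfile module; the local path is a placeholder for wherever the wheel has been downloaded.

    import zipfile

    # Placeholder path to the downloaded wheel; adjust to your local copy.
    WHEEL_PATH = "matrix_synapse-1.143.0rc2-cp310-abi3-manylinux_2_28_aarch64.whl"

    # A wheel is a ZIP archive, so its member list mirrors the
    # file listing shown for a newly released version.
    with zipfile.ZipFile(WHEEL_PATH) as wheel:
        members = wheel.infolist()
        for info in members:
            print(f"{info.filename}  ({info.file_size} bytes)")
        print(f"total files: {len(members)}")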
Files changed (1058)
  1. matrix_synapse-1.143.0rc2.dist-info/AUTHORS.rst +51 -0
  2. matrix_synapse-1.143.0rc2.dist-info/LICENSE-AGPL-3.0 +661 -0
  3. matrix_synapse-1.143.0rc2.dist-info/LICENSE-COMMERCIAL +6 -0
  4. matrix_synapse-1.143.0rc2.dist-info/METADATA +385 -0
  5. matrix_synapse-1.143.0rc2.dist-info/RECORD +1058 -0
  6. matrix_synapse-1.143.0rc2.dist-info/WHEEL +4 -0
  7. matrix_synapse-1.143.0rc2.dist-info/entry_points.txt +14 -0
  8. synapse/__init__.py +97 -0
  9. synapse/_scripts/__init__.py +0 -0
  10. synapse/_scripts/export_signing_key.py +109 -0
  11. synapse/_scripts/generate_config.py +83 -0
  12. synapse/_scripts/generate_log_config.py +56 -0
  13. synapse/_scripts/generate_signing_key.py +55 -0
  14. synapse/_scripts/generate_workers_map.py +318 -0
  15. synapse/_scripts/hash_password.py +95 -0
  16. synapse/_scripts/move_remote_media_to_new_store.py +128 -0
  17. synapse/_scripts/register_new_matrix_user.py +402 -0
  18. synapse/_scripts/review_recent_signups.py +212 -0
  19. synapse/_scripts/synapse_port_db.py +1604 -0
  20. synapse/_scripts/synctl.py +365 -0
  21. synapse/_scripts/update_synapse_database.py +130 -0
  22. synapse/api/__init__.py +20 -0
  23. synapse/api/auth/__init__.py +207 -0
  24. synapse/api/auth/base.py +406 -0
  25. synapse/api/auth/internal.py +299 -0
  26. synapse/api/auth/mas.py +436 -0
  27. synapse/api/auth/msc3861_delegated.py +617 -0
  28. synapse/api/auth_blocking.py +144 -0
  29. synapse/api/constants.py +362 -0
  30. synapse/api/errors.py +907 -0
  31. synapse/api/filtering.py +537 -0
  32. synapse/api/presence.py +102 -0
  33. synapse/api/ratelimiting.py +480 -0
  34. synapse/api/room_versions.py +535 -0
  35. synapse/api/urls.py +118 -0
  36. synapse/app/__init__.py +60 -0
  37. synapse/app/_base.py +862 -0
  38. synapse/app/admin_cmd.py +388 -0
  39. synapse/app/appservice.py +30 -0
  40. synapse/app/client_reader.py +30 -0
  41. synapse/app/complement_fork_starter.py +206 -0
  42. synapse/app/event_creator.py +29 -0
  43. synapse/app/federation_reader.py +30 -0
  44. synapse/app/federation_sender.py +30 -0
  45. synapse/app/frontend_proxy.py +30 -0
  46. synapse/app/generic_worker.py +474 -0
  47. synapse/app/homeserver.py +505 -0
  48. synapse/app/media_repository.py +30 -0
  49. synapse/app/phone_stats_home.py +296 -0
  50. synapse/app/pusher.py +30 -0
  51. synapse/app/synchrotron.py +30 -0
  52. synapse/app/user_dir.py +31 -0
  53. synapse/appservice/__init__.py +458 -0
  54. synapse/appservice/api.py +567 -0
  55. synapse/appservice/scheduler.py +564 -0
  56. synapse/config/__init__.py +27 -0
  57. synapse/config/__main__.py +62 -0
  58. synapse/config/_base.py +1106 -0
  59. synapse/config/_base.pyi +215 -0
  60. synapse/config/_util.py +99 -0
  61. synapse/config/account_validity.py +116 -0
  62. synapse/config/api.py +141 -0
  63. synapse/config/appservice.py +210 -0
  64. synapse/config/auth.py +80 -0
  65. synapse/config/auto_accept_invites.py +43 -0
  66. synapse/config/background_updates.py +44 -0
  67. synapse/config/cache.py +231 -0
  68. synapse/config/captcha.py +90 -0
  69. synapse/config/cas.py +116 -0
  70. synapse/config/consent.py +73 -0
  71. synapse/config/database.py +184 -0
  72. synapse/config/emailconfig.py +367 -0
  73. synapse/config/experimental.py +595 -0
  74. synapse/config/federation.py +114 -0
  75. synapse/config/homeserver.py +141 -0
  76. synapse/config/jwt.py +55 -0
  77. synapse/config/key.py +447 -0
  78. synapse/config/logger.py +390 -0
  79. synapse/config/mas.py +192 -0
  80. synapse/config/matrixrtc.py +66 -0
  81. synapse/config/metrics.py +84 -0
  82. synapse/config/modules.py +40 -0
  83. synapse/config/oembed.py +185 -0
  84. synapse/config/oidc.py +509 -0
  85. synapse/config/password_auth_providers.py +82 -0
  86. synapse/config/push.py +64 -0
  87. synapse/config/ratelimiting.py +254 -0
  88. synapse/config/redis.py +74 -0
  89. synapse/config/registration.py +296 -0
  90. synapse/config/repository.py +311 -0
  91. synapse/config/retention.py +162 -0
  92. synapse/config/room.py +88 -0
  93. synapse/config/room_directory.py +165 -0
  94. synapse/config/saml2.py +251 -0
  95. synapse/config/server.py +1170 -0
  96. synapse/config/server_notices.py +84 -0
  97. synapse/config/spam_checker.py +66 -0
  98. synapse/config/sso.py +121 -0
  99. synapse/config/stats.py +54 -0
  100. synapse/config/third_party_event_rules.py +40 -0
  101. synapse/config/tls.py +192 -0
  102. synapse/config/tracer.py +71 -0
  103. synapse/config/user_directory.py +47 -0
  104. synapse/config/user_types.py +42 -0
  105. synapse/config/voip.py +59 -0
  106. synapse/config/workers.py +642 -0
  107. synapse/crypto/__init__.py +20 -0
  108. synapse/crypto/context_factory.py +278 -0
  109. synapse/crypto/event_signing.py +194 -0
  110. synapse/crypto/keyring.py +931 -0
  111. synapse/event_auth.py +1266 -0
  112. synapse/events/__init__.py +667 -0
  113. synapse/events/auto_accept_invites.py +216 -0
  114. synapse/events/builder.py +387 -0
  115. synapse/events/presence_router.py +243 -0
  116. synapse/events/snapshot.py +559 -0
  117. synapse/events/utils.py +924 -0
  118. synapse/events/validator.py +305 -0
  119. synapse/federation/__init__.py +22 -0
  120. synapse/federation/federation_base.py +382 -0
  121. synapse/federation/federation_client.py +2132 -0
  122. synapse/federation/federation_server.py +1540 -0
  123. synapse/federation/persistence.py +70 -0
  124. synapse/federation/send_queue.py +531 -0
  125. synapse/federation/sender/__init__.py +1164 -0
  126. synapse/federation/sender/per_destination_queue.py +886 -0
  127. synapse/federation/sender/transaction_manager.py +210 -0
  128. synapse/federation/transport/__init__.py +28 -0
  129. synapse/federation/transport/client.py +1199 -0
  130. synapse/federation/transport/server/__init__.py +334 -0
  131. synapse/federation/transport/server/_base.py +429 -0
  132. synapse/federation/transport/server/federation.py +910 -0
  133. synapse/federation/units.py +133 -0
  134. synapse/handlers/__init__.py +20 -0
  135. synapse/handlers/account.py +162 -0
  136. synapse/handlers/account_data.py +360 -0
  137. synapse/handlers/account_validity.py +361 -0
  138. synapse/handlers/admin.py +615 -0
  139. synapse/handlers/appservice.py +989 -0
  140. synapse/handlers/auth.py +2481 -0
  141. synapse/handlers/cas.py +413 -0
  142. synapse/handlers/deactivate_account.py +363 -0
  143. synapse/handlers/delayed_events.py +599 -0
  144. synapse/handlers/device.py +1870 -0
  145. synapse/handlers/devicemessage.py +399 -0
  146. synapse/handlers/directory.py +545 -0
  147. synapse/handlers/e2e_keys.py +1834 -0
  148. synapse/handlers/e2e_room_keys.py +455 -0
  149. synapse/handlers/event_auth.py +390 -0
  150. synapse/handlers/events.py +201 -0
  151. synapse/handlers/federation.py +2039 -0
  152. synapse/handlers/federation_event.py +2419 -0
  153. synapse/handlers/identity.py +812 -0
  154. synapse/handlers/initial_sync.py +528 -0
  155. synapse/handlers/jwt.py +120 -0
  156. synapse/handlers/message.py +2347 -0
  157. synapse/handlers/oidc.py +1801 -0
  158. synapse/handlers/pagination.py +768 -0
  159. synapse/handlers/password_policy.py +102 -0
  160. synapse/handlers/presence.py +2633 -0
  161. synapse/handlers/profile.py +655 -0
  162. synapse/handlers/push_rules.py +164 -0
  163. synapse/handlers/read_marker.py +79 -0
  164. synapse/handlers/receipts.py +351 -0
  165. synapse/handlers/register.py +1059 -0
  166. synapse/handlers/relations.py +623 -0
  167. synapse/handlers/reports.py +98 -0
  168. synapse/handlers/room.py +2448 -0
  169. synapse/handlers/room_list.py +632 -0
  170. synapse/handlers/room_member.py +2365 -0
  171. synapse/handlers/room_member_worker.py +146 -0
  172. synapse/handlers/room_policy.py +186 -0
  173. synapse/handlers/room_summary.py +1057 -0
  174. synapse/handlers/saml.py +524 -0
  175. synapse/handlers/search.py +723 -0
  176. synapse/handlers/send_email.py +209 -0
  177. synapse/handlers/set_password.py +71 -0
  178. synapse/handlers/sliding_sync/__init__.py +1701 -0
  179. synapse/handlers/sliding_sync/extensions.py +969 -0
  180. synapse/handlers/sliding_sync/room_lists.py +2262 -0
  181. synapse/handlers/sliding_sync/store.py +128 -0
  182. synapse/handlers/sso.py +1291 -0
  183. synapse/handlers/state_deltas.py +82 -0
  184. synapse/handlers/stats.py +321 -0
  185. synapse/handlers/sync.py +3106 -0
  186. synapse/handlers/thread_subscriptions.py +190 -0
  187. synapse/handlers/typing.py +606 -0
  188. synapse/handlers/ui_auth/__init__.py +48 -0
  189. synapse/handlers/ui_auth/checkers.py +332 -0
  190. synapse/handlers/user_directory.py +783 -0
  191. synapse/handlers/worker_lock.py +371 -0
  192. synapse/http/__init__.py +105 -0
  193. synapse/http/additional_resource.py +62 -0
  194. synapse/http/client.py +1373 -0
  195. synapse/http/connectproxyclient.py +316 -0
  196. synapse/http/federation/__init__.py +19 -0
  197. synapse/http/federation/matrix_federation_agent.py +490 -0
  198. synapse/http/federation/srv_resolver.py +196 -0
  199. synapse/http/federation/well_known_resolver.py +367 -0
  200. synapse/http/matrixfederationclient.py +1873 -0
  201. synapse/http/proxy.py +290 -0
  202. synapse/http/proxyagent.py +497 -0
  203. synapse/http/replicationagent.py +202 -0
  204. synapse/http/request_metrics.py +309 -0
  205. synapse/http/server.py +1110 -0
  206. synapse/http/servlet.py +1018 -0
  207. synapse/http/site.py +825 -0
  208. synapse/http/types.py +27 -0
  209. synapse/logging/__init__.py +31 -0
  210. synapse/logging/_remote.py +261 -0
  211. synapse/logging/_terse_json.py +95 -0
  212. synapse/logging/context.py +1209 -0
  213. synapse/logging/formatter.py +62 -0
  214. synapse/logging/handlers.py +99 -0
  215. synapse/logging/loggers.py +25 -0
  216. synapse/logging/opentracing.py +1132 -0
  217. synapse/logging/scopecontextmanager.py +160 -0
  218. synapse/media/_base.py +830 -0
  219. synapse/media/filepath.py +417 -0
  220. synapse/media/media_repository.py +1580 -0
  221. synapse/media/media_storage.py +702 -0
  222. synapse/media/oembed.py +277 -0
  223. synapse/media/preview_html.py +556 -0
  224. synapse/media/storage_provider.py +195 -0
  225. synapse/media/thumbnailer.py +833 -0
  226. synapse/media/url_previewer.py +875 -0
  227. synapse/metrics/__init__.py +748 -0
  228. synapse/metrics/_gc.py +219 -0
  229. synapse/metrics/_reactor_metrics.py +171 -0
  230. synapse/metrics/_types.py +38 -0
  231. synapse/metrics/background_process_metrics.py +555 -0
  232. synapse/metrics/common_usage_metrics.py +94 -0
  233. synapse/metrics/jemalloc.py +248 -0
  234. synapse/module_api/__init__.py +2131 -0
  235. synapse/module_api/callbacks/__init__.py +50 -0
  236. synapse/module_api/callbacks/account_validity_callbacks.py +106 -0
  237. synapse/module_api/callbacks/media_repository_callbacks.py +157 -0
  238. synapse/module_api/callbacks/ratelimit_callbacks.py +78 -0
  239. synapse/module_api/callbacks/spamchecker_callbacks.py +991 -0
  240. synapse/module_api/callbacks/third_party_event_rules_callbacks.py +592 -0
  241. synapse/module_api/errors.py +42 -0
  242. synapse/notifier.py +970 -0
  243. synapse/push/__init__.py +212 -0
  244. synapse/push/bulk_push_rule_evaluator.py +635 -0
  245. synapse/push/clientformat.py +126 -0
  246. synapse/push/emailpusher.py +333 -0
  247. synapse/push/httppusher.py +564 -0
  248. synapse/push/mailer.py +1010 -0
  249. synapse/push/presentable_names.py +216 -0
  250. synapse/push/push_tools.py +114 -0
  251. synapse/push/push_types.py +141 -0
  252. synapse/push/pusher.py +87 -0
  253. synapse/push/pusherpool.py +501 -0
  254. synapse/push/rulekinds.py +33 -0
  255. synapse/py.typed +0 -0
  256. synapse/replication/__init__.py +20 -0
  257. synapse/replication/http/__init__.py +68 -0
  258. synapse/replication/http/_base.py +468 -0
  259. synapse/replication/http/account_data.py +297 -0
  260. synapse/replication/http/deactivate_account.py +81 -0
  261. synapse/replication/http/delayed_events.py +62 -0
  262. synapse/replication/http/devices.py +254 -0
  263. synapse/replication/http/federation.py +334 -0
  264. synapse/replication/http/login.py +106 -0
  265. synapse/replication/http/membership.py +364 -0
  266. synapse/replication/http/presence.py +133 -0
  267. synapse/replication/http/push.py +156 -0
  268. synapse/replication/http/register.py +172 -0
  269. synapse/replication/http/send_events.py +182 -0
  270. synapse/replication/http/state.py +82 -0
  271. synapse/replication/http/streams.py +101 -0
  272. synapse/replication/tcp/__init__.py +56 -0
  273. synapse/replication/tcp/client.py +552 -0
  274. synapse/replication/tcp/commands.py +569 -0
  275. synapse/replication/tcp/context.py +41 -0
  276. synapse/replication/tcp/external_cache.py +156 -0
  277. synapse/replication/tcp/handler.py +922 -0
  278. synapse/replication/tcp/protocol.py +608 -0
  279. synapse/replication/tcp/redis.py +509 -0
  280. synapse/replication/tcp/resource.py +348 -0
  281. synapse/replication/tcp/streams/__init__.py +96 -0
  282. synapse/replication/tcp/streams/_base.py +765 -0
  283. synapse/replication/tcp/streams/events.py +287 -0
  284. synapse/replication/tcp/streams/federation.py +92 -0
  285. synapse/replication/tcp/streams/partial_state.py +80 -0
  286. synapse/res/providers.json +29 -0
  287. synapse/res/templates/_base.html +29 -0
  288. synapse/res/templates/account_previously_renewed.html +6 -0
  289. synapse/res/templates/account_renewed.html +6 -0
  290. synapse/res/templates/add_threepid.html +8 -0
  291. synapse/res/templates/add_threepid.txt +6 -0
  292. synapse/res/templates/add_threepid_failure.html +7 -0
  293. synapse/res/templates/add_threepid_success.html +6 -0
  294. synapse/res/templates/already_in_use.html +12 -0
  295. synapse/res/templates/already_in_use.txt +10 -0
  296. synapse/res/templates/auth_success.html +21 -0
  297. synapse/res/templates/invalid_token.html +6 -0
  298. synapse/res/templates/mail-Element.css +7 -0
  299. synapse/res/templates/mail-Vector.css +7 -0
  300. synapse/res/templates/mail-expiry.css +4 -0
  301. synapse/res/templates/mail.css +156 -0
  302. synapse/res/templates/notice_expiry.html +46 -0
  303. synapse/res/templates/notice_expiry.txt +7 -0
  304. synapse/res/templates/notif.html +51 -0
  305. synapse/res/templates/notif.txt +22 -0
  306. synapse/res/templates/notif_mail.html +59 -0
  307. synapse/res/templates/notif_mail.txt +10 -0
  308. synapse/res/templates/password_reset.html +10 -0
  309. synapse/res/templates/password_reset.txt +7 -0
  310. synapse/res/templates/password_reset_confirmation.html +15 -0
  311. synapse/res/templates/password_reset_failure.html +7 -0
  312. synapse/res/templates/password_reset_success.html +6 -0
  313. synapse/res/templates/recaptcha.html +42 -0
  314. synapse/res/templates/registration.html +12 -0
  315. synapse/res/templates/registration.txt +10 -0
  316. synapse/res/templates/registration_failure.html +6 -0
  317. synapse/res/templates/registration_success.html +6 -0
  318. synapse/res/templates/registration_token.html +18 -0
  319. synapse/res/templates/room.html +33 -0
  320. synapse/res/templates/room.txt +9 -0
  321. synapse/res/templates/sso.css +129 -0
  322. synapse/res/templates/sso_account_deactivated.html +25 -0
  323. synapse/res/templates/sso_auth_account_details.html +186 -0
  324. synapse/res/templates/sso_auth_account_details.js +116 -0
  325. synapse/res/templates/sso_auth_bad_user.html +26 -0
  326. synapse/res/templates/sso_auth_confirm.html +27 -0
  327. synapse/res/templates/sso_auth_success.html +26 -0
  328. synapse/res/templates/sso_error.html +71 -0
  329. synapse/res/templates/sso_footer.html +19 -0
  330. synapse/res/templates/sso_login_idp_picker.html +60 -0
  331. synapse/res/templates/sso_new_user_consent.html +30 -0
  332. synapse/res/templates/sso_partial_profile.html +19 -0
  333. synapse/res/templates/sso_redirect_confirm.html +39 -0
  334. synapse/res/templates/style.css +33 -0
  335. synapse/res/templates/terms.html +27 -0
  336. synapse/rest/__init__.py +197 -0
  337. synapse/rest/admin/__init__.py +390 -0
  338. synapse/rest/admin/_base.py +72 -0
  339. synapse/rest/admin/background_updates.py +171 -0
  340. synapse/rest/admin/devices.py +221 -0
  341. synapse/rest/admin/event_reports.py +173 -0
  342. synapse/rest/admin/events.py +69 -0
  343. synapse/rest/admin/experimental_features.py +137 -0
  344. synapse/rest/admin/federation.py +243 -0
  345. synapse/rest/admin/media.py +540 -0
  346. synapse/rest/admin/registration_tokens.py +358 -0
  347. synapse/rest/admin/rooms.py +1061 -0
  348. synapse/rest/admin/scheduled_tasks.py +70 -0
  349. synapse/rest/admin/server_notice_servlet.py +132 -0
  350. synapse/rest/admin/statistics.py +132 -0
  351. synapse/rest/admin/username_available.py +58 -0
  352. synapse/rest/admin/users.py +1606 -0
  353. synapse/rest/client/__init__.py +20 -0
  354. synapse/rest/client/_base.py +113 -0
  355. synapse/rest/client/account.py +930 -0
  356. synapse/rest/client/account_data.py +319 -0
  357. synapse/rest/client/account_validity.py +103 -0
  358. synapse/rest/client/appservice_ping.py +125 -0
  359. synapse/rest/client/auth.py +218 -0
  360. synapse/rest/client/auth_metadata.py +122 -0
  361. synapse/rest/client/capabilities.py +121 -0
  362. synapse/rest/client/delayed_events.py +165 -0
  363. synapse/rest/client/devices.py +587 -0
  364. synapse/rest/client/directory.py +211 -0
  365. synapse/rest/client/events.py +116 -0
  366. synapse/rest/client/filter.py +112 -0
  367. synapse/rest/client/initial_sync.py +65 -0
  368. synapse/rest/client/keys.py +678 -0
  369. synapse/rest/client/knock.py +104 -0
  370. synapse/rest/client/login.py +750 -0
  371. synapse/rest/client/login_token_request.py +127 -0
  372. synapse/rest/client/logout.py +93 -0
  373. synapse/rest/client/matrixrtc.py +52 -0
  374. synapse/rest/client/media.py +285 -0
  375. synapse/rest/client/mutual_rooms.py +93 -0
  376. synapse/rest/client/notifications.py +137 -0
  377. synapse/rest/client/openid.py +109 -0
  378. synapse/rest/client/password_policy.py +69 -0
  379. synapse/rest/client/presence.py +131 -0
  380. synapse/rest/client/profile.py +291 -0
  381. synapse/rest/client/push_rule.py +331 -0
  382. synapse/rest/client/pusher.py +181 -0
  383. synapse/rest/client/read_marker.py +104 -0
  384. synapse/rest/client/receipts.py +165 -0
  385. synapse/rest/client/register.py +1067 -0
  386. synapse/rest/client/relations.py +138 -0
  387. synapse/rest/client/rendezvous.py +76 -0
  388. synapse/rest/client/reporting.py +207 -0
  389. synapse/rest/client/room.py +1669 -0
  390. synapse/rest/client/room_keys.py +426 -0
  391. synapse/rest/client/room_upgrade_rest_servlet.py +112 -0
  392. synapse/rest/client/sendtodevice.py +85 -0
  393. synapse/rest/client/sync.py +1131 -0
  394. synapse/rest/client/tags.py +129 -0
  395. synapse/rest/client/thirdparty.py +130 -0
  396. synapse/rest/client/thread_subscriptions.py +247 -0
  397. synapse/rest/client/tokenrefresh.py +52 -0
  398. synapse/rest/client/transactions.py +149 -0
  399. synapse/rest/client/user_directory.py +90 -0
  400. synapse/rest/client/versions.py +191 -0
  401. synapse/rest/client/voip.py +88 -0
  402. synapse/rest/consent/__init__.py +0 -0
  403. synapse/rest/consent/consent_resource.py +210 -0
  404. synapse/rest/health.py +38 -0
  405. synapse/rest/key/__init__.py +20 -0
  406. synapse/rest/key/v2/__init__.py +40 -0
  407. synapse/rest/key/v2/local_key_resource.py +125 -0
  408. synapse/rest/key/v2/remote_key_resource.py +302 -0
  409. synapse/rest/media/__init__.py +0 -0
  410. synapse/rest/media/config_resource.py +53 -0
  411. synapse/rest/media/create_resource.py +90 -0
  412. synapse/rest/media/download_resource.py +110 -0
  413. synapse/rest/media/media_repository_resource.py +113 -0
  414. synapse/rest/media/preview_url_resource.py +77 -0
  415. synapse/rest/media/thumbnail_resource.py +142 -0
  416. synapse/rest/media/upload_resource.py +187 -0
  417. synapse/rest/media/v1/__init__.py +39 -0
  418. synapse/rest/media/v1/_base.py +23 -0
  419. synapse/rest/media/v1/media_storage.py +23 -0
  420. synapse/rest/media/v1/storage_provider.py +23 -0
  421. synapse/rest/synapse/__init__.py +20 -0
  422. synapse/rest/synapse/client/__init__.py +93 -0
  423. synapse/rest/synapse/client/federation_whitelist.py +66 -0
  424. synapse/rest/synapse/client/jwks.py +77 -0
  425. synapse/rest/synapse/client/new_user_consent.py +115 -0
  426. synapse/rest/synapse/client/oidc/__init__.py +45 -0
  427. synapse/rest/synapse/client/oidc/backchannel_logout_resource.py +42 -0
  428. synapse/rest/synapse/client/oidc/callback_resource.py +48 -0
  429. synapse/rest/synapse/client/password_reset.py +129 -0
  430. synapse/rest/synapse/client/pick_idp.py +107 -0
  431. synapse/rest/synapse/client/pick_username.py +153 -0
  432. synapse/rest/synapse/client/rendezvous.py +58 -0
  433. synapse/rest/synapse/client/saml2/__init__.py +42 -0
  434. synapse/rest/synapse/client/saml2/metadata_resource.py +46 -0
  435. synapse/rest/synapse/client/saml2/response_resource.py +52 -0
  436. synapse/rest/synapse/client/sso_register.py +56 -0
  437. synapse/rest/synapse/client/unsubscribe.py +88 -0
  438. synapse/rest/synapse/mas/__init__.py +71 -0
  439. synapse/rest/synapse/mas/_base.py +55 -0
  440. synapse/rest/synapse/mas/devices.py +239 -0
  441. synapse/rest/synapse/mas/users.py +469 -0
  442. synapse/rest/well_known.py +148 -0
  443. synapse/server.py +1257 -0
  444. synapse/server_notices/__init__.py +0 -0
  445. synapse/server_notices/consent_server_notices.py +136 -0
  446. synapse/server_notices/resource_limits_server_notices.py +215 -0
  447. synapse/server_notices/server_notices_manager.py +388 -0
  448. synapse/server_notices/server_notices_sender.py +67 -0
  449. synapse/server_notices/worker_server_notices_sender.py +46 -0
  450. synapse/spam_checker_api/__init__.py +31 -0
  451. synapse/state/__init__.py +1022 -0
  452. synapse/state/v1.py +369 -0
  453. synapse/state/v2.py +984 -0
  454. synapse/static/client/login/index.html +47 -0
  455. synapse/static/client/login/js/jquery-3.4.1.min.js +2 -0
  456. synapse/static/client/login/js/login.js +291 -0
  457. synapse/static/client/login/spinner.gif +0 -0
  458. synapse/static/client/login/style.css +79 -0
  459. synapse/static/index.html +63 -0
  460. synapse/storage/__init__.py +43 -0
  461. synapse/storage/_base.py +245 -0
  462. synapse/storage/admin_client_config.py +25 -0
  463. synapse/storage/background_updates.py +1188 -0
  464. synapse/storage/controllers/__init__.py +57 -0
  465. synapse/storage/controllers/persist_events.py +1237 -0
  466. synapse/storage/controllers/purge_events.py +455 -0
  467. synapse/storage/controllers/state.py +950 -0
  468. synapse/storage/controllers/stats.py +119 -0
  469. synapse/storage/database.py +2719 -0
  470. synapse/storage/databases/__init__.py +175 -0
  471. synapse/storage/databases/main/__init__.py +420 -0
  472. synapse/storage/databases/main/account_data.py +1059 -0
  473. synapse/storage/databases/main/appservice.py +473 -0
  474. synapse/storage/databases/main/cache.py +911 -0
  475. synapse/storage/databases/main/censor_events.py +225 -0
  476. synapse/storage/databases/main/client_ips.py +815 -0
  477. synapse/storage/databases/main/delayed_events.py +562 -0
  478. synapse/storage/databases/main/deviceinbox.py +1271 -0
  479. synapse/storage/databases/main/devices.py +2578 -0
  480. synapse/storage/databases/main/directory.py +212 -0
  481. synapse/storage/databases/main/e2e_room_keys.py +689 -0
  482. synapse/storage/databases/main/end_to_end_keys.py +1894 -0
  483. synapse/storage/databases/main/event_federation.py +2508 -0
  484. synapse/storage/databases/main/event_push_actions.py +1933 -0
  485. synapse/storage/databases/main/events.py +3765 -0
  486. synapse/storage/databases/main/events_bg_updates.py +2910 -0
  487. synapse/storage/databases/main/events_forward_extremities.py +126 -0
  488. synapse/storage/databases/main/events_worker.py +2786 -0
  489. synapse/storage/databases/main/experimental_features.py +130 -0
  490. synapse/storage/databases/main/filtering.py +231 -0
  491. synapse/storage/databases/main/keys.py +291 -0
  492. synapse/storage/databases/main/lock.py +553 -0
  493. synapse/storage/databases/main/media_repository.py +1068 -0
  494. synapse/storage/databases/main/metrics.py +460 -0
  495. synapse/storage/databases/main/monthly_active_users.py +443 -0
  496. synapse/storage/databases/main/openid.py +60 -0
  497. synapse/storage/databases/main/presence.py +509 -0
  498. synapse/storage/databases/main/profile.py +539 -0
  499. synapse/storage/databases/main/purge_events.py +521 -0
  500. synapse/storage/databases/main/push_rule.py +970 -0
  501. synapse/storage/databases/main/pusher.py +793 -0
  502. synapse/storage/databases/main/receipts.py +1341 -0
  503. synapse/storage/databases/main/registration.py +3072 -0
  504. synapse/storage/databases/main/rejections.py +37 -0
  505. synapse/storage/databases/main/relations.py +1116 -0
  506. synapse/storage/databases/main/room.py +2779 -0
  507. synapse/storage/databases/main/roommember.py +2110 -0
  508. synapse/storage/databases/main/search.py +939 -0
  509. synapse/storage/databases/main/session.py +151 -0
  510. synapse/storage/databases/main/signatures.py +94 -0
  511. synapse/storage/databases/main/sliding_sync.py +603 -0
  512. synapse/storage/databases/main/state.py +1002 -0
  513. synapse/storage/databases/main/state_deltas.py +329 -0
  514. synapse/storage/databases/main/stats.py +789 -0
  515. synapse/storage/databases/main/stream.py +2577 -0
  516. synapse/storage/databases/main/tags.py +360 -0
  517. synapse/storage/databases/main/task_scheduler.py +225 -0
  518. synapse/storage/databases/main/thread_subscriptions.py +589 -0
  519. synapse/storage/databases/main/transactions.py +675 -0
  520. synapse/storage/databases/main/ui_auth.py +420 -0
  521. synapse/storage/databases/main/user_directory.py +1330 -0
  522. synapse/storage/databases/main/user_erasure_store.py +117 -0
  523. synapse/storage/databases/state/__init__.py +22 -0
  524. synapse/storage/databases/state/bg_updates.py +497 -0
  525. synapse/storage/databases/state/deletion.py +557 -0
  526. synapse/storage/databases/state/store.py +948 -0
  527. synapse/storage/engines/__init__.py +70 -0
  528. synapse/storage/engines/_base.py +154 -0
  529. synapse/storage/engines/postgres.py +261 -0
  530. synapse/storage/engines/sqlite.py +199 -0
  531. synapse/storage/invite_rule.py +112 -0
  532. synapse/storage/keys.py +40 -0
  533. synapse/storage/prepare_database.py +730 -0
  534. synapse/storage/push_rule.py +28 -0
  535. synapse/storage/roommember.py +88 -0
  536. synapse/storage/schema/README.md +4 -0
  537. synapse/storage/schema/__init__.py +186 -0
  538. synapse/storage/schema/common/delta/25/00background_updates.sql +40 -0
  539. synapse/storage/schema/common/delta/35/00background_updates_add_col.sql +36 -0
  540. synapse/storage/schema/common/delta/58/00background_update_ordering.sql +38 -0
  541. synapse/storage/schema/common/full_schemas/72/full.sql.postgres +8 -0
  542. synapse/storage/schema/common/full_schemas/72/full.sql.sqlite +6 -0
  543. synapse/storage/schema/common/schema_version.sql +60 -0
  544. synapse/storage/schema/main/delta/12/v12.sql +82 -0
  545. synapse/storage/schema/main/delta/13/v13.sql +38 -0
  546. synapse/storage/schema/main/delta/14/v14.sql +42 -0
  547. synapse/storage/schema/main/delta/15/appservice_txns.sql +50 -0
  548. synapse/storage/schema/main/delta/15/presence_indices.sql +2 -0
  549. synapse/storage/schema/main/delta/15/v15.sql +24 -0
  550. synapse/storage/schema/main/delta/16/events_order_index.sql +4 -0
  551. synapse/storage/schema/main/delta/16/remote_media_cache_index.sql +2 -0
  552. synapse/storage/schema/main/delta/16/remove_duplicates.sql +9 -0
  553. synapse/storage/schema/main/delta/16/room_alias_index.sql +3 -0
  554. synapse/storage/schema/main/delta/16/unique_constraints.sql +72 -0
  555. synapse/storage/schema/main/delta/16/users.sql +56 -0
  556. synapse/storage/schema/main/delta/17/drop_indexes.sql +37 -0
  557. synapse/storage/schema/main/delta/17/server_keys.sql +43 -0
  558. synapse/storage/schema/main/delta/17/user_threepids.sql +9 -0
  559. synapse/storage/schema/main/delta/18/server_keys_bigger_ints.sql +51 -0
  560. synapse/storage/schema/main/delta/19/event_index.sql +38 -0
  561. synapse/storage/schema/main/delta/20/dummy.sql +1 -0
  562. synapse/storage/schema/main/delta/20/pushers.py +93 -0
  563. synapse/storage/schema/main/delta/21/end_to_end_keys.sql +53 -0
  564. synapse/storage/schema/main/delta/21/receipts.sql +57 -0
  565. synapse/storage/schema/main/delta/22/receipts_index.sql +41 -0
  566. synapse/storage/schema/main/delta/22/user_threepids_unique.sql +19 -0
  567. synapse/storage/schema/main/delta/24/stats_reporting.sql +37 -0
  568. synapse/storage/schema/main/delta/25/fts.py +81 -0
  569. synapse/storage/schema/main/delta/25/guest_access.sql +44 -0
  570. synapse/storage/schema/main/delta/25/history_visibility.sql +44 -0
  571. synapse/storage/schema/main/delta/25/tags.sql +57 -0
  572. synapse/storage/schema/main/delta/26/account_data.sql +36 -0
  573. synapse/storage/schema/main/delta/27/account_data.sql +55 -0
  574. synapse/storage/schema/main/delta/27/forgotten_memberships.sql +45 -0
  575. synapse/storage/schema/main/delta/27/ts.py +61 -0
  576. synapse/storage/schema/main/delta/28/event_push_actions.sql +46 -0
  577. synapse/storage/schema/main/delta/28/events_room_stream.sql +39 -0
  578. synapse/storage/schema/main/delta/28/public_roms_index.sql +39 -0
  579. synapse/storage/schema/main/delta/28/receipts_user_id_index.sql +41 -0
  580. synapse/storage/schema/main/delta/28/upgrade_times.sql +40 -0
  581. synapse/storage/schema/main/delta/28/users_is_guest.sql +41 -0
  582. synapse/storage/schema/main/delta/29/push_actions.sql +54 -0
  583. synapse/storage/schema/main/delta/30/alias_creator.sql +35 -0
  584. synapse/storage/schema/main/delta/30/as_users.py +82 -0
  585. synapse/storage/schema/main/delta/30/deleted_pushers.sql +44 -0
  586. synapse/storage/schema/main/delta/30/presence_stream.sql +49 -0
  587. synapse/storage/schema/main/delta/30/public_rooms.sql +42 -0
  588. synapse/storage/schema/main/delta/30/push_rule_stream.sql +57 -0
  589. synapse/storage/schema/main/delta/30/threepid_guest_access_tokens.sql +43 -0
  590. synapse/storage/schema/main/delta/31/invites.sql +61 -0
  591. synapse/storage/schema/main/delta/31/local_media_repository_url_cache.sql +46 -0
  592. synapse/storage/schema/main/delta/31/pushers_0.py +92 -0
  593. synapse/storage/schema/main/delta/31/pushers_index.sql +41 -0
  594. synapse/storage/schema/main/delta/31/search_update.py +65 -0
  595. synapse/storage/schema/main/delta/32/events.sql +35 -0
  596. synapse/storage/schema/main/delta/32/openid.sql +9 -0
  597. synapse/storage/schema/main/delta/32/pusher_throttle.sql +42 -0
  598. synapse/storage/schema/main/delta/32/remove_indices.sql +52 -0
  599. synapse/storage/schema/main/delta/32/reports.sql +44 -0
  600. synapse/storage/schema/main/delta/33/access_tokens_device_index.sql +36 -0
  601. synapse/storage/schema/main/delta/33/devices.sql +40 -0
  602. synapse/storage/schema/main/delta/33/devices_for_e2e_keys.sql +38 -0
  603. synapse/storage/schema/main/delta/33/devices_for_e2e_keys_clear_unknown_device.sql +39 -0
  604. synapse/storage/schema/main/delta/33/event_fields.py +61 -0
  605. synapse/storage/schema/main/delta/33/remote_media_ts.py +43 -0
  606. synapse/storage/schema/main/delta/33/user_ips_index.sql +36 -0
  607. synapse/storage/schema/main/delta/34/appservice_stream.sql +42 -0
  608. synapse/storage/schema/main/delta/34/cache_stream.py +50 -0
  609. synapse/storage/schema/main/delta/34/device_inbox.sql +43 -0
  610. synapse/storage/schema/main/delta/34/push_display_name_rename.sql +39 -0
  611. synapse/storage/schema/main/delta/34/received_txn_purge.py +36 -0
  612. synapse/storage/schema/main/delta/35/contains_url.sql +36 -0
  613. synapse/storage/schema/main/delta/35/device_outbox.sql +58 -0
  614. synapse/storage/schema/main/delta/35/device_stream_id.sql +40 -0
  615. synapse/storage/schema/main/delta/35/event_push_actions_index.sql +36 -0
  616. synapse/storage/schema/main/delta/35/public_room_list_change_stream.sql +52 -0
  617. synapse/storage/schema/main/delta/35/stream_order_to_extrem.sql +56 -0
  618. synapse/storage/schema/main/delta/36/readd_public_rooms.sql +45 -0
  619. synapse/storage/schema/main/delta/37/remove_auth_idx.py +89 -0
  620. synapse/storage/schema/main/delta/37/user_threepids.sql +71 -0
  621. synapse/storage/schema/main/delta/38/postgres_fts_gist.sql +38 -0
  622. synapse/storage/schema/main/delta/39/appservice_room_list.sql +48 -0
  623. synapse/storage/schema/main/delta/39/device_federation_stream_idx.sql +35 -0
  624. synapse/storage/schema/main/delta/39/event_push_index.sql +36 -0
  625. synapse/storage/schema/main/delta/39/federation_out_position.sql +41 -0
  626. synapse/storage/schema/main/delta/39/membership_profile.sql +39 -0
  627. synapse/storage/schema/main/delta/40/current_state_idx.sql +36 -0
  628. synapse/storage/schema/main/delta/40/device_inbox.sql +40 -0
  629. synapse/storage/schema/main/delta/40/device_list_streams.sql +79 -0
  630. synapse/storage/schema/main/delta/40/event_push_summary.sql +57 -0
  631. synapse/storage/schema/main/delta/40/pushers.sql +58 -0
  632. synapse/storage/schema/main/delta/41/device_list_stream_idx.sql +36 -0
  633. synapse/storage/schema/main/delta/41/device_outbound_index.sql +35 -0
  634. synapse/storage/schema/main/delta/41/event_search_event_id_idx.sql +36 -0
  635. synapse/storage/schema/main/delta/41/ratelimit.sql +41 -0
  636. synapse/storage/schema/main/delta/42/current_state_delta.sql +48 -0
  637. synapse/storage/schema/main/delta/42/device_list_last_id.sql +52 -0
  638. synapse/storage/schema/main/delta/42/event_auth_state_only.sql +36 -0
  639. synapse/storage/schema/main/delta/42/user_dir.py +88 -0
  640. synapse/storage/schema/main/delta/43/blocked_rooms.sql +40 -0
  641. synapse/storage/schema/main/delta/43/quarantine_media.sql +36 -0
  642. synapse/storage/schema/main/delta/43/url_cache.sql +35 -0
  643. synapse/storage/schema/main/delta/43/user_share.sql +52 -0
  644. synapse/storage/schema/main/delta/44/expire_url_cache.sql +60 -0
  645. synapse/storage/schema/main/delta/45/group_server.sql +186 -0
  646. synapse/storage/schema/main/delta/45/profile_cache.sql +47 -0
  647. synapse/storage/schema/main/delta/46/drop_refresh_tokens.sql +36 -0
  648. synapse/storage/schema/main/delta/46/drop_unique_deleted_pushers.sql +54 -0
  649. synapse/storage/schema/main/delta/46/group_server.sql +51 -0
  650. synapse/storage/schema/main/delta/46/local_media_repository_url_idx.sql +43 -0
  651. synapse/storage/schema/main/delta/46/user_dir_null_room_ids.sql +54 -0
  652. synapse/storage/schema/main/delta/46/user_dir_typos.sql +43 -0
  653. synapse/storage/schema/main/delta/47/last_access_media.sql +35 -0
  654. synapse/storage/schema/main/delta/47/postgres_fts_gin.sql +36 -0
  655. synapse/storage/schema/main/delta/47/push_actions_staging.sql +47 -0
  656. synapse/storage/schema/main/delta/48/add_user_consent.sql +37 -0
  657. synapse/storage/schema/main/delta/48/add_user_ips_last_seen_index.sql +36 -0
  658. synapse/storage/schema/main/delta/48/deactivated_users.sql +44 -0
  659. synapse/storage/schema/main/delta/48/group_unique_indexes.py +67 -0
  660. synapse/storage/schema/main/delta/48/groups_joinable.sql +41 -0
  661. synapse/storage/schema/main/delta/49/add_user_consent_server_notice_sent.sql +39 -0
  662. synapse/storage/schema/main/delta/49/add_user_daily_visits.sql +40 -0
  663. synapse/storage/schema/main/delta/49/add_user_ips_last_seen_only_index.sql +36 -0
  664. synapse/storage/schema/main/delta/50/add_creation_ts_users_index.sql +38 -0
  665. synapse/storage/schema/main/delta/50/erasure_store.sql +40 -0
  666. synapse/storage/schema/main/delta/50/make_event_content_nullable.py +102 -0
  667. synapse/storage/schema/main/delta/51/e2e_room_keys.sql +58 -0
  668. synapse/storage/schema/main/delta/51/monthly_active_users.sql +46 -0
  669. synapse/storage/schema/main/delta/52/add_event_to_state_group_index.sql +38 -0
  670. synapse/storage/schema/main/delta/52/device_list_streams_unique_idx.sql +55 -0
  671. synapse/storage/schema/main/delta/52/e2e_room_keys.sql +72 -0
  672. synapse/storage/schema/main/delta/53/add_user_type_to_users.sql +38 -0
  673. synapse/storage/schema/main/delta/53/drop_sent_transactions.sql +35 -0
  674. synapse/storage/schema/main/delta/53/event_format_version.sql +35 -0
  675. synapse/storage/schema/main/delta/53/user_dir_populate.sql +49 -0
  676. synapse/storage/schema/main/delta/53/user_ips_index.sql +49 -0
  677. synapse/storage/schema/main/delta/53/user_share.sql +63 -0
  678. synapse/storage/schema/main/delta/53/user_threepid_id.sql +48 -0
  679. synapse/storage/schema/main/delta/53/users_in_public_rooms.sql +47 -0
  680. synapse/storage/schema/main/delta/54/account_validity_with_renewal.sql +49 -0
  681. synapse/storage/schema/main/delta/54/add_validity_to_server_keys.sql +42 -0
  682. synapse/storage/schema/main/delta/54/delete_forward_extremities.sql +42 -0
  683. synapse/storage/schema/main/delta/54/drop_legacy_tables.sql +49 -0
  684. synapse/storage/schema/main/delta/54/drop_presence_list.sql +35 -0
  685. synapse/storage/schema/main/delta/54/relations.sql +46 -0
  686. synapse/storage/schema/main/delta/54/stats.sql +99 -0
  687. synapse/storage/schema/main/delta/54/stats2.sql +47 -0
  688. synapse/storage/schema/main/delta/55/access_token_expiry.sql +37 -0
  689. synapse/storage/schema/main/delta/55/track_threepid_validations.sql +50 -0
  690. synapse/storage/schema/main/delta/55/users_alter_deactivated.sql +38 -0
  691. synapse/storage/schema/main/delta/56/add_spans_to_device_lists.sql +39 -0
  692. synapse/storage/schema/main/delta/56/current_state_events_membership.sql +41 -0
  693. synapse/storage/schema/main/delta/56/current_state_events_membership_mk2.sql +43 -0
  694. synapse/storage/schema/main/delta/56/delete_keys_from_deleted_backups.sql +44 -0
  695. synapse/storage/schema/main/delta/56/destinations_failure_ts.sql +44 -0
  696. synapse/storage/schema/main/delta/56/destinations_retry_interval_type.sql.postgres +18 -0
  697. synapse/storage/schema/main/delta/56/device_stream_id_insert.sql +39 -0
  698. synapse/storage/schema/main/delta/56/devices_last_seen.sql +43 -0
  699. synapse/storage/schema/main/delta/56/drop_unused_event_tables.sql +39 -0
  700. synapse/storage/schema/main/delta/56/event_expiry.sql +40 -0
  701. synapse/storage/schema/main/delta/56/event_labels.sql +49 -0
  702. synapse/storage/schema/main/delta/56/event_labels_background_update.sql +36 -0
  703. synapse/storage/schema/main/delta/56/fix_room_keys_index.sql +37 -0
  704. synapse/storage/schema/main/delta/56/hidden_devices.sql +37 -0
  705. synapse/storage/schema/main/delta/56/hidden_devices_fix.sql.sqlite +42 -0
  706. synapse/storage/schema/main/delta/56/nuke_empty_communities_from_db.sql +48 -0
  707. synapse/storage/schema/main/delta/56/public_room_list_idx.sql +35 -0
  708. synapse/storage/schema/main/delta/56/redaction_censor.sql +35 -0
  709. synapse/storage/schema/main/delta/56/redaction_censor2.sql +41 -0
  710. synapse/storage/schema/main/delta/56/redaction_censor3_fix_update.sql.postgres +25 -0
  711. synapse/storage/schema/main/delta/56/redaction_censor4.sql +35 -0
  712. synapse/storage/schema/main/delta/56/remove_tombstoned_rooms_from_directory.sql +38 -0
  713. synapse/storage/schema/main/delta/56/room_key_etag.sql +36 -0
  714. synapse/storage/schema/main/delta/56/room_membership_idx.sql +37 -0
  715. synapse/storage/schema/main/delta/56/room_retention.sql +52 -0
  716. synapse/storage/schema/main/delta/56/signing_keys.sql +75 -0
  717. synapse/storage/schema/main/delta/56/signing_keys_nonunique_signatures.sql +41 -0
  718. synapse/storage/schema/main/delta/56/stats_separated.sql +175 -0
  719. synapse/storage/schema/main/delta/56/unique_user_filter_index.py +46 -0
  720. synapse/storage/schema/main/delta/56/user_external_ids.sql +43 -0
  721. synapse/storage/schema/main/delta/56/users_in_public_rooms_idx.sql +36 -0
  722. synapse/storage/schema/main/delta/57/delete_old_current_state_events.sql +41 -0
  723. synapse/storage/schema/main/delta/57/device_list_remote_cache_stale.sql +44 -0
  724. synapse/storage/schema/main/delta/57/local_current_membership.py +111 -0
  725. synapse/storage/schema/main/delta/57/remove_sent_outbound_pokes.sql +40 -0
  726. synapse/storage/schema/main/delta/57/rooms_version_column.sql +43 -0
  727. synapse/storage/schema/main/delta/57/rooms_version_column_2.sql.postgres +35 -0
  728. synapse/storage/schema/main/delta/57/rooms_version_column_2.sql.sqlite +22 -0
  729. synapse/storage/schema/main/delta/57/rooms_version_column_3.sql.postgres +39 -0
  730. synapse/storage/schema/main/delta/57/rooms_version_column_3.sql.sqlite +23 -0
  731. synapse/storage/schema/main/delta/58/02remove_dup_outbound_pokes.sql +41 -0
  732. synapse/storage/schema/main/delta/58/03persist_ui_auth.sql +55 -0
  733. synapse/storage/schema/main/delta/58/05cache_instance.sql.postgres +30 -0
  734. synapse/storage/schema/main/delta/58/06dlols_unique_idx.py +83 -0
  735. synapse/storage/schema/main/delta/58/07add_method_to_thumbnail_constraint.sql.postgres +33 -0
  736. synapse/storage/schema/main/delta/58/07add_method_to_thumbnail_constraint.sql.sqlite +44 -0
  737. synapse/storage/schema/main/delta/58/07persist_ui_auth_ips.sql +44 -0
  738. synapse/storage/schema/main/delta/58/08_media_safe_from_quarantine.sql.postgres +18 -0
  739. synapse/storage/schema/main/delta/58/08_media_safe_from_quarantine.sql.sqlite +18 -0
  740. synapse/storage/schema/main/delta/58/09shadow_ban.sql +37 -0
  741. synapse/storage/schema/main/delta/58/10_pushrules_enabled_delete_obsolete.sql +47 -0
  742. synapse/storage/schema/main/delta/58/10drop_local_rejections_stream.sql +41 -0
  743. synapse/storage/schema/main/delta/58/10federation_pos_instance_name.sql +41 -0
  744. synapse/storage/schema/main/delta/58/11dehydration.sql +39 -0
  745. synapse/storage/schema/main/delta/58/11fallback.sql +43 -0
  746. synapse/storage/schema/main/delta/58/11user_id_seq.py +38 -0
  747. synapse/storage/schema/main/delta/58/12room_stats.sql +51 -0
  748. synapse/storage/schema/main/delta/58/13remove_presence_allow_inbound.sql +36 -0
  749. synapse/storage/schema/main/delta/58/14events_instance_name.sql +35 -0
  750. synapse/storage/schema/main/delta/58/14events_instance_name.sql.postgres +28 -0
  751. synapse/storage/schema/main/delta/58/15_catchup_destination_rooms.sql +61 -0
  752. synapse/storage/schema/main/delta/58/15unread_count.sql +45 -0
  753. synapse/storage/schema/main/delta/58/16populate_stats_process_rooms_fix.sql +41 -0
  754. synapse/storage/schema/main/delta/58/17_catchup_last_successful.sql +40 -0
  755. synapse/storage/schema/main/delta/58/18stream_positions.sql +41 -0
  756. synapse/storage/schema/main/delta/58/19instance_map.sql.postgres +25 -0
  757. synapse/storage/schema/main/delta/58/19txn_id.sql +59 -0
  758. synapse/storage/schema/main/delta/58/20instance_name_event_tables.sql +36 -0
  759. synapse/storage/schema/main/delta/58/20user_daily_visits.sql +37 -0
  760. synapse/storage/schema/main/delta/58/21as_device_stream.sql +36 -0
  761. synapse/storage/schema/main/delta/58/21drop_device_max_stream_id.sql +1 -0
  762. synapse/storage/schema/main/delta/58/22puppet_token.sql +36 -0
  763. synapse/storage/schema/main/delta/58/22users_have_local_media.sql +2 -0
  764. synapse/storage/schema/main/delta/58/23e2e_cross_signing_keys_idx.sql +36 -0
  765. synapse/storage/schema/main/delta/58/24drop_event_json_index.sql +38 -0
  766. synapse/storage/schema/main/delta/58/25user_external_ids_user_id_idx.sql +36 -0
  767. synapse/storage/schema/main/delta/58/26access_token_last_validated.sql +37 -0
  768. synapse/storage/schema/main/delta/58/27local_invites.sql +37 -0
  769. synapse/storage/schema/main/delta/58/28drop_last_used_column.sql.postgres +16 -0
  770. synapse/storage/schema/main/delta/58/28drop_last_used_column.sql.sqlite +62 -0
  771. synapse/storage/schema/main/delta/59/01ignored_user.py +85 -0
  772. synapse/storage/schema/main/delta/59/02shard_send_to_device.sql +37 -0
  773. synapse/storage/schema/main/delta/59/03shard_send_to_device_sequence.sql.postgres +25 -0
  774. synapse/storage/schema/main/delta/59/04_event_auth_chains.sql +71 -0
  775. synapse/storage/schema/main/delta/59/04_event_auth_chains.sql.postgres +16 -0
  776. synapse/storage/schema/main/delta/59/04drop_account_data.sql +36 -0
  777. synapse/storage/schema/main/delta/59/05cache_invalidation.sql +36 -0
  778. synapse/storage/schema/main/delta/59/06chain_cover_index.sql +36 -0
  779. synapse/storage/schema/main/delta/59/06shard_account_data.sql +39 -0
  780. synapse/storage/schema/main/delta/59/06shard_account_data.sql.postgres +32 -0
  781. synapse/storage/schema/main/delta/59/07shard_account_data_fix.sql +37 -0
  782. synapse/storage/schema/main/delta/59/08delete_pushers_for_deactivated_accounts.sql +39 -0
  783. synapse/storage/schema/main/delta/59/08delete_stale_pushers.sql +39 -0
  784. synapse/storage/schema/main/delta/59/09rejected_events_metadata.sql +45 -0
  785. synapse/storage/schema/main/delta/59/10delete_purged_chain_cover.sql +36 -0
  786. synapse/storage/schema/main/delta/59/11add_knock_members_to_stats.sql +39 -0
  787. synapse/storage/schema/main/delta/59/11drop_thumbnail_constraint.sql.postgres +22 -0
  788. synapse/storage/schema/main/delta/59/12account_validity_token_used_ts_ms.sql +37 -0
  789. synapse/storage/schema/main/delta/59/12presence_stream_instance.sql +37 -0
  790. synapse/storage/schema/main/delta/59/12presence_stream_instance_seq.sql.postgres +20 -0
  791. synapse/storage/schema/main/delta/59/13users_to_send_full_presence_to.sql +53 -0
  792. synapse/storage/schema/main/delta/59/14refresh_tokens.sql +53 -0
  793. synapse/storage/schema/main/delta/59/15locks.sql +56 -0
  794. synapse/storage/schema/main/delta/59/16federation_inbound_staging.sql +51 -0
  795. synapse/storage/schema/main/delta/60/01recreate_stream_ordering.sql.postgres +45 -0
  796. synapse/storage/schema/main/delta/60/02change_stream_ordering_columns.sql.postgres +30 -0
  797. synapse/storage/schema/main/delta/61/01change_appservices_txns.sql.postgres +23 -0
  798. synapse/storage/schema/main/delta/61/01insertion_event_lookups.sql +68 -0
  799. synapse/storage/schema/main/delta/61/02drop_redundant_room_depth_index.sql +37 -0
  800. synapse/storage/schema/main/delta/61/03recreate_min_depth.py +74 -0
  801. synapse/storage/schema/main/delta/62/01insertion_event_extremities.sql +43 -0
  802. synapse/storage/schema/main/delta/63/01create_registration_tokens.sql +42 -0
  803. synapse/storage/schema/main/delta/63/02delete_unlinked_email_pushers.sql +39 -0
  804. synapse/storage/schema/main/delta/63/02populate-rooms-creator.sql +36 -0
  805. synapse/storage/schema/main/delta/63/03session_store.sql +42 -0
  806. synapse/storage/schema/main/delta/63/04add_presence_stream_not_offline_index.sql +37 -0
  807. synapse/storage/schema/main/delta/64/01msc2716_chunk_to_batch_rename.sql.postgres +23 -0
  808. synapse/storage/schema/main/delta/64/01msc2716_chunk_to_batch_rename.sql.sqlite +37 -0
  809. synapse/storage/schema/main/delta/65/01msc2716_insertion_event_edges.sql +38 -0
  810. synapse/storage/schema/main/delta/65/03remove_hidden_devices_from_device_inbox.sql +41 -0
  811. synapse/storage/schema/main/delta/65/04_local_group_updates.sql +37 -0
  812. synapse/storage/schema/main/delta/65/05_remove_room_stats_historical_and_user_stats_historical.sql +38 -0
  813. synapse/storage/schema/main/delta/65/06remove_deleted_devices_from_device_inbox.sql +53 -0
  814. synapse/storage/schema/main/delta/65/07_arbitrary_relations.sql +37 -0
  815. synapse/storage/schema/main/delta/65/08_device_inbox_background_updates.sql +37 -0
  816. synapse/storage/schema/main/delta/65/10_expirable_refresh_tokens.sql +47 -0
  817. synapse/storage/schema/main/delta/65/11_devices_auth_provider_session.sql +46 -0
  818. synapse/storage/schema/main/delta/67/01drop_public_room_list_stream.sql +37 -0
  819. synapse/storage/schema/main/delta/68/01event_columns.sql +45 -0
  820. synapse/storage/schema/main/delta/68/02_msc2409_add_device_id_appservice_stream_type.sql +40 -0
  821. synapse/storage/schema/main/delta/68/03_delete_account_data_for_deactivated_accounts.sql +39 -0
  822. synapse/storage/schema/main/delta/68/04_refresh_tokens_index_next_token_id.sql +47 -0
  823. synapse/storage/schema/main/delta/68/04partial_state_rooms.sql +60 -0
  824. synapse/storage/schema/main/delta/68/05_delete_non_strings_from_event_search.sql.sqlite +22 -0
  825. synapse/storage/schema/main/delta/68/05partial_state_rooms_triggers.py +80 -0
  826. synapse/storage/schema/main/delta/68/06_msc3202_add_device_list_appservice_stream_type.sql +42 -0
  827. synapse/storage/schema/main/delta/69/01as_txn_seq.py +54 -0
  828. synapse/storage/schema/main/delta/69/01device_list_oubound_by_room.sql +57 -0
  829. synapse/storage/schema/main/delta/69/02cache_invalidation_index.sql +37 -0
  830. synapse/storage/schema/main/delta/70/01clean_table_purged_rooms.sql +39 -0
  831. synapse/storage/schema/main/delta/71/01rebuild_event_edges.sql.postgres +43 -0
  832. synapse/storage/schema/main/delta/71/01rebuild_event_edges.sql.sqlite +47 -0
  833. synapse/storage/schema/main/delta/71/01remove_noop_background_updates.sql +80 -0
  834. synapse/storage/schema/main/delta/71/02event_push_summary_unique.sql +37 -0
  835. synapse/storage/schema/main/delta/72/01add_room_type_to_state_stats.sql +38 -0
  836. synapse/storage/schema/main/delta/72/01event_push_summary_receipt.sql +54 -0
  837. synapse/storage/schema/main/delta/72/02event_push_actions_index.sql +38 -0
  838. synapse/storage/schema/main/delta/72/03bg_populate_events_columns.py +57 -0
  839. synapse/storage/schema/main/delta/72/03drop_event_reference_hashes.sql +36 -0
  840. synapse/storage/schema/main/delta/72/03remove_groups.sql +50 -0
  841. synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.postgres +17 -0
  842. synapse/storage/schema/main/delta/72/04drop_column_application_services_state_last_txn.sql.sqlite +40 -0
  843. synapse/storage/schema/main/delta/72/05receipts_event_stream_ordering.sql +38 -0
  844. synapse/storage/schema/main/delta/72/05remove_unstable_private_read_receipts.sql +38 -0
  845. synapse/storage/schema/main/delta/72/06add_consent_ts_to_users.sql +35 -0
  846. synapse/storage/schema/main/delta/72/06thread_notifications.sql +49 -0
  847. synapse/storage/schema/main/delta/72/07force_update_current_state_events_membership.py +67 -0
  848. synapse/storage/schema/main/delta/72/07thread_receipts.sql.postgres +30 -0
  849. synapse/storage/schema/main/delta/72/07thread_receipts.sql.sqlite +70 -0
  850. synapse/storage/schema/main/delta/72/08begin_cache_invalidation_seq_at_2.sql.postgres +23 -0
  851. synapse/storage/schema/main/delta/72/08thread_receipts.sql +39 -0
  852. synapse/storage/schema/main/delta/72/09partial_indices.sql.sqlite +56 -0
  853. synapse/storage/schema/main/delta/73/01event_failed_pull_attempts.sql +48 -0
  854. synapse/storage/schema/main/delta/73/02add_pusher_enabled.sql +35 -0
  855. synapse/storage/schema/main/delta/73/02room_id_indexes_for_purging.sql +41 -0
  856. synapse/storage/schema/main/delta/73/03pusher_device_id.sql +39 -0
  857. synapse/storage/schema/main/delta/73/03users_approved_column.sql +39 -0
  858. synapse/storage/schema/main/delta/73/04partial_join_details.sql +42 -0
  859. synapse/storage/schema/main/delta/73/04pending_device_list_updates.sql +47 -0
  860. synapse/storage/schema/main/delta/73/05old_push_actions.sql.postgres +22 -0
  861. synapse/storage/schema/main/delta/73/05old_push_actions.sql.sqlite +24 -0
  862. synapse/storage/schema/main/delta/73/06thread_notifications_thread_id_idx.sql +42 -0
  863. synapse/storage/schema/main/delta/73/08thread_receipts_non_null.sql.postgres +23 -0
  864. synapse/storage/schema/main/delta/73/08thread_receipts_non_null.sql.sqlite +76 -0
  865. synapse/storage/schema/main/delta/73/09partial_joined_via_destination.sql +37 -0
  866. synapse/storage/schema/main/delta/73/09threads_table.sql +49 -0
  867. synapse/storage/schema/main/delta/73/10_update_sqlite_fts4_tokenizer.py +71 -0
  868. synapse/storage/schema/main/delta/73/10login_tokens.sql +54 -0
  869. synapse/storage/schema/main/delta/73/11event_search_room_id_n_distinct.sql.postgres +33 -0
  870. synapse/storage/schema/main/delta/73/12refactor_device_list_outbound_pokes.sql +72 -0
  871. synapse/storage/schema/main/delta/73/13add_device_lists_index.sql +39 -0
  872. synapse/storage/schema/main/delta/73/20_un_partial_stated_room_stream.sql +51 -0
  873. synapse/storage/schema/main/delta/73/21_un_partial_stated_room_stream_seq.sql.postgres +20 -0
  874. synapse/storage/schema/main/delta/73/22_rebuild_user_dir_stats.sql +48 -0
  875. synapse/storage/schema/main/delta/73/22_un_partial_stated_event_stream.sql +53 -0
  876. synapse/storage/schema/main/delta/73/23_fix_thread_index.sql +52 -0
  877. synapse/storage/schema/main/delta/73/23_un_partial_stated_room_stream_seq.sql.postgres +20 -0
  878. synapse/storage/schema/main/delta/73/24_events_jump_to_date_index.sql +36 -0
  879. synapse/storage/schema/main/delta/73/25drop_presence.sql +36 -0
  880. synapse/storage/schema/main/delta/74/01_user_directory_stale_remote_users.sql +58 -0
  881. synapse/storage/schema/main/delta/74/02_set_device_id_for_pushers_bg_update.sql +38 -0
  882. synapse/storage/schema/main/delta/74/03_membership_tables_event_stream_ordering.sql.postgres +29 -0
  883. synapse/storage/schema/main/delta/74/03_membership_tables_event_stream_ordering.sql.sqlite +23 -0
  884. synapse/storage/schema/main/delta/74/03_room_membership_index.sql +38 -0
  885. synapse/storage/schema/main/delta/74/04_delete_e2e_backup_keys_for_deactivated_users.sql +36 -0
  886. synapse/storage/schema/main/delta/74/04_membership_tables_event_stream_ordering_triggers.py +87 -0
  887. synapse/storage/schema/main/delta/74/05_events_txn_id_device_id.sql +72 -0
  888. synapse/storage/schema/main/delta/74/90COMMENTS_destinations.sql.postgres +52 -0
  889. synapse/storage/schema/main/delta/76/01_add_profiles_full_user_id_column.sql +39 -0
  890. synapse/storage/schema/main/delta/76/02_add_user_filters_full_user_id_column.sql +39 -0
  891. synapse/storage/schema/main/delta/76/03_per_user_experimental_features.sql +46 -0
  892. synapse/storage/schema/main/delta/76/04_add_room_forgetter.sql +43 -0
  893. synapse/storage/schema/main/delta/77/01_add_profiles_not_valid_check.sql.postgres +16 -0
  894. synapse/storage/schema/main/delta/77/02_add_user_filters_not_valid_check.sql.postgres +16 -0
  895. synapse/storage/schema/main/delta/77/03bg_populate_full_user_id_profiles.sql +35 -0
  896. synapse/storage/schema/main/delta/77/04bg_populate_full_user_id_user_filters.sql +35 -0
  897. synapse/storage/schema/main/delta/77/05thread_notifications_backfill.sql +67 -0
  898. synapse/storage/schema/main/delta/77/06thread_notifications_not_null.sql.sqlite +102 -0
  899. synapse/storage/schema/main/delta/77/06thread_notifications_not_null_event_push_actions.sql.postgres +27 -0
  900. synapse/storage/schema/main/delta/77/06thread_notifications_not_null_event_push_actions_staging.sql.postgres +27 -0
  901. synapse/storage/schema/main/delta/77/06thread_notifications_not_null_event_push_summary.sql.postgres +29 -0
  902. synapse/storage/schema/main/delta/77/14bg_indices_event_stream_ordering.sql +39 -0
  903. synapse/storage/schema/main/delta/78/01_validate_and_update_profiles.py +99 -0
  904. synapse/storage/schema/main/delta/78/02_validate_and_update_user_filters.py +100 -0
  905. synapse/storage/schema/main/delta/78/03_remove_unused_indexes_user_filters.py +72 -0
  906. synapse/storage/schema/main/delta/78/03event_extremities_constraints.py +65 -0
  907. synapse/storage/schema/main/delta/78/04_add_full_user_id_index_user_filters.py +32 -0
  908. synapse/storage/schema/main/delta/79/03_read_write_locks_triggers.sql.postgres +102 -0
  909. synapse/storage/schema/main/delta/79/03_read_write_locks_triggers.sql.sqlite +72 -0
  910. synapse/storage/schema/main/delta/79/04_mitigate_stream_ordering_update_race.py +70 -0
  911. synapse/storage/schema/main/delta/79/05_read_write_locks_triggers.sql.postgres +69 -0
  912. synapse/storage/schema/main/delta/79/05_read_write_locks_triggers.sql.sqlite +65 -0
  913. synapse/storage/schema/main/delta/80/01_users_alter_locked.sql +35 -0
  914. synapse/storage/schema/main/delta/80/02_read_write_locks_unlogged.sql.postgres +30 -0
  915. synapse/storage/schema/main/delta/80/02_scheduled_tasks.sql +47 -0
  916. synapse/storage/schema/main/delta/80/03_read_write_locks_triggers.sql.postgres +37 -0
  917. synapse/storage/schema/main/delta/80/04_read_write_locks_deadlock.sql.postgres +71 -0
  918. synapse/storage/schema/main/delta/82/02_scheduled_tasks_index.sql +35 -0
  919. synapse/storage/schema/main/delta/82/04_add_indices_for_purging_rooms.sql +39 -0
  920. synapse/storage/schema/main/delta/82/05gaps.sql +44 -0
  921. synapse/storage/schema/main/delta/83/01_drop_old_tables.sql +43 -0
  922. synapse/storage/schema/main/delta/83/03_instance_name_receipts.sql.sqlite +17 -0
  923. synapse/storage/schema/main/delta/83/05_cross_signing_key_update_grant.sql +34 -0
  924. synapse/storage/schema/main/delta/83/06_event_push_summary_room.sql +36 -0
  925. synapse/storage/schema/main/delta/84/01_auth_links_stats.sql.postgres +20 -0
  926. synapse/storage/schema/main/delta/84/02_auth_links_index.sql +16 -0
  927. synapse/storage/schema/main/delta/84/03_auth_links_analyze.sql.postgres +16 -0
  928. synapse/storage/schema/main/delta/84/04_access_token_index.sql +15 -0
  929. synapse/storage/schema/main/delta/85/01_add_suspended.sql +14 -0
  930. synapse/storage/schema/main/delta/85/02_add_instance_names.sql +27 -0
  931. synapse/storage/schema/main/delta/85/03_new_sequences.sql.postgres +54 -0
  932. synapse/storage/schema/main/delta/85/04_cleanup_device_federation_outbox.sql +15 -0
  933. synapse/storage/schema/main/delta/85/05_add_instance_names_converted_pos.sql +16 -0
  934. synapse/storage/schema/main/delta/85/06_add_room_reports.sql +20 -0
  935. synapse/storage/schema/main/delta/86/01_authenticate_media.sql +15 -0
  936. synapse/storage/schema/main/delta/86/02_receipts_event_id_index.sql +15 -0
  937. synapse/storage/schema/main/delta/87/01_sliding_sync_memberships.sql +169 -0
  938. synapse/storage/schema/main/delta/87/02_per_connection_state.sql +81 -0
  939. synapse/storage/schema/main/delta/87/03_current_state_index.sql +19 -0
  940. synapse/storage/schema/main/delta/88/01_add_delayed_events.sql +43 -0
  941. synapse/storage/schema/main/delta/88/01_custom_profile_fields.sql +15 -0
  942. synapse/storage/schema/main/delta/88/02_fix_sliding_sync_membership_snapshots_forgotten_column.sql +21 -0
  943. synapse/storage/schema/main/delta/88/03_add_otk_ts_added_index.sql +18 -0
  944. synapse/storage/schema/main/delta/88/04_current_state_delta_index.sql +18 -0
  945. synapse/storage/schema/main/delta/88/05_drop_old_otks.sql.postgres +19 -0
  946. synapse/storage/schema/main/delta/88/05_drop_old_otks.sql.sqlite +19 -0
  947. synapse/storage/schema/main/delta/88/05_sliding_sync_room_config_index.sql +20 -0
  948. synapse/storage/schema/main/delta/88/06_events_received_ts_index.sql +17 -0
  949. synapse/storage/schema/main/delta/89/01_sliding_sync_membership_snapshot_index.sql +15 -0
  950. synapse/storage/schema/main/delta/90/01_add_column_participant_room_memberships_table.sql +16 -0
  951. synapse/storage/schema/main/delta/91/01_media_hash.sql +28 -0
  952. synapse/storage/schema/main/delta/92/01_remove_trigger.sql.postgres +16 -0
  953. synapse/storage/schema/main/delta/92/01_remove_trigger.sql.sqlite +16 -0
  954. synapse/storage/schema/main/delta/92/02_remove_populate_participant_bg_update.sql +17 -0
  955. synapse/storage/schema/main/delta/92/04_ss_membership_snapshot_idx.sql +16 -0
  956. synapse/storage/schema/main/delta/92/04_thread_subscriptions.sql +59 -0
  957. synapse/storage/schema/main/delta/92/04_thread_subscriptions_seq.sql.postgres +19 -0
  958. synapse/storage/schema/main/delta/92/05_fixup_max_depth_cap.sql +17 -0
  959. synapse/storage/schema/main/delta/92/05_thread_subscriptions_comments.sql.postgres +18 -0
  960. synapse/storage/schema/main/delta/92/06_device_federation_inbox_index.sql +16 -0
  961. synapse/storage/schema/main/delta/92/06_threads_last_sent_stream_ordering_comments.sql.postgres +24 -0
  962. synapse/storage/schema/main/delta/92/07_add_user_reports.sql +22 -0
  963. synapse/storage/schema/main/delta/92/07_event_txn_id_device_id_txn_id2.sql +15 -0
  964. synapse/storage/schema/main/delta/92/08_room_ban_redactions.sql +21 -0
  965. synapse/storage/schema/main/delta/92/08_thread_subscriptions_seq_fixup.sql.postgres +19 -0
  966. synapse/storage/schema/main/delta/92/09_thread_subscriptions_update.sql +20 -0
  967. synapse/storage/schema/main/delta/92/09_thread_subscriptions_update.sql.postgres +18 -0
  968. synapse/storage/schema/main/delta/93/01_add_delayed_events.sql +15 -0
  969. synapse/storage/schema/main/full_schemas/72/full.sql.postgres +1344 -0
  970. synapse/storage/schema/main/full_schemas/72/full.sql.sqlite +646 -0
  971. synapse/storage/schema/state/delta/23/drop_state_index.sql +35 -0
  972. synapse/storage/schema/state/delta/32/remove_state_indices.sql +38 -0
  973. synapse/storage/schema/state/delta/35/add_state_index.sql +36 -0
  974. synapse/storage/schema/state/delta/35/state.sql +41 -0
  975. synapse/storage/schema/state/delta/35/state_dedupe.sql +36 -0
  976. synapse/storage/schema/state/delta/47/state_group_seq.py +38 -0
  977. synapse/storage/schema/state/delta/56/state_group_room_idx.sql +36 -0
  978. synapse/storage/schema/state/delta/61/02state_groups_state_n_distinct.sql.postgres +34 -0
  979. synapse/storage/schema/state/delta/70/08_state_group_edges_unique.sql +36 -0
  980. synapse/storage/schema/state/delta/89/01_state_groups_deletion.sql +39 -0
  981. synapse/storage/schema/state/delta/90/02_delete_unreferenced_state_groups.sql +16 -0
  982. synapse/storage/schema/state/delta/90/03_remove_old_deletion_bg_update.sql +15 -0
  983. synapse/storage/schema/state/full_schemas/72/full.sql.postgres +30 -0
  984. synapse/storage/schema/state/full_schemas/72/full.sql.sqlite +20 -0
  985. synapse/storage/types.py +183 -0
  986. synapse/storage/util/__init__.py +20 -0
  987. synapse/storage/util/id_generators.py +928 -0
  988. synapse/storage/util/partial_state_events_tracker.py +194 -0
  989. synapse/storage/util/sequence.py +315 -0
  990. synapse/streams/__init__.py +43 -0
  991. synapse/streams/config.py +91 -0
  992. synapse/streams/events.py +203 -0
  993. synapse/synapse_rust/__init__.pyi +3 -0
  994. synapse/synapse_rust/acl.pyi +20 -0
  995. synapse/synapse_rust/events.pyi +136 -0
  996. synapse/synapse_rust/http_client.pyi +32 -0
  997. synapse/synapse_rust/push.pyi +86 -0
  998. synapse/synapse_rust/rendezvous.pyi +30 -0
  999. synapse/synapse_rust/segmenter.pyi +1 -0
  1000. synapse/synapse_rust.abi3.so +0 -0
  1001. synapse/types/__init__.py +1600 -0
  1002. synapse/types/handlers/__init__.py +93 -0
  1003. synapse/types/handlers/policy_server.py +16 -0
  1004. synapse/types/handlers/sliding_sync.py +908 -0
  1005. synapse/types/rest/__init__.py +25 -0
  1006. synapse/types/rest/client/__init__.py +413 -0
  1007. synapse/types/state.py +634 -0
  1008. synapse/types/storage/__init__.py +66 -0
  1009. synapse/util/__init__.py +169 -0
  1010. synapse/util/async_helpers.py +1045 -0
  1011. synapse/util/background_queue.py +142 -0
  1012. synapse/util/batching_queue.py +202 -0
  1013. synapse/util/caches/__init__.py +300 -0
  1014. synapse/util/caches/cached_call.py +143 -0
  1015. synapse/util/caches/deferred_cache.py +530 -0
  1016. synapse/util/caches/descriptors.py +692 -0
  1017. synapse/util/caches/dictionary_cache.py +346 -0
  1018. synapse/util/caches/expiringcache.py +249 -0
  1019. synapse/util/caches/lrucache.py +975 -0
  1020. synapse/util/caches/response_cache.py +322 -0
  1021. synapse/util/caches/stream_change_cache.py +370 -0
  1022. synapse/util/caches/treecache.py +189 -0
  1023. synapse/util/caches/ttlcache.py +197 -0
  1024. synapse/util/cancellation.py +63 -0
  1025. synapse/util/check_dependencies.py +335 -0
  1026. synapse/util/clock.py +567 -0
  1027. synapse/util/constants.py +22 -0
  1028. synapse/util/daemonize.py +165 -0
  1029. synapse/util/distributor.py +157 -0
  1030. synapse/util/events.py +134 -0
  1031. synapse/util/file_consumer.py +164 -0
  1032. synapse/util/frozenutils.py +57 -0
  1033. synapse/util/gai_resolver.py +178 -0
  1034. synapse/util/hash.py +38 -0
  1035. synapse/util/httpresourcetree.py +108 -0
  1036. synapse/util/iterutils.py +189 -0
  1037. synapse/util/json.py +56 -0
  1038. synapse/util/linked_list.py +156 -0
  1039. synapse/util/logcontext.py +46 -0
  1040. synapse/util/logformatter.py +28 -0
  1041. synapse/util/macaroons.py +325 -0
  1042. synapse/util/manhole.py +191 -0
  1043. synapse/util/metrics.py +339 -0
  1044. synapse/util/module_loader.py +116 -0
  1045. synapse/util/msisdn.py +51 -0
  1046. synapse/util/patch_inline_callbacks.py +250 -0
  1047. synapse/util/pydantic_models.py +63 -0
  1048. synapse/util/ratelimitutils.py +419 -0
  1049. synapse/util/retryutils.py +339 -0
  1050. synapse/util/rlimit.py +42 -0
  1051. synapse/util/rust.py +133 -0
  1052. synapse/util/sentinel.py +21 -0
  1053. synapse/util/stringutils.py +293 -0
  1054. synapse/util/task_scheduler.py +493 -0
  1055. synapse/util/templates.py +126 -0
  1056. synapse/util/threepids.py +123 -0
  1057. synapse/util/wheel_timer.py +112 -0
  1058. synapse/visibility.py +835 -0
@@ -0,0 +1,3106 @@
1
+ #
2
+ # This file is licensed under the Affero General Public License (AGPL) version 3.
3
+ #
4
+ # Copyright 2015-2021 The Matrix.org Foundation C.I.C.
5
+ # Copyright (C) 2023 New Vector, Ltd
6
+ #
7
+ # This program is free software: you can redistribute it and/or modify
8
+ # it under the terms of the GNU Affero General Public License as
9
+ # published by the Free Software Foundation, either version 3 of the
10
+ # License, or (at your option) any later version.
11
+ #
12
+ # See the GNU Affero General Public License for more details:
13
+ # <https://www.gnu.org/licenses/agpl-3.0.html>.
14
+ #
15
+ # Originally licensed under the Apache License, Version 2.0:
16
+ # <http://www.apache.org/licenses/LICENSE-2.0>.
17
+ #
18
+ # [This file includes modifications made by New Vector Limited]
19
+ #
20
+ #
21
+ import itertools
22
+ import logging
23
+ from typing import (
24
+ TYPE_CHECKING,
25
+ AbstractSet,
26
+ Any,
27
+ Mapping,
28
+ Sequence,
29
+ )
30
+
31
+ import attr
32
+ from prometheus_client import Counter
33
+
34
+ from synapse.api.constants import (
35
+ AccountDataTypes,
36
+ Direction,
37
+ EventContentFields,
38
+ EventTypes,
39
+ JoinRules,
40
+ Membership,
41
+ )
42
+ from synapse.api.filtering import FilterCollection
43
+ from synapse.api.presence import UserPresenceState
44
+ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS
45
+ from synapse.events import EventBase
46
+ from synapse.handlers.relations import BundledAggregations
47
+ from synapse.logging import issue9533_logger
48
+ from synapse.logging.context import current_context
49
+ from synapse.logging.opentracing import (
50
+ SynapseTags,
51
+ log_kv,
52
+ set_tag,
53
+ start_active_span,
54
+ trace,
55
+ )
56
+ from synapse.metrics import SERVER_NAME_LABEL
57
+ from synapse.storage.databases.main.event_push_actions import RoomNotifCounts
58
+ from synapse.storage.databases.main.roommember import extract_heroes_from_room_summary
59
+ from synapse.storage.databases.main.stream import PaginateFunction
60
+ from synapse.storage.invite_rule import InviteRule
61
+ from synapse.storage.roommember import MemberSummary
62
+ from synapse.types import (
63
+ DeviceListUpdates,
64
+ JsonDict,
65
+ JsonMapping,
66
+ MultiWriterStreamToken,
67
+ MutableStateMap,
68
+ Requester,
69
+ RoomStreamToken,
70
+ StateMap,
71
+ StrCollection,
72
+ StreamKeyType,
73
+ StreamToken,
74
+ UserID,
75
+ )
76
+ from synapse.types.state import StateFilter
77
+ from synapse.util.async_helpers import concurrently_execute
78
+ from synapse.util.caches.expiringcache import ExpiringCache
79
+ from synapse.util.caches.lrucache import LruCache
80
+ from synapse.util.caches.response_cache import ResponseCache, ResponseCacheContext
81
+ from synapse.util.metrics import Measure
82
+ from synapse.visibility import filter_events_for_client
83
+
84
+ if TYPE_CHECKING:
85
+ from synapse.server import HomeServer
86
+
87
+ logger = logging.getLogger(__name__)
88
+
89
+ # Counts the number of times we returned a non-empty sync. `type` is one of
90
+ # "initial_sync", "full_state_sync" or "incremental_sync", `lazy_loaded` is
91
+ # "true" or "false" depending on if the request asked for lazy loaded members or
92
+ # not.
93
+ non_empty_sync_counter = Counter(
94
+ "synapse_handlers_sync_nonempty_total",
95
+ "Count of non empty sync responses. type is initial_sync/full_state_sync"
96
+ "/incremental_sync. lazy_loaded indicates if lazy loaded members were "
97
+ "enabled for that request.",
98
+ labelnames=["type", "lazy_loaded", SERVER_NAME_LABEL],
99
+ )
100
+
101
+ # Store the cache that tracks which lazy-loaded members have been sent to a given
102
+ # client for no more than 30 minutes.
103
+ LAZY_LOADED_MEMBERS_CACHE_MAX_AGE = 30 * 60 * 1000
104
+
105
+ # Remember the last 100 members we sent to a client for the purposes of
106
+ # avoiding redundantly sending the same lazy-loaded members to the client
107
+ LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE = 100
108
+
109
+
110
+ SyncRequestKey = tuple[Any, ...]
111
+
112
+
113
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
114
+ class SyncConfig:
115
+ user: UserID
116
+ filter_collection: FilterCollection
117
+ is_guest: bool
118
+ device_id: str | None
119
+ use_state_after: bool
120
+
121
+
122
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
123
+ class TimelineBatch:
124
+ prev_batch: StreamToken
125
+ events: Sequence[EventBase]
126
+ limited: bool
127
+ # A mapping of event ID to the bundled aggregations for the above events.
128
+ # This is only calculated if limited is true.
129
+ bundled_aggregations: dict[str, BundledAggregations] | None = None
130
+
131
+ def __bool__(self) -> bool:
132
+ """Make the result appear empty if there are no updates. This is used
133
+         to tell if the room needs to be part of the sync result.
134
+ """
135
+ return bool(self.events)
136
+
137
+
138
+ # We can't freeze this class, because we need to update it after it's instantiated to
139
+ # update its unread count. This is because we calculate the unread count for a room only
140
+ # if there are updates for it, which we check after the instance has been created.
141
+ # This should not be a big deal because we update the notification counts afterwards as
142
+ # well anyway.
143
+ @attr.s(slots=True, auto_attribs=True)
144
+ class JoinedSyncResult:
145
+ room_id: str
146
+ timeline: TimelineBatch
147
+ state: StateMap[EventBase]
148
+ ephemeral: list[JsonDict]
149
+ account_data: list[JsonDict]
150
+ unread_notifications: JsonDict
151
+ unread_thread_notifications: JsonDict
152
+ summary: JsonDict | None
153
+ unread_count: int
154
+
155
+ def __bool__(self) -> bool:
156
+ """Make the result appear empty if there are no updates. This is used
157
+         to tell if the room needs to be part of the sync result.
158
+ """
159
+ return bool(
160
+ self.timeline or self.state or self.ephemeral or self.account_data
161
+ # nb the notification count does not, er, count: if there's nothing
162
+ # else in the result, we don't need to send it.
163
+ )
164
+
165
+
166
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
167
+ class ArchivedSyncResult:
168
+ room_id: str
169
+ timeline: TimelineBatch
170
+ state: StateMap[EventBase]
171
+ account_data: list[JsonDict]
172
+
173
+ def __bool__(self) -> bool:
174
+ """Make the result appear empty if there are no updates. This is used
175
+         to tell if the room needs to be part of the sync result.
176
+ """
177
+ return bool(self.timeline or self.state or self.account_data)
178
+
179
+
180
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
181
+ class InvitedSyncResult:
182
+ room_id: str
183
+ invite: EventBase
184
+
185
+ def __bool__(self) -> bool:
186
+ """Invited rooms should always be reported to the client"""
187
+ return True
188
+
189
+
190
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
191
+ class KnockedSyncResult:
192
+ room_id: str
193
+ knock: EventBase
194
+
195
+ def __bool__(self) -> bool:
196
+ """Knocked rooms should always be reported to the client"""
197
+ return True
198
+
199
+
200
+ @attr.s(slots=True, auto_attribs=True)
201
+ class _RoomChanges:
202
+ """The set of room entries to include in the sync, plus the set of joined
203
+ and left room IDs since last sync.
204
+ """
205
+
206
+ room_entries: list["RoomSyncResultBuilder"]
207
+ invited: list[InvitedSyncResult]
208
+ knocked: list[KnockedSyncResult]
209
+ newly_joined_rooms: list[str]
210
+ newly_left_rooms: list[str]
211
+
212
+
213
+ @attr.s(slots=True, frozen=True, auto_attribs=True)
214
+ class SyncResult:
215
+ """
216
+ Attributes:
217
+ next_batch: Token for the next sync
218
+ presence: List of presence events for the user.
219
+ account_data: List of account_data events for the user.
220
+ joined: JoinedSyncResult for each joined room.
221
+ invited: InvitedSyncResult for each invited room.
222
+ knocked: KnockedSyncResult for each knocked on room.
223
+ archived: ArchivedSyncResult for each archived room.
224
+ to_device: List of direct messages for the device.
225
+ device_lists: List of user_ids whose devices have changed
226
+ device_one_time_keys_count: Dict of algorithm to count for one time keys
227
+ for this device
228
+ device_unused_fallback_key_types: List of key types that have an unused fallback
229
+ key
230
+ """
231
+
232
+ next_batch: StreamToken
233
+ presence: list[UserPresenceState]
234
+ account_data: list[JsonDict]
235
+ joined: list[JoinedSyncResult]
236
+ invited: list[InvitedSyncResult]
237
+ knocked: list[KnockedSyncResult]
238
+ archived: list[ArchivedSyncResult]
239
+ to_device: list[JsonDict]
240
+ device_lists: DeviceListUpdates
241
+ device_one_time_keys_count: JsonMapping
242
+ device_unused_fallback_key_types: list[str]
243
+
244
+ def __bool__(self) -> bool:
245
+ """Make the result appear empty if there are no updates. This is used
246
+ to tell if the notifier needs to wait for more events when polling for
247
+ events.
248
+ """
249
+ return bool(
250
+ self.presence
251
+ or self.joined
252
+ or self.invited
253
+ or self.knocked
254
+ or self.archived
255
+ or self.account_data
256
+ or self.to_device
257
+ or self.device_lists
258
+ )
259
+
260
+ @staticmethod
261
+ def empty(
262
+ next_batch: StreamToken,
263
+ device_one_time_keys_count: JsonMapping,
264
+ device_unused_fallback_key_types: list[str],
265
+ ) -> "SyncResult":
266
+ "Return a new empty result"
267
+ return SyncResult(
268
+ next_batch=next_batch,
269
+ presence=[],
270
+ account_data=[],
271
+ joined=[],
272
+ invited=[],
273
+ knocked=[],
274
+ archived=[],
275
+ to_device=[],
276
+ device_lists=DeviceListUpdates(),
277
+ device_one_time_keys_count=device_one_time_keys_count,
278
+ device_unused_fallback_key_types=device_unused_fallback_key_types,
279
+ )
280
+
281
+
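
The `__bool__` overrides on these result classes all encode the same rule: a result with no updates is falsy, which is what lets the sync machinery (and the notifier's polling loop) decide whether there is anything worth sending. A minimal sketch of that convention, using a plain dataclass rather than the attrs classes above (all names here are illustrative):

from dataclasses import dataclass, field

@dataclass
class MiniSyncResult:
    joined: list = field(default_factory=list)
    to_device: list = field(default_factory=list)

    def __bool__(self) -> bool:
        # Falsy unless at least one section carries an update.
        return bool(self.joined or self.to_device)

def first_non_empty(results):
    # Stand-in for the notifier: skip empty results, return the first real one.
    for result in results:
        if result:
            return result
    return None

assert not MiniSyncResult()
assert first_non_empty(
    [MiniSyncResult(), MiniSyncResult(joined=["!room:test"])]
).joined == ["!room:test"]
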
282
+ class SyncHandler:
283
+ def __init__(self, hs: "HomeServer"):
284
+ self.server_name = hs.hostname
285
+ self.hs_config = hs.config
286
+ self.store = hs.get_datastores().main
287
+ self.notifier = hs.get_notifier()
288
+ self.presence_handler = hs.get_presence_handler()
289
+ self._relations_handler = hs.get_relations_handler()
290
+ self._push_rules_handler = hs.get_push_rules_handler()
291
+ self.event_sources = hs.get_event_sources()
292
+ self.clock = hs.get_clock()
293
+ self.state = hs.get_state_handler()
294
+ self.auth_blocking = hs.get_auth_blocking()
295
+ self._storage_controllers = hs.get_storage_controllers()
296
+ self._state_storage_controller = self._storage_controllers.state
297
+ self._device_handler = hs.get_device_handler()
298
+ self._task_scheduler = hs.get_task_scheduler()
299
+
300
+ self.should_calculate_push_rules = hs.config.push.enable_push
301
+
302
+ # TODO: flush cache entries on subsequent sync request.
303
+ # Once we get the next /sync request (ie, one with the same access token
304
+ # that sets 'since' to 'next_batch'), we know that device won't need a
305
+ # cached result any more, and we could flush the entry from the cache to save
306
+ # memory.
307
+ self.response_cache: ResponseCache[SyncRequestKey] = ResponseCache(
308
+ clock=hs.get_clock(),
309
+ name="sync",
310
+ server_name=self.server_name,
311
+ timeout_ms=hs.config.caches.sync_response_cache_duration,
312
+ )
313
+
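
The response cache initialised above exists so that a client which retries the same `/sync` request (same request key) while the first attempt is still in flight gets that attempt's result instead of triggering the work twice; the real cache also keeps results for a short timeout afterwards. A rough sketch of the in-flight half of that idea with plain asyncio, not Synapse's actual `ResponseCache` API (the class and its behaviour here are simplified assumptions):

import asyncio

class TinyResponseCache:
    def __init__(self) -> None:
        self._pending: dict = {}  # request key -> asyncio.Task

    async def wrap(self, key, func, *args):
        # Reuse the in-flight (or already finished) computation for this key.
        task = self._pending.get(key)
        if task is None:
            task = asyncio.create_task(func(*args))
            self._pending[key] = task
        return await task

async def main() -> None:
    calls = 0

    async def expensive_sync(user_id: str) -> str:
        nonlocal calls
        calls += 1
        await asyncio.sleep(0.01)
        return f"sync result for {user_id}"

    cache = TinyResponseCache()
    results = await asyncio.gather(
        cache.wrap(("@alice:test", "device1"), expensive_sync, "@alice:test"),
        cache.wrap(("@alice:test", "device1"), expensive_sync, "@alice:test"),
    )
    assert results[0] == results[1] and calls == 1  # deduplicated

asyncio.run(main())
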
314
+ # ExpiringCache((User, Device)) -> LruCache(user_id => event_id)
315
+ self.lazy_loaded_members_cache: ExpiringCache[
316
+ tuple[str, str | None], LruCache[str, str]
317
+ ] = ExpiringCache(
318
+ cache_name="lazy_loaded_members_cache",
319
+ server_name=self.server_name,
320
+ hs=hs,
321
+ clock=self.clock,
322
+ max_len=0,
323
+ expiry_ms=LAZY_LOADED_MEMBERS_CACHE_MAX_AGE,
324
+ )
325
+
326
+ self.rooms_to_exclude_globally = hs.config.server.rooms_to_exclude_from_sync
327
+
328
+ async def wait_for_sync_for_user(
329
+ self,
330
+ requester: Requester,
331
+ sync_config: SyncConfig,
332
+ request_key: SyncRequestKey,
333
+ since_token: StreamToken | None = None,
334
+ timeout: int = 0,
335
+ full_state: bool = False,
336
+ ) -> SyncResult:
337
+ """Get the sync for a client if we have new data for it now. Otherwise
338
+ wait for new data to arrive on the server. If the timeout expires, then
339
+ return an empty sync result.
340
+
341
+ Args:
342
+ requester: The user requesting the sync response.
343
+ sync_config: Config/info necessary to process the sync request.
344
+ sync_version: Determines what kind of sync response to generate.
345
+ request_key: The key to use for caching the response.
346
+ since_token: The point in the stream to sync from.
347
+ timeout: How long to wait for new data to arrive before giving up.
348
+ full_state: Whether to return the full state for each room.
349
+
350
+ Returns:
351
+             A full `SyncResult`.
352
+ """
353
+ # If the user is not part of the mau group, then check that limits have
354
+         # not been exceeded (if they are not part of the group by this point, it is
355
+         # almost certain that auth blocking will occur).
356
+ user_id = sync_config.user.to_string()
357
+ await self.auth_blocking.check_auth_blocking(requester=requester)
358
+
359
+ res = await self.response_cache.wrap(
360
+ request_key,
361
+ self._wait_for_sync_for_user,
362
+ sync_config,
363
+ since_token,
364
+ timeout,
365
+ full_state,
366
+ cache_context=True,
367
+ )
368
+ logger.debug("Returning sync response for %s", user_id)
369
+ return res
370
+
371
+ async def _wait_for_sync_for_user(
372
+ self,
373
+ sync_config: SyncConfig,
374
+ since_token: StreamToken | None,
375
+ timeout: int,
376
+ full_state: bool,
377
+ cache_context: ResponseCacheContext[SyncRequestKey],
378
+ ) -> SyncResult:
379
+ """The start of the machinery that produces a /sync response.
380
+
381
+ See https://spec.matrix.org/v1.1/client-server-api/#syncing for full details.
382
+
383
+ This method does high-level bookkeeping:
384
+ - tracking the kind of sync in the logging context
385
+ - deleting any to_device messages whose delivery has been acknowledged.
386
+ - deciding if we should dispatch an instant or delayed response
387
+ - marking the sync as being lazily loaded, if appropriate
388
+
389
+ Computing the body of the response begins in the next method,
390
+ `current_sync_for_user`.
391
+ """
392
+ if since_token is None:
393
+ sync_type = "initial_sync"
394
+ elif full_state:
395
+ sync_type = "full_state_sync"
396
+ else:
397
+ sync_type = "incremental_sync"
398
+
399
+ sync_label = f"sync_v2:{sync_type}"
400
+
401
+ context = current_context()
402
+ if context:
403
+ context.tag = sync_label
404
+
405
+ if since_token is not None:
406
+ # We need to make sure this worker has caught up with the token. If
407
+ # this returns false it means we timed out waiting, and we should
408
+ # just return an empty response.
409
+ start = self.clock.time_msec()
410
+ if not await self.notifier.wait_for_stream_token(since_token):
411
+ logger.warning(
412
+ "Timed out waiting for worker to catch up. Returning empty response"
413
+ )
414
+ device_id = sync_config.device_id
415
+ one_time_keys_count: JsonMapping = {}
416
+ unused_fallback_key_types: list[str] = []
417
+ if device_id:
418
+ user_id = sync_config.user.to_string()
419
+ # TODO: We should have a way to let clients differentiate between the states of:
420
+ # * no change in OTK count since the provided since token
421
+ # * the server has zero OTKs left for this device
422
+ # Spec issue: https://github.com/matrix-org/matrix-doc/issues/3298
423
+ one_time_keys_count = await self.store.count_e2e_one_time_keys(
424
+ user_id, device_id
425
+ )
426
+ unused_fallback_key_types = list(
427
+ await self.store.get_e2e_unused_fallback_key_types(
428
+ user_id, device_id
429
+ )
430
+ )
431
+
432
+ cache_context.should_cache = False # Don't cache empty responses
433
+ return SyncResult.empty(
434
+ since_token, one_time_keys_count, unused_fallback_key_types
435
+ )
436
+
437
+ # If we've spent significant time waiting to catch up, take it off
438
+ # the timeout.
439
+ now = self.clock.time_msec()
440
+ if now - start > 1_000:
441
+ timeout -= now - start
442
+ timeout = max(timeout, 0)
443
+
444
+ # if we have a since token, delete any to-device messages before that token
445
+ # (since we now know that the device has received them)
446
+ if since_token is not None:
447
+ since_stream_id = since_token.to_device_key
448
+ deleted = await self.store.delete_messages_for_device(
449
+ sync_config.user.to_string(),
450
+ sync_config.device_id,
451
+ since_stream_id,
452
+ )
453
+ logger.debug(
454
+ "Deleted %d to-device messages up to %d", deleted, since_stream_id
455
+ )
456
+
457
+ if timeout == 0 or since_token is None or full_state:
458
+ # we are going to return immediately, so don't bother calling
459
+ # notifier.wait_for_events.
460
+ result = await self.current_sync_for_user(
461
+ sync_config, since_token, full_state=full_state
462
+ )
463
+ else:
464
+ # Otherwise, we wait for something to happen and report it to the user.
465
+ async def current_sync_callback(
466
+ before_token: StreamToken, after_token: StreamToken
467
+ ) -> SyncResult:
468
+ return await self.current_sync_for_user(sync_config, since_token)
469
+
470
+ result = await self.notifier.wait_for_events(
471
+ sync_config.user.to_string(),
472
+ timeout,
473
+ current_sync_callback,
474
+ from_token=since_token,
475
+ )
476
+
477
+ # if nothing has happened in any of the users' rooms since /sync was called,
478
+ # the resultant next_batch will be the same as since_token (since the result
479
+ # is generated when wait_for_events is first called, and not regenerated
480
+ # when wait_for_events times out).
481
+ #
482
+ # If that happens, we mustn't cache it, so that when the client comes back
483
+ # with the same cache token, we don't immediately return the same empty
484
+ # result, causing a tightloop. (https://github.com/matrix-org/synapse/issues/8518)
485
+ if result.next_batch == since_token:
486
+ cache_context.should_cache = False
487
+
488
+ if result:
489
+ if sync_config.filter_collection.lazy_load_members():
490
+ lazy_loaded = "true"
491
+ else:
492
+ lazy_loaded = "false"
493
+ non_empty_sync_counter.labels(
494
+ type=sync_label,
495
+ lazy_loaded=lazy_loaded,
496
+ **{SERVER_NAME_LABEL: self.server_name},
497
+ ).inc()
498
+
499
+ return result
500
+
501
+ async def current_sync_for_user(
502
+ self,
503
+ sync_config: SyncConfig,
504
+ since_token: StreamToken | None = None,
505
+ full_state: bool = False,
506
+ ) -> SyncResult:
507
+ """
508
+ Generates the response body of a sync result, represented as a
509
+ `SyncResult`.
510
+
511
+ This is a wrapper around `generate_sync_result` which starts an open tracing
512
+ span to track the sync. See `generate_sync_result` for the next part of your
513
+ indoctrination.
514
+
515
+ Args:
516
+ sync_config: Config/info necessary to process the sync request.
517
+ sync_version: Determines what kind of sync response to generate.
518
+             since_token: The point in the stream to sync from.
519
+ full_state: Whether to return the full state for each room.
520
+
521
+ Returns:
522
+             A full `SyncResult`.
523
+ """
524
+ with start_active_span("sync.current_sync_for_user"):
525
+ log_kv({"since_token": since_token})
526
+
527
+ # Go through the `/sync` v2 path
528
+ sync_result = await self.generate_sync_result(
529
+ sync_config, since_token, full_state
530
+ )
531
+
532
+ set_tag(SynapseTags.SYNC_RESULT, bool(sync_result))
533
+ return sync_result
534
+
535
+ async def ephemeral_by_room(
536
+ self,
537
+ sync_result_builder: "SyncResultBuilder",
538
+ now_token: StreamToken,
539
+ since_token: StreamToken | None = None,
540
+ ) -> tuple[StreamToken, dict[str, list[JsonDict]]]:
541
+ """Get the ephemeral events for each room the user is in
542
+ Args:
543
+ sync_result_builder
544
+ now_token: Where the server is currently up to.
545
+ since_token: Where the server was when the client
546
+ last synced.
547
+ Returns:
548
+             A tuple of the now StreamToken, updated to reflect which typing
549
+ events are included, and a dict mapping from room_id to a list of
550
+ ephemeral events for that room.
551
+ """
552
+
553
+ sync_config = sync_result_builder.sync_config
554
+
555
+ with Measure(
556
+ self.clock, name="ephemeral_by_room", server_name=self.server_name
557
+ ):
558
+ typing_key = since_token.typing_key if since_token else 0
559
+
560
+ room_ids = sync_result_builder.joined_room_ids
561
+
562
+ typing_source = self.event_sources.sources.typing
563
+ typing, typing_key = await typing_source.get_new_events(
564
+ user=sync_config.user,
565
+ from_key=typing_key,
566
+ limit=sync_config.filter_collection.ephemeral_limit(),
567
+ room_ids=room_ids,
568
+ is_guest=sync_config.is_guest,
569
+ )
570
+ now_token = now_token.copy_and_replace(StreamKeyType.TYPING, typing_key)
571
+
572
+ ephemeral_by_room: JsonDict = {}
573
+
574
+ for event in typing:
575
+ room_id = event["room_id"]
576
+ ephemeral_by_room.setdefault(room_id, []).append(event)
577
+
578
+ receipt_key = (
579
+ since_token.receipt_key
580
+ if since_token
581
+ else MultiWriterStreamToken(stream=0)
582
+ )
583
+
584
+ receipt_source = self.event_sources.sources.receipt
585
+ receipts, receipt_key = await receipt_source.get_new_events(
586
+ user=sync_config.user,
587
+ from_key=receipt_key,
588
+ limit=sync_config.filter_collection.ephemeral_limit(),
589
+ room_ids=room_ids,
590
+ is_guest=sync_config.is_guest,
591
+ )
592
+ now_token = now_token.copy_and_replace(StreamKeyType.RECEIPT, receipt_key)
593
+
594
+ for event in receipts:
595
+ room_id = event["room_id"]
596
+ ephemeral_by_room.setdefault(room_id, []).append(event)
597
+
598
+ return now_token, ephemeral_by_room
599
+
600
+ async def _load_filtered_recents(
601
+ self,
602
+ room_id: str,
603
+ sync_result_builder: "SyncResultBuilder",
604
+ sync_config: SyncConfig,
605
+ upto_token: StreamToken,
606
+ since_token: StreamToken | None = None,
607
+ potential_recents: list[EventBase] | None = None,
608
+ newly_joined_room: bool = False,
609
+ ) -> TimelineBatch:
610
+ """Create a timeline batch for the room
611
+
612
+ Args:
613
+ room_id
614
+ sync_result_builder
615
+ sync_config
616
+ upto_token: The token up to which we should fetch (more) events.
617
+                If `potential_recents` is non-empty then this is the *start* of
618
+                the list.
619
+ since_token
620
+ potential_recents: If non-empty, the events between the since token
621
+ and current token to send down to clients.
622
+ newly_joined_room
623
+ """
624
+ with Measure(
625
+ self.clock, name="load_filtered_recents", server_name=self.server_name
626
+ ):
627
+ timeline_limit = sync_config.filter_collection.timeline_limit()
628
+ block_all_timeline = (
629
+ sync_config.filter_collection.blocks_all_room_timeline()
630
+ )
631
+
632
+ if (
633
+ potential_recents is None
634
+ or newly_joined_room
635
+ or timeline_limit < len(potential_recents)
636
+ ):
637
+ limited = True
638
+ else:
639
+ limited = False
640
+
641
+ # Check if there is a gap, if so we need to mark this as limited and
642
+ # recalculate which events to send down.
643
+ gap_token = await self.store.get_timeline_gaps(
644
+ room_id,
645
+ since_token.room_key if since_token else None,
646
+ sync_result_builder.now_token.room_key,
647
+ )
648
+ if gap_token:
649
+ # There's a gap, so we need to ignore the passed in
650
+ # `potential_recents`, and reset `upto_token` to match.
651
+ potential_recents = None
652
+ upto_token = sync_result_builder.now_token
653
+ limited = True
654
+
655
+ log_kv({"limited": limited})
656
+
657
+ if potential_recents:
658
+ recents = await sync_config.filter_collection.filter_room_timeline(
659
+ potential_recents
660
+ )
661
+ log_kv({"recents_after_sync_filtering": len(recents)})
662
+
663
+ # We check if there are any state events, if there are then we pass
664
+ # all current state events to the filter_events function. This is to
665
+ # ensure that we always include current state in the timeline
666
+ current_state_ids: frozenset[str] = frozenset()
667
+ if any(e.is_state() for e in recents):
668
+ # FIXME(faster_joins): We use the partial state here as
669
+ # we don't want to block `/sync` on finishing a lazy join.
670
+ # Which should be fine once
671
+ # https://github.com/matrix-org/synapse/issues/12989 is resolved,
672
+ # since we shouldn't reach here anymore?
673
+ # Note that we use the current state as a whitelist for filtering
674
+ # `recents`, so partial state is only a problem when a membership
675
+ # event turns up in `recents` but has not made it into the current
676
+ # state.
677
+ current_state_ids = (
678
+ await self.store.check_if_events_in_current_state(
679
+ {e.event_id for e in recents if e.is_state()}
680
+ )
681
+ )
682
+
683
+ recents = await filter_events_for_client(
684
+ self._storage_controllers,
685
+ sync_config.user.to_string(),
686
+ recents,
687
+ always_include_ids=current_state_ids,
688
+ )
689
+ log_kv({"recents_after_visibility_filtering": len(recents)})
690
+ else:
691
+ recents = []
692
+
693
+ if not limited or block_all_timeline:
694
+ prev_batch_token = upto_token
695
+ if recents:
696
+ assert recents[0].internal_metadata.stream_ordering
697
+ room_key = RoomStreamToken(
698
+ stream=recents[0].internal_metadata.stream_ordering - 1
699
+ )
700
+ prev_batch_token = upto_token.copy_and_replace(
701
+ StreamKeyType.ROOM, room_key
702
+ )
703
+
704
+ return TimelineBatch(
705
+ events=recents, prev_batch=prev_batch_token, limited=False
706
+ )
707
+
708
+ filtering_factor = 2
709
+ load_limit = max(timeline_limit * filtering_factor, 10)
710
+             max_repeat = 5 # Only try a few times per room, otherwise give up
711
+ room_key = upto_token.room_key
712
+ end_key = room_key
713
+
714
+ since_key = None
715
+ if since_token and gap_token:
716
+ # If there is a gap then we need to only include events after
717
+ # it.
718
+ since_key = gap_token
719
+ elif since_token and not newly_joined_room:
720
+ since_key = since_token.room_key
721
+
722
+ while limited and len(recents) < timeline_limit and max_repeat:
723
+ # For initial `/sync`, we want to view a historical section of the
724
+ # timeline; to fetch events by `topological_ordering` (best
725
+ # representation of the room DAG as others were seeing it at the time).
726
+ # This also aligns with the order that `/messages` returns events in.
727
+ #
728
+ # For incremental `/sync`, we want to get all updates for rooms since
729
+ # the last `/sync` (regardless if those updates arrived late or happened
730
+ # a while ago in the past); to fetch events by `stream_ordering` (in the
731
+ # order they were received by the server).
732
+ #
733
+ # Relevant spec issue: https://github.com/matrix-org/matrix-spec/issues/1917
734
+ #
735
+ # FIXME: Using workaround for mypy,
736
+ # https://github.com/python/mypy/issues/10740#issuecomment-1997047277 and
737
+ # https://github.com/python/mypy/issues/17479
738
+ paginate_room_events_by_topological_ordering: PaginateFunction = (
739
+ self.store.paginate_room_events_by_topological_ordering
740
+ )
741
+ paginate_room_events_by_stream_ordering: PaginateFunction = (
742
+ self.store.paginate_room_events_by_stream_ordering
743
+ )
744
+ pagination_method: PaginateFunction = (
745
+                 # Use `topological_ordering` for historical events
746
+ paginate_room_events_by_topological_ordering
747
+ if since_key is None
748
+ # Use `stream_ordering` for updates
749
+ else paginate_room_events_by_stream_ordering
750
+ )
751
+ events, end_key, limited = await pagination_method(
752
+ room_id=room_id,
753
+ # The bounds are reversed so we can paginate backwards
754
+ # (from newer to older events) starting at to_bound.
755
+ # This ensures we fill the `limit` with the newest events first,
756
+ from_key=end_key,
757
+ to_key=since_key,
758
+ direction=Direction.BACKWARDS,
759
+ limit=load_limit,
760
+ )
761
+ # We want to return the events in ascending order (the last event is the
762
+ # most recent).
763
+ events.reverse()
764
+
765
+ log_kv({"loaded_recents": len(events)})
766
+
767
+ loaded_recents = (
768
+ await sync_config.filter_collection.filter_room_timeline(events)
769
+ )
770
+
771
+ log_kv({"loaded_recents_after_sync_filtering": len(loaded_recents)})
772
+
773
+ # We check if there are any state events, if there are then we pass
774
+ # all current state events to the filter_events function. This is to
775
+ # ensure that we always include current state in the timeline
776
+ current_state_ids = frozenset()
777
+ if any(e.is_state() for e in loaded_recents):
778
+ # FIXME(faster_joins): We use the partial state here as
779
+ # we don't want to block `/sync` on finishing a lazy join.
780
+ # Which should be fine once
781
+ # https://github.com/matrix-org/synapse/issues/12989 is resolved,
782
+ # since we shouldn't reach here anymore?
783
+ # Note that we use the current state as a whitelist for filtering
784
+ # `loaded_recents`, so partial state is only a problem when a
785
+ # membership event turns up in `loaded_recents` but has not made it
786
+ # into the current state.
787
+ current_state_ids = (
788
+ await self.store.check_if_events_in_current_state(
789
+ {e.event_id for e in loaded_recents if e.is_state()}
790
+ )
791
+ )
792
+
793
+ filtered_recents = await filter_events_for_client(
794
+ self._storage_controllers,
795
+ sync_config.user.to_string(),
796
+ loaded_recents,
797
+ always_include_ids=current_state_ids,
798
+ )
799
+
800
+ loaded_recents = []
801
+ for event in filtered_recents:
802
+ if event.type == EventTypes.CallInvite:
803
+ room_info = await self.store.get_room_with_stats(event.room_id)
804
+ assert room_info is not None
805
+ if room_info.join_rules == JoinRules.PUBLIC:
806
+ continue
807
+ loaded_recents.append(event)
808
+
809
+ log_kv({"loaded_recents_after_client_filtering": len(loaded_recents)})
810
+
811
+ loaded_recents.extend(recents)
812
+ recents = loaded_recents
813
+
814
+ max_repeat -= 1
815
+
816
+ if len(recents) > timeline_limit:
817
+ limited = True
818
+ recents = recents[-timeline_limit:]
819
+ assert recents[0].internal_metadata.stream_ordering
820
+ room_key = RoomStreamToken(
821
+ stream=recents[0].internal_metadata.stream_ordering - 1
822
+ )
823
+
824
+ prev_batch_token = upto_token.copy_and_replace(StreamKeyType.ROOM, room_key)
825
+
826
+ # Don't bother to bundle aggregations if the timeline is unlimited,
827
+ # as clients will have all the necessary information.
828
+ bundled_aggregations = None
829
+ if limited or newly_joined_room:
830
+ bundled_aggregations = (
831
+ await self._relations_handler.get_bundled_aggregations(
832
+ recents, sync_config.user.to_string()
833
+ )
834
+ )
835
+
836
+ return TimelineBatch(
837
+ events=recents,
838
+ prev_batch=prev_batch_token,
839
+ # Also mark as limited if this is a new room or there has been a gap
840
+ # (to force client to paginate the gap).
841
+ limited=limited or newly_joined_room or gap_token is not None,
842
+ bundled_aggregations=bundled_aggregations,
843
+ )
844
+
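
To make the shape of the pagination loop in `_load_filtered_recents` concrete: events are fetched newest-to-oldest (the `Direction.BACKWARDS` pagination), reversed into ascending order, trimmed to the timeline limit, and the batch is marked as limited with a `prev_batch` position from which the client can paginate the rest. The sketch below shows that flow with plain integers standing in for Synapse's event and token types, so all names are illustrative rather than the real API:

def build_timeline_batch(orderings, since, upto, timeline_limit):
    """orderings: ascending stream orderings already persisted for the room."""
    # Fetch backwards from `upto`, stopping at `since`, newest first.
    window = [o for o in orderings if since < o <= upto]
    window.sort(reverse=True)
    limited = len(window) > timeline_limit
    kept = window[:timeline_limit]
    kept.reverse()  # clients expect ascending order, oldest first
    # A /messages request for the gap would resume just before the first kept event.
    prev_batch = kept[0] - 1 if kept else upto
    return kept, limited, prev_batch

events, limited, prev_batch = build_timeline_batch(
    orderings=[1, 2, 3, 4, 5, 6], since=0, upto=6, timeline_limit=3
)
assert events == [4, 5, 6] and limited and prev_batch == 3
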
845
+ async def compute_summary(
846
+ self,
847
+ room_id: str,
848
+ sync_config: SyncConfig,
849
+ batch: TimelineBatch,
850
+ state: MutableStateMap[EventBase],
851
+ now_token: StreamToken,
852
+ ) -> JsonDict | None:
853
+ """Works out a room summary block for this room, summarising the number
854
+ of joined members in the room, and providing the 'hero' members if the
855
+ room has no name so clients can consistently name rooms. Also adds
856
+ state events to 'state' if needed to describe the heroes.
857
+
858
+         Args:
859
+ room_id
860
+ sync_config
861
+ batch: The timeline batch for the room that will be sent to the user.
862
+ state: State as returned by compute_state_delta
863
+ now_token: Token of the end of the current batch.
864
+ """
865
+
866
+ # FIXME: we could/should get this from room_stats when matthew/stats lands
867
+
868
+ # FIXME: this promulgates https://github.com/matrix-org/synapse/issues/3305
869
+ last_events, _ = await self.store.get_recent_event_ids_for_room(
870
+ room_id, end_token=now_token.room_key, limit=1
871
+ )
872
+
873
+ if not last_events:
874
+ return None
875
+
876
+ last_event = last_events[-1]
877
+ state_ids = await self._state_storage_controller.get_state_ids_for_event(
878
+ last_event.event_id,
879
+ state_filter=StateFilter.from_types(
880
+ [(EventTypes.Name, ""), (EventTypes.CanonicalAlias, "")]
881
+ ),
882
+ )
883
+
884
+ # this is heavily cached, thus: fast.
885
+ details = await self.store.get_room_summary(room_id)
886
+
887
+ name_id = state_ids.get((EventTypes.Name, ""))
888
+ canonical_alias_id = state_ids.get((EventTypes.CanonicalAlias, ""))
889
+
890
+ summary: JsonDict = {}
891
+ empty_ms = MemberSummary([], 0)
892
+
893
+ # TODO: only send these when they change.
894
+ summary["m.joined_member_count"] = details.get(Membership.JOIN, empty_ms).count
895
+ summary["m.invited_member_count"] = details.get(
896
+ Membership.INVITE, empty_ms
897
+ ).count
898
+
899
+ # if the room has a name or canonical_alias set, we can skip
900
+ # calculating heroes. Empty strings are falsey, so we check
901
+ # for the "name" value and default to an empty string.
902
+ if name_id:
903
+ name = await self.store.get_event(name_id, allow_none=True)
904
+ if name and name.content.get("name"):
905
+ return summary
906
+
907
+ if canonical_alias_id:
908
+ canonical_alias = await self.store.get_event(
909
+ canonical_alias_id, allow_none=True
910
+ )
911
+ if canonical_alias and canonical_alias.content.get("alias"):
912
+ return summary
913
+
914
+ # FIXME: only build up a member_ids list for our heroes
915
+ member_ids = {}
916
+ for membership in (
917
+ Membership.JOIN,
918
+ Membership.INVITE,
919
+ Membership.LEAVE,
920
+ Membership.BAN,
921
+ ):
922
+ for user_id, event_id in details.get(membership, empty_ms).members:
923
+ member_ids[user_id] = event_id
924
+
925
+ me = sync_config.user.to_string()
926
+ summary["m.heroes"] = extract_heroes_from_room_summary(details, me)
927
+
928
+ if not sync_config.filter_collection.lazy_load_members():
929
+ return summary
930
+
931
+ # ensure we send membership events for heroes if needed
932
+ cache_key = (sync_config.user.to_string(), sync_config.device_id)
933
+ cache = self.get_lazy_loaded_members_cache(cache_key)
934
+
935
+ # track which members the client should already know about via LL:
936
+ # Ones which are already in state...
937
+ existing_members = {
938
+ user_id for (typ, user_id) in state.keys() if typ == EventTypes.Member
939
+ }
940
+
941
+ # ...or ones which are in the timeline...
942
+ for ev in batch.events:
943
+ if ev.type == EventTypes.Member:
944
+ existing_members.add(ev.state_key)
945
+
946
+ # ...and then ensure any missing ones get included in state.
947
+ missing_hero_event_ids = [
948
+ member_ids[hero_id]
949
+ for hero_id in summary["m.heroes"]
950
+ if (
951
+ cache.get(hero_id) != member_ids[hero_id]
952
+ and hero_id not in existing_members
953
+ )
954
+ ]
955
+
956
+ missing_hero_state = await self.store.get_events(missing_hero_event_ids)
957
+
958
+ for s in missing_hero_state.values():
959
+ cache.set(s.state_key, s.event_id)
960
+ state[(EventTypes.Member, s.state_key)] = s
961
+
962
+ return summary
963
+
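
`compute_summary` only has to produce member counts plus a handful of "hero" users when the room has no name or canonical alias. The hero-selection rules live in `extract_heroes_from_room_summary`; the sketch below shows the general idea with plain dictionaries and a simplified rule (up to five other members, joined and invited first), so treat the details as assumptions rather than Synapse's exact behaviour:

def mini_room_summary(memberships, me):
    """memberships: user_id -> membership string ("join", "invite", "leave", ...)."""
    joined = [u for u, m in memberships.items() if m == "join"]
    invited = [u for u, m in memberships.items() if m == "invite"]
    others = [u for u, m in memberships.items() if m in ("leave", "ban")]
    heroes = [u for u in joined + invited + others if u != me][:5]
    return {
        "m.joined_member_count": len(joined),
        "m.invited_member_count": len(invited),
        "m.heroes": heroes,
    }

summary = mini_room_summary(
    {"@me:test": "join", "@a:test": "join", "@b:test": "invite", "@c:test": "leave"},
    me="@me:test",
)
assert summary["m.joined_member_count"] == 2
assert summary["m.heroes"] == ["@a:test", "@b:test", "@c:test"]
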
964
+ def get_lazy_loaded_members_cache(
965
+ self, cache_key: tuple[str, str | None]
966
+ ) -> LruCache[str, str]:
967
+ cache: LruCache[str, str] | None = self.lazy_loaded_members_cache.get(cache_key)
968
+ if cache is None:
969
+ logger.debug("creating LruCache for %r", cache_key)
970
+ cache = LruCache(
971
+ max_size=LAZY_LOADED_MEMBERS_CACHE_MAX_SIZE,
972
+ clock=self.clock,
973
+ server_name=self.server_name,
974
+ )
975
+ self.lazy_loaded_members_cache[cache_key] = cache
976
+ else:
977
+ logger.debug("found LruCache for %r", cache_key)
978
+ return cache
979
+
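
The per-(user, device) LruCache returned above records the last membership event ID sent for each user, so later syncs can drop membership events the client has already seen. The following sketch captures that deduplication with an ordinary dict standing in for the LRU cache; the function and variable names are made up for illustration:

def dedupe_memberships(member_event_ids, sent_cache):
    """member_event_ids: user_id -> membership event_id we are about to send."""
    to_send = {}
    for user_id, event_id in member_event_ids.items():
        if sent_cache.get(user_id) != event_id:
            to_send[user_id] = event_id  # new to this client, or changed since last sync
            sent_cache[user_id] = event_id
    return to_send

cache = {}
first = dedupe_memberships({"@a:test": "$ev1", "@b:test": "$ev2"}, cache)
second = dedupe_memberships({"@a:test": "$ev1", "@b:test": "$ev3"}, cache)
assert first == {"@a:test": "$ev1", "@b:test": "$ev2"}
assert second == {"@b:test": "$ev3"}  # only the membership that actually changed
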
980
+ async def compute_state_delta(
981
+ self,
982
+ room_id: str,
983
+ batch: TimelineBatch,
984
+ sync_config: SyncConfig,
985
+ since_token: StreamToken | None,
986
+ end_token: StreamToken,
987
+ full_state: bool,
988
+ joined: bool,
989
+ ) -> MutableStateMap[EventBase]:
990
+ """Works out the difference in state between the end of the previous sync and
991
+ the start of the timeline.
992
+
993
+ Args:
994
+ room_id:
995
+ batch: The timeline batch for the room that will be sent to the user.
996
+ sync_config:
997
+ since_token: Token of the end of the previous batch. May be `None`.
998
+ end_token: Token of the end of the current batch. Normally this will be
999
+ the same as the global "now_token", but if the user has left the room,
1000
+ the point just after their leave event.
1001
+ full_state: Whether to force returning the full state.
1002
+ `lazy_load_members` still applies when `full_state` is `True`.
1003
+ joined: whether the user is currently joined to the room
1004
+
1005
+ Returns:
1006
+ The state to return in the sync response for the room.
1007
+
1008
+ Clients will overlay this onto the state at the end of the previous sync to
1009
+ arrive at the state at the start of the timeline.
1010
+
1011
+ Clients will then overlay state events in the timeline to arrive at the
1012
+ state at the end of the timeline, in preparation for the next sync.
1013
+ """
1014
+ # TODO(mjark) Check if the state events were received by the server
1015
+ # after the previous sync, since we need to include those state
1016
+ # updates even if they occurred logically before the previous event.
1017
+ # TODO(mjark) Check for new redactions in the state events.
1018
+
1019
+ with Measure(
1020
+ self.clock, name="compute_state_delta", server_name=self.server_name
1021
+ ):
1022
+ # The memberships needed for events in the timeline.
1023
+ # Only calculated when `lazy_load_members` is on.
1024
+ members_to_fetch: set[str] | None = None
1025
+
1026
+ # A dictionary mapping user IDs to the first event in the timeline sent by
1027
+ # them. Only calculated when `lazy_load_members` is on.
1028
+ first_event_by_sender_map: dict[str, EventBase] | None = None
1029
+
1030
+ # The contribution to the room state from state events in the timeline.
1031
+ # Only contains the last event for any given state key.
1032
+ timeline_state: StateMap[str]
1033
+
1034
+ lazy_load_members = sync_config.filter_collection.lazy_load_members()
1035
+ include_redundant_members = (
1036
+ sync_config.filter_collection.include_redundant_members()
1037
+ )
1038
+
1039
+ if lazy_load_members:
1040
+ # We only request state for the members needed to display the
1041
+ # timeline:
1042
+
1043
+ timeline_state = {}
1044
+
1045
+ # Membership events to fetch that can be found in the room state, or in
1046
+ # the case of partial state rooms, the auth events of timeline events.
1047
+ members_to_fetch = set()
1048
+ first_event_by_sender_map = {}
1049
+ for event in batch.events:
1050
+ # Build the map from user IDs to the first timeline event they sent.
1051
+ if event.sender not in first_event_by_sender_map:
1052
+ first_event_by_sender_map[event.sender] = event
1053
+
1054
+ # We need the event's sender, unless their membership was in a
1055
+ # previous timeline event.
1056
+ if (EventTypes.Member, event.sender) not in timeline_state:
1057
+ members_to_fetch.add(event.sender)
1058
+ # FIXME: we also care about invite targets etc.
1059
+
1060
+ if event.is_state():
1061
+ timeline_state[(event.type, event.state_key)] = event.event_id
1062
+
1063
+ else:
1064
+ timeline_state = {
1065
+ (event.type, event.state_key): event.event_id
1066
+ for event in batch.events
1067
+ if event.is_state()
1068
+ }
1069
+
1070
+ # Now calculate the state to return in the sync response for the room.
1071
+ # This is more or less the change in state between the end of the previous
1072
+ # sync's timeline and the start of the current sync's timeline.
1073
+ # See the docstring above for details.
1074
+ state_ids: StateMap[str]
1075
+ # We need to know whether the state we fetch may be partial, so check
1076
+ # whether the room is partial stated *before* fetching it.
1077
+ is_partial_state_room = await self.store.is_partial_state_room(room_id)
1078
+ if full_state:
1079
+ state_ids = await self._compute_state_delta_for_full_sync(
1080
+ room_id,
1081
+ sync_config,
1082
+ batch,
1083
+ end_token,
1084
+ members_to_fetch,
1085
+ timeline_state,
1086
+ joined,
1087
+ )
1088
+ else:
1089
+ # If this is an initial sync then full_state should be set, and
1090
+ # that case is handled above. We assert here to ensure that this
1091
+ # is indeed the case.
1092
+ assert since_token is not None
1093
+
1094
+ state_ids = await self._compute_state_delta_for_incremental_sync(
1095
+ room_id,
1096
+ sync_config,
1097
+ batch,
1098
+ since_token,
1099
+ end_token,
1100
+ members_to_fetch,
1101
+ timeline_state,
1102
+ )
1103
+
1104
+ # If we only have partial state for the room, `state_ids` may be missing the
1105
+ # memberships we wanted. We attempt to find some by digging through the auth
1106
+ # events of timeline events.
1107
+ if lazy_load_members and is_partial_state_room:
1108
+ assert members_to_fetch is not None
1109
+ assert first_event_by_sender_map is not None
1110
+
1111
+ additional_state_ids = (
1112
+ await self._find_missing_partial_state_memberships(
1113
+ room_id, members_to_fetch, first_event_by_sender_map, state_ids
1114
+ )
1115
+ )
1116
+ state_ids = {**state_ids, **additional_state_ids}
1117
+
1118
+ # At this point, if `lazy_load_members` is enabled, `state_ids` includes
1119
+ # the memberships of all event senders in the timeline. This is because we
1120
+ # may not have sent the memberships in a previous sync.
1121
+
1122
+ # When `include_redundant_members` is on, we send all the lazy-loaded
1123
+ # memberships of event senders. Otherwise we make an effort to limit the set
1124
+ # of memberships we send to those that we have not already sent to this client.
1125
+ if lazy_load_members and not include_redundant_members:
1126
+ cache_key = (sync_config.user.to_string(), sync_config.device_id)
1127
+ cache = self.get_lazy_loaded_members_cache(cache_key)
1128
+
1129
+ # if it's a new sync sequence, then assume the client has had
1130
+ # amnesia and doesn't want any recent lazy-loaded members
1131
+ # de-duplicated.
1132
+ if since_token is None:
1133
+ logger.debug("clearing LruCache for %r", cache_key)
1134
+ cache.clear()
1135
+ else:
1136
+ # only send members which aren't in our LruCache (either
1137
+ # because they're new to this client or have been pushed out
1138
+ # of the cache)
1139
+ logger.debug("filtering state from %r...", state_ids)
1140
+ state_ids = {
1141
+ t: event_id
1142
+ for t, event_id in state_ids.items()
1143
+ if cache.get(t[1]) != event_id
1144
+ }
1145
+ logger.debug("...to %r", state_ids)
1146
+
1147
+ # add any member IDs we are about to send into our LruCache
1148
+ for t, event_id in itertools.chain(
1149
+ state_ids.items(), timeline_state.items()
1150
+ ):
1151
+ if t[0] == EventTypes.Member:
1152
+ cache.set(t[1], event_id)
1153
+
1154
+ state: dict[str, EventBase] = {}
1155
+ if state_ids:
1156
+ state = await self.store.get_events(list(state_ids.values()))
1157
+
1158
+ return {
1159
+ (e.type, e.state_key): e
1160
+ for e in await sync_config.filter_collection.filter_room_state(
1161
+ list(state.values())
1162
+ )
1163
+ if e.type != EventTypes.Aliases # until MSC2261 or alternative solution
1164
+ }
1165
+
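
The docstring of `compute_state_delta` describes how clients are expected to consume the returned state: overlay it on the state from the previous sync to get the state at the start of the timeline, then apply the state events in the timeline itself to reach the state at the end. A small worked example of that overlay, using `(type, state_key)` tuples as keys and event IDs as values (all sample IDs are invented):

def apply_sync_state(previous_state, state_delta, timeline_state_events):
    # State at the start of the timeline = previous state + returned delta.
    at_start = {**previous_state, **state_delta}
    # State at the end of the timeline = start + state events seen in the timeline.
    at_end = {**at_start, **timeline_state_events}
    return at_start, at_end

prev = {("m.room.name", ""): "$name1", ("m.room.member", "@a:test"): "$join_a"}
delta = {("m.room.name", ""): "$name2"}
timeline = {("m.room.member", "@b:test"): "$join_b"}
start, end = apply_sync_state(prev, delta, timeline)
assert start[("m.room.name", "")] == "$name2"
assert ("m.room.member", "@b:test") not in start
assert ("m.room.member", "@b:test") in end
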
1166
+ async def _compute_state_delta_for_full_sync(
1167
+ self,
1168
+ room_id: str,
1169
+ sync_config: SyncConfig,
1170
+ batch: TimelineBatch,
1171
+ end_token: StreamToken,
1172
+ members_to_fetch: set[str] | None,
1173
+ timeline_state: StateMap[str],
1174
+ joined: bool,
1175
+ ) -> StateMap[str]:
1176
+ """Calculate the state events to be included in a full sync response.
1177
+
1178
+ As with `_compute_state_delta_for_incremental_sync`, the result will include
1179
+ the membership events for the senders of each event in `members_to_fetch`.
1180
+
1181
+ Note that whether this returns the state at the start or the end of the
1182
+ batch depends on `sync_config.use_state_after` (c.f. MSC4222).
1183
+
1184
+ Args:
1185
+ room_id: The room we are calculating for.
1186
+             sync_config: The user that is calling `/sync`.
1187
+ batch: The timeline batch for the room that will be sent to the user.
1188
+ end_token: Token of the end of the current batch. Normally this will be
1189
+ the same as the global "now_token", but if the user has left the room,
1190
+ the point just after their leave event.
1191
+ members_to_fetch: If lazy-loading is enabled, the memberships needed for
1192
+ events in the timeline.
1193
+ timeline_state: The contribution to the room state from state events in
1194
+ `batch`. Only contains the last event for any given state key.
1195
+ joined: whether the user is currently joined to the room
1196
+
1197
+ Returns:
1198
+ A map from (type, state_key) to event_id, for each event that we believe
1199
+ should be included in the `state` or `state_after` part of the sync response.
1200
+ """
1201
+ if members_to_fetch is not None:
1202
+ # Lazy-loading of membership events is enabled.
1203
+ #
1204
+ # Always make sure we load our own membership event so we know if
1205
+ # we're in the room, to fix https://github.com/vector-im/riot-web/issues/7209.
1206
+ #
1207
+         # We only need to apply this on full state syncs given we disabled
1208
+ # LL for incr syncs in https://github.com/matrix-org/synapse/pull/3840.
1209
+ #
1210
+ # We don't insert ourselves into `members_to_fetch`, because in some
1211
+ # rare cases (an empty event batch with a now_token after the user's
1212
+ # leave in a partial state room which another local user has
1213
+ # joined), the room state will be missing our membership and there
1214
+ # is no guarantee that our membership will be in the auth events of
1215
+ # timeline events when the room is partial stated.
1216
+ state_filter = StateFilter.from_lazy_load_member_list(
1217
+ members_to_fetch.union((sync_config.user.to_string(),))
1218
+ )
1219
+
1220
+ # We are happy to use partial state to compute the `/sync` response.
1221
+ # Since partial state may not include the lazy-loaded memberships we
1222
+ # require, we fix up the state response afterwards with memberships from
1223
+ # auth events.
1224
+ await_full_state = False
1225
+ lazy_load_members = True
1226
+ else:
1227
+ state_filter = StateFilter.all()
1228
+ await_full_state = True
1229
+ lazy_load_members = False
1230
+
1231
+ # Check if we are wanting to return the state at the start or end of the
1232
+ # timeline. If at the end we can just use the current state.
1233
+ if sync_config.use_state_after:
1234
+ # If we're getting the state at the end of the timeline, we can just
1235
+ # use the current state of the room (and roll back any changes
1236
+ # between when we fetched the current state and `end_token`).
1237
+ #
1238
+ # For rooms we're not joined to, there might be a very large number
1239
+ # of deltas between `end_token` and "now", and so instead we fetch
1240
+ # the state at the end of the timeline.
1241
+ if joined:
1242
+ state_ids = await self._state_storage_controller.get_current_state_ids(
1243
+ room_id,
1244
+ state_filter=state_filter,
1245
+ await_full_state=await_full_state,
1246
+ )
1247
+
1248
+ # Now roll back the state by looking at the state deltas between
1249
+ # end_token and now.
1250
+ deltas = await self.store.get_current_state_deltas_for_room(
1251
+ room_id,
1252
+ from_token=end_token.room_key,
1253
+ to_token=self.store.get_room_max_token(),
1254
+ )
1255
+ if deltas:
1256
+ mutable_state_ids = dict(state_ids)
1257
+
1258
+ # We iterate over the deltas backwards so that if there are
1259
+ # multiple changes of the same type/state_key we'll
1260
+ # correctly pick the earliest delta.
1261
+ for delta in reversed(deltas):
1262
+ if delta.prev_event_id:
1263
+ mutable_state_ids[(delta.event_type, delta.state_key)] = (
1264
+ delta.prev_event_id
1265
+ )
1266
+ elif (delta.event_type, delta.state_key) in mutable_state_ids:
1267
+ mutable_state_ids.pop((delta.event_type, delta.state_key))
1268
+
1269
+ state_ids = mutable_state_ids
1270
+
1271
+ return state_ids
1272
+
1273
+ else:
1274
+ # Just use state groups to get the state at the end of the
1275
+ # timeline, i.e. the state at the leave/etc event.
1276
+ state_at_timeline_end = (
1277
+ await self._state_storage_controller.get_state_ids_at(
1278
+ room_id,
1279
+ stream_position=end_token,
1280
+ state_filter=state_filter,
1281
+ await_full_state=await_full_state,
1282
+ )
1283
+ )
1284
+ return state_at_timeline_end
1285
+
1286
+ state_at_timeline_end = await self._state_storage_controller.get_state_ids_at(
1287
+ room_id,
1288
+ stream_position=end_token,
1289
+ state_filter=state_filter,
1290
+ await_full_state=await_full_state,
1291
+ )
1292
+
1293
+ if batch:
1294
+ # Strictly speaking, this returns the state *after* the first event in the
1295
+ # timeline, but that is good enough here.
1296
+ state_at_timeline_start = (
1297
+ await self._state_storage_controller.get_state_ids_for_event(
1298
+ batch.events[0].event_id,
1299
+ state_filter=state_filter,
1300
+ await_full_state=await_full_state,
1301
+ )
1302
+ )
1303
+ else:
1304
+ state_at_timeline_start = state_at_timeline_end
1305
+
1306
+ state_ids = _calculate_state(
1307
+ timeline_contains=timeline_state,
1308
+ timeline_start=state_at_timeline_start,
1309
+ timeline_end=state_at_timeline_end,
1310
+ previous_timeline_end={},
1311
+ lazy_load_members=lazy_load_members,
1312
+ )
1313
+ return state_ids
1314
+
1315
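The `use_state_after` branch above fetches the room's current state and then walks the state deltas between `end_token` and "now" in reverse to undo them. A minimal standalone sketch of that rollback idea, using plain dicts and a hypothetical `Delta` type rather than Synapse's storage controllers:

# NOTE: illustrative sketch only, not part of the packaged module.
from dataclasses import dataclass


@dataclass
class Delta:
    event_type: str
    state_key: str
    event_id: str | None       # the new event for this (type, state_key), if any
    prev_event_id: str | None  # the event it replaced, if any


def rollback_state(
    current_state: dict[tuple[str, str], str], deltas: list[Delta]
) -> dict[tuple[str, str], str]:
    """Undo `deltas` (ordered oldest-to-newest) on top of `current_state`.

    Walking the deltas in reverse means the oldest delta is applied last, so
    the result is the state as it was before the first delta, i.e. at the
    earlier token.
    """
    state = dict(current_state)
    for delta in reversed(deltas):
        key = (delta.event_type, delta.state_key)
        if delta.prev_event_id:
            state[key] = delta.prev_event_id
        else:
            # The entry did not exist before this delta, so remove it.
            state.pop(key, None)
    return state


if __name__ == "__main__":
    now = {("m.room.name", ""): "$name2", ("m.room.topic", ""): "$topic1"}
    deltas = [
        Delta("m.room.name", "", "$name2", "$name1"),  # name changed after end_token
        Delta("m.room.topic", "", "$topic1", None),    # topic first set after end_token
    ]
    assert rollback_state(now, deltas) == {("m.room.name", ""): "$name1"}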
+ async def _compute_state_delta_for_incremental_sync(
1316
+ self,
1317
+ room_id: str,
1318
+ sync_config: SyncConfig,
1319
+ batch: TimelineBatch,
1320
+ since_token: StreamToken,
1321
+ end_token: StreamToken,
1322
+ members_to_fetch: set[str] | None,
1323
+ timeline_state: StateMap[str],
1324
+ ) -> StateMap[str]:
1325
+ """Calculate the state events to be included in an incremental sync response.
1326
+
1327
+ If lazy-loading of membership events is enabled (as indicated by
1328
+ `members_to_fetch` being not-`None`), the result will include the membership
1329
+ events for each member in `members_to_fetch`. The caller
1330
+ (`compute_state_delta`) is responsible for keeping track of which membership
1331
+ events we have already sent to the client, and hence ripping them out.
1332
+
1333
+ Note that whether this returns the state at the start or the end of the
1334
+ batch depends on `sync_config.use_state_after` (c.f. MSC4222).
1335
+
1336
+ Args:
1337
+ room_id: The room we are calculating for.
1338
+ sync_config: The user that is calling `/sync`.
1339
+ batch: The timeline batch for the room that will be sent to the user.
1340
+ since_token: Token of the end of the previous batch.
1341
+ end_token: Token of the end of the current batch. Normally this will be
1342
+ the same as the global "now_token", but if the user has left the room,
1343
+ it will be the point just after their leave event.
1344
+ members_to_fetch: If lazy-loading is enabled, the memberships needed for
1345
+ events in the timeline. Otherwise, `None`.
1346
+ timeline_state: The contribution to the room state from state events in
1347
+ `batch`. Only contains the last event for any given state key.
1348
+
1349
+ Returns:
1350
+ A map from (type, state_key) to event_id, for each event that we believe
1351
+ should be included in the `state` or `state_after` part of the sync response.
1352
+ """
1353
+ if members_to_fetch is not None:
1354
+ # Lazy-loading is enabled. Only return the state that is needed.
1355
+ state_filter = StateFilter.from_lazy_load_member_list(members_to_fetch)
1356
+ await_full_state = False
1357
+ lazy_load_members = True
1358
+ else:
1359
+ state_filter = StateFilter.all()
1360
+ await_full_state = True
1361
+ lazy_load_members = False
1362
+
1363
+ # Check if we want to return the state at the start or end of the
1364
+ # timeline. If at the end we can just use the current state delta stream.
1365
+ if sync_config.use_state_after:
1366
+ delta_state_ids: MutableStateMap[str] = {}
1367
+
1368
+ if members_to_fetch:
1369
+ # We're lazy-loading, so the client might need some more member
1370
+ # events to understand the events in this timeline. So we always
1371
+ # fish out all the member events corresponding to the timeline
1372
+ # here. The caller will then dedupe any redundant ones.
1373
+ member_ids = await self._state_storage_controller.get_current_state_ids(
1374
+ room_id=room_id,
1375
+ state_filter=StateFilter.from_types(
1376
+ (EventTypes.Member, member) for member in members_to_fetch
1377
+ ),
1378
+ await_full_state=await_full_state,
1379
+ )
1380
+ delta_state_ids.update(member_ids)
1381
+
1382
+ # We don't do LL filtering for incremental syncs - see
1383
+ # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
1384
+ # N.B. this slows down incr syncs as we are now processing way more
1385
+ # state in the server than if we were LLing.
1386
+ #
1387
+ # i.e. we return all state deltas, including membership changes that
1388
+ # we'd normally exclude due to LL.
1389
+ deltas = await self.store.get_current_state_deltas_for_room(
1390
+ room_id=room_id,
1391
+ from_token=since_token.room_key,
1392
+ to_token=end_token.room_key,
1393
+ )
1394
+ for delta in deltas:
1395
+ if delta.event_id is None:
1396
+ # There was a state reset and this state entry is no longer
1397
+ # present, but we have no way of informing the client about
1398
+ # this, so we just skip it for now.
1399
+ continue
1400
+
1401
+ # Note that deltas are in stream ordering, so if there are
1402
+ # multiple deltas for a given type/state_key we'll always pick
1403
+ # the latest one.
1404
+ delta_state_ids[(delta.event_type, delta.state_key)] = delta.event_id
1405
+
1406
+ return delta_state_ids
1407
+
1408
+ # For a non-gappy sync, if the events in the timeline are simply a linear
1409
+ # chain (i.e. no merging/branching of the graph), then we know the state
1410
+ # delta between the end of the previous sync and start of the new one is
1411
+ # empty.
1412
+ #
1413
+ # c.f. #16941 for an example of why we can't do this for all non-gappy
1414
+ # syncs.
1415
+ is_linear_timeline = True
1416
+ if batch.events:
1417
+ # We need to make sure the first event in our batch points to the
1418
+ # last event in the previous batch.
1419
+ last_event_id_prev_batch = (
1420
+ await self.store.get_last_event_id_in_room_before_stream_ordering(
1421
+ room_id,
1422
+ end_token=since_token.room_key,
1423
+ )
1424
+ )
1425
+
1426
+ prev_event_id = last_event_id_prev_batch
1427
+ for e in batch.events:
1428
+ if e.prev_event_ids() != [prev_event_id]:
1429
+ is_linear_timeline = False
1430
+ break
1431
+ prev_event_id = e.event_id
1432
+
1433
+ if is_linear_timeline and not batch.limited:
1434
+ state_ids: StateMap[str] = {}
1435
+ if lazy_load_members:
1436
+ if members_to_fetch and batch.events:
1437
+ # We're lazy-loading, so the client might need some more
1438
+ # member events to understand the events in this timeline.
1439
+ # So we fish out all the member events corresponding to the
1440
+ # timeline here. The caller will then dedupe any redundant
1441
+ # ones.
1442
+
1443
+ state_ids = (
1444
+ await self._state_storage_controller.get_state_ids_for_event(
1445
+ batch.events[0].event_id,
1446
+ # we only want members!
1447
+ state_filter=StateFilter.from_types(
1448
+ (EventTypes.Member, member)
1449
+ for member in members_to_fetch
1450
+ ),
1451
+ await_full_state=False,
1452
+ )
1453
+ )
1454
+ return state_ids
1455
+
1456
+ if batch:
1457
+ state_at_timeline_start = (
1458
+ await self._state_storage_controller.get_state_ids_for_event(
1459
+ batch.events[0].event_id,
1460
+ state_filter=state_filter,
1461
+ await_full_state=await_full_state,
1462
+ )
1463
+ )
1464
+ else:
1465
+ # We can get here if the user has ignored the senders of all
1466
+ # the recent events.
1467
+ state_at_timeline_start = (
1468
+ await self._state_storage_controller.get_state_ids_at(
1469
+ room_id,
1470
+ stream_position=end_token,
1471
+ state_filter=state_filter,
1472
+ await_full_state=await_full_state,
1473
+ )
1474
+ )
1475
+
1476
+ if batch.limited:
1477
+ # for now, we disable LL for gappy syncs - see
1478
+ # https://github.com/vector-im/riot-web/issues/7211#issuecomment-419976346
1479
+ # N.B. this slows down incr syncs as we are now processing way
1480
+ # more state in the server than if we were LLing.
1481
+ #
1482
+ # We still have to filter timeline_start to LL entries (above) in order
1483
+ # for _calculate_state's LL logic to work, as we have to include LL
1484
+ # members for timeline senders in case they weren't loaded in the initial
1485
+ # sync. We do this (counterintuitively) by filtering timeline_start
1486
+ # members to just be ones which were timeline senders, which then ensures
1487
+ # all of the rest get included in the state block (if we need to know
1488
+ # about them).
1489
+ state_filter = StateFilter.all()
1490
+
1491
+ state_at_previous_sync = await self._state_storage_controller.get_state_ids_at(
1492
+ room_id,
1493
+ stream_position=since_token,
1494
+ state_filter=state_filter,
1495
+ await_full_state=await_full_state,
1496
+ )
1497
+
1498
+ state_at_timeline_end = await self._state_storage_controller.get_state_ids_at(
1499
+ room_id,
1500
+ stream_position=end_token,
1501
+ state_filter=state_filter,
1502
+ await_full_state=await_full_state,
1503
+ )
1504
+
1505
+ state_ids = _calculate_state(
1506
+ timeline_contains=timeline_state,
1507
+ timeline_start=state_at_timeline_start,
1508
+ timeline_end=state_at_timeline_end,
1509
+ previous_timeline_end=state_at_previous_sync,
1510
+ lazy_load_members=lazy_load_members,
1511
+ )
1512
+
1513
+ return state_ids
1514
+
1515
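The incremental path above short-circuits when the timeline is a simple linear chain, since a non-gappy, unbranched timeline implies an empty state delta across the sync. A rough, self-contained sketch of that linearity check, with `SimpleEvent` standing in for `EventBase`:

# NOTE: illustrative sketch only, not part of the packaged module.
from dataclasses import dataclass


@dataclass
class SimpleEvent:
    event_id: str
    prev_event_ids: list[str]


def is_linear_timeline(
    last_event_id_prev_batch: str | None, events: list[SimpleEvent]
) -> bool:
    """Return True if `events` form a single unbranched chain starting from
    the last event of the previous batch."""
    prev_event_id = last_event_id_prev_batch
    for event in events:
        if event.prev_event_ids != [prev_event_id]:
            return False
        prev_event_id = event.event_id
    return True


if __name__ == "__main__":
    chain = [SimpleEvent("$b", ["$a"]), SimpleEvent("$c", ["$b"])]
    assert is_linear_timeline("$a", chain)
    # An event with two prev_events indicates a merge in the DAG, so state may have changed.
    merge = [SimpleEvent("$b", ["$a"]), SimpleEvent("$d", ["$b", "$c"])]
    assert not is_linear_timeline("$a", merge)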
+ async def _find_missing_partial_state_memberships(
1516
+ self,
1517
+ room_id: str,
1518
+ members_to_fetch: StrCollection,
1519
+ events_with_membership_auth: Mapping[str, EventBase],
1520
+ found_state_ids: StateMap[str],
1521
+ ) -> StateMap[str]:
1522
+ """Finds missing memberships from a set of auth events and returns them as a
1523
+ state map.
1524
+
1525
+ Args:
1526
+ room_id: The partial state room to find the remaining memberships for.
1527
+ members_to_fetch: The memberships to find.
1528
+ events_with_membership_auth: A mapping from user IDs to events whose auth
1529
+ events would contain their prior membership, if one exists.
1530
+ Note that join events will not cite a prior membership if a user has
1531
+ never been in a room before.
1532
+ found_state_ids: A dict from (type, state_key) -> state_event_id, containing
1533
+ memberships that have been previously found. Entries in
1534
+ `members_to_fetch` that have a membership in `found_state_ids` are
1535
+ ignored.
1536
+
1537
+ Returns:
1538
+ A dict from ("m.room.member", state_key) -> state_event_id, containing the
1539
+ memberships missing from `found_state_ids`.
1540
+
1541
+ When `events_with_membership_auth` contains a join event for a given user
1542
+ which does not cite a prior membership, no membership is returned for that
1543
+ user.
1544
+
1545
+ Raises:
1546
+ KeyError: if `events_with_membership_auth` does not have an entry for a
1547
+ missing membership. Memberships in `found_state_ids` do not need an
1548
+ entry in `events_with_membership_auth`.
1549
+ """
1550
+ additional_state_ids: MutableStateMap[str] = {}
1551
+
1552
+ # Tracks the missing members for logging purposes.
1553
+ missing_members = set()
1554
+
1555
+ # Identify memberships missing from `found_state_ids` and pick out the auth
1556
+ # events in which to look for them.
1557
+ auth_event_ids: set[str] = set()
1558
+ for member in members_to_fetch:
1559
+ if (EventTypes.Member, member) in found_state_ids:
1560
+ continue
1561
+
1562
+ event_with_membership_auth = events_with_membership_auth[member]
1563
+ is_create = (
1564
+ event_with_membership_auth.is_state()
1565
+ and event_with_membership_auth.type == EventTypes.Create
1566
+ )
1567
+ is_join = (
1568
+ event_with_membership_auth.is_state()
1569
+ and event_with_membership_auth.type == EventTypes.Member
1570
+ and event_with_membership_auth.state_key == member
1571
+ and event_with_membership_auth.content.get("membership")
1572
+ == Membership.JOIN
1573
+ )
1574
+ if not is_create and not is_join:
1575
+ # The event must include the desired membership as an auth event, unless
1576
+ # it's the `m.room.create` event for a room or the first join event for
1577
+ # a given user.
1578
+ missing_members.add(member)
1579
+ auth_event_ids.update(event_with_membership_auth.auth_event_ids())
1580
+
1581
+ auth_events = await self.store.get_events(auth_event_ids)
1582
+
1583
+ # Run through the missing memberships once more, picking out the memberships
1584
+ # from the pile of auth events we have just fetched.
1585
+ for member in members_to_fetch:
1586
+ if (EventTypes.Member, member) in found_state_ids:
1587
+ continue
1588
+
1589
+ event_with_membership_auth = events_with_membership_auth[member]
1590
+
1591
+ # Dig through the auth events to find the desired membership.
1592
+ for auth_event_id in event_with_membership_auth.auth_event_ids():
1593
+ # We only store events once we have all their auth events,
1594
+ # so the auth event must be in the pile we have just
1595
+ # fetched.
1596
+ auth_event = auth_events[auth_event_id]
1597
+
1598
+ if (
1599
+ auth_event.type == EventTypes.Member
1600
+ and auth_event.state_key == member
1601
+ ):
1602
+ missing_members.discard(member)
1603
+ additional_state_ids[(EventTypes.Member, member)] = (
1604
+ auth_event.event_id
1605
+ )
1606
+ break
1607
+
1608
+ if missing_members:
1609
+ # There really shouldn't be any missing memberships now. Either:
1610
+ # * we couldn't find an auth event, which shouldn't happen because we do
1611
+ # not persist events without persisting their auth events first, or
1612
+ # * the set of auth events did not contain a membership we wanted, which
1613
+ # means our caller didn't compute the events in `members_to_fetch`
1614
+ # correctly, or we somehow accepted an event whose auth events were
1615
+ # dodgy.
1616
+ logger.error(
1617
+ "Failed to find memberships for %s in partial state room "
1618
+ "%s in the auth events of %s.",
1619
+ missing_members,
1620
+ room_id,
1621
+ [
1622
+ events_with_membership_auth[member].event_id
1623
+ for member in missing_members
1624
+ ],
1625
+ )
1626
+
1627
+ return additional_state_ids
1628
+
1629
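`_find_missing_partial_state_memberships` recovers memberships that partial room state lacks by digging through the auth events of the corresponding timeline events. A simplified sketch of that recovery step, with a hypothetical `AuthEvent` type and plain dicts in place of Synapse's state maps:

# NOTE: illustrative sketch only, not part of the packaged module.
from dataclasses import dataclass


@dataclass
class AuthEvent:
    event_id: str
    type: str
    state_key: str


def memberships_from_auth_events(
    members_to_fetch: set[str],
    found_state: dict[tuple[str, str], str],
    auth_events_by_member: dict[str, list[AuthEvent]],
) -> dict[tuple[str, str], str]:
    """For each wanted member not already in `found_state`, look through the
    auth events of one of their timeline events for an m.room.member event
    about them."""
    additional: dict[tuple[str, str], str] = {}
    for member in members_to_fetch:
        if ("m.room.member", member) in found_state:
            continue
        for auth_event in auth_events_by_member.get(member, []):
            if auth_event.type == "m.room.member" and auth_event.state_key == member:
                additional[("m.room.member", member)] = auth_event.event_id
                break
    return additional


if __name__ == "__main__":
    auth = {"@bob:example.org": [AuthEvent("$bob_join", "m.room.member", "@bob:example.org")]}
    extra = memberships_from_auth_events({"@bob:example.org"}, {}, auth)
    assert extra == {("m.room.member", "@bob:example.org"): "$bob_join"}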
+ async def unread_notifs_for_room_id(
1630
+ self, room_id: str, sync_config: SyncConfig
1631
+ ) -> RoomNotifCounts:
1632
+ if not self.should_calculate_push_rules:
1633
+ # If push rules have been universally disabled then we know we won't
1634
+ # have any unread counts in the DB, so we may as well skip asking
1635
+ # the DB.
1636
+ return RoomNotifCounts.empty()
1637
+
1638
+ with Measure(
1639
+ self.clock, name="unread_notifs_for_room_id", server_name=self.server_name
1640
+ ):
1641
+ return await self.store.get_unread_event_push_actions_by_room_for_user(
1642
+ room_id,
1643
+ sync_config.user.to_string(),
1644
+ )
1645
+
1646
+ async def generate_sync_result(
1647
+ self,
1648
+ sync_config: SyncConfig,
1649
+ since_token: StreamToken | None = None,
1650
+ full_state: bool = False,
1651
+ ) -> SyncResult:
1652
+ """Generates the response body of a sync result.
1653
+
1654
+ This is represented by a `SyncResult` struct, which is built from small pieces
1655
+ using a `SyncResultBuilder`. See also
1656
+ https://spec.matrix.org/v1.1/client-server-api/#get_matrixclientv3sync
1657
+ The `sync_result_builder` is passed as a mutable ("inout") parameter to various
1658
+ helper functions. These retrieve and process the data which forms the sync body,
1659
+ often writing to the `sync_result_builder` to store their output.
1660
+
1661
+ At the end, we transfer data from the `sync_result_builder` to a new `SyncResult`
1662
+ instance to signify that the sync calculation is complete.
1663
+ """
1664
+
1665
+ user_id = sync_config.user.to_string()
1666
+ app_service = self.store.get_app_service_by_user_id(user_id)
1667
+ if app_service:
1668
+ # We no longer support AS users using /sync directly.
1669
+ # See https://github.com/matrix-org/matrix-doc/issues/1144
1670
+ raise NotImplementedError()
1671
+
1672
+ sync_result_builder = await self.get_sync_result_builder(
1673
+ sync_config,
1674
+ since_token,
1675
+ full_state,
1676
+ )
1677
+
1678
+ logger.debug(
1679
+ "Calculating sync response for %r between %s and %s",
1680
+ sync_config.user,
1681
+ sync_result_builder.since_token,
1682
+ sync_result_builder.now_token,
1683
+ )
1684
+
1685
+ logger.debug("Fetching account data")
1686
+
1687
+ # Global account data is included if it is not filtered out.
1688
+ if not sync_config.filter_collection.blocks_all_global_account_data():
1689
+ await self._generate_sync_entry_for_account_data(sync_result_builder)
1690
+
1691
+ # Presence data is included if the server has it enabled and not filtered out.
1692
+ include_presence_data = bool(
1693
+ self.hs_config.server.presence_enabled
1694
+ and not sync_config.filter_collection.blocks_all_presence()
1695
+ )
1696
+ # Device list updates are sent if a since token is provided.
1697
+ include_device_list_updates = bool(since_token and since_token.device_list_key)
1698
+
1699
+ # If we do not care about the rooms or things which depend on the room
1700
+ # data (namely presence and device list updates), then we can skip
1701
+ # this process completely.
1702
+ device_lists = DeviceListUpdates()
1703
+ if (
1704
+ not sync_result_builder.sync_config.filter_collection.blocks_all_rooms()
1705
+ or include_presence_data
1706
+ or include_device_list_updates
1707
+ ):
1708
+ logger.debug("Fetching room data")
1709
+
1710
+ # Note that _generate_sync_entry_for_rooms sets sync_result_builder.joined, which
1711
+ # is used in calculate_user_changes below.
1712
+ (
1713
+ newly_joined_rooms,
1714
+ newly_left_rooms,
1715
+ ) = await self._generate_sync_entry_for_rooms(sync_result_builder)
1716
+
1717
+ # Work out which users have joined or left rooms we're in. We use this
1718
+ # to build the presence and device_list parts of the sync response in
1719
+ # `_generate_sync_entry_for_presence` and
1720
+ # `_generate_sync_entry_for_device_list` respectively.
1721
+ if include_presence_data or include_device_list_updates:
1722
+ # This uses the sync_result_builder.joined which is set in
1723
+ # `_generate_sync_entry_for_rooms`; if that didn't find any joined
1724
+ # rooms for some reason it is a no-op.
1725
+ (
1726
+ newly_joined_or_invited_or_knocked_users,
1727
+ newly_left_users,
1728
+ ) = sync_result_builder.calculate_user_changes()
1729
+
1730
+ if include_presence_data:
1731
+ logger.debug("Fetching presence data")
1732
+ await self._generate_sync_entry_for_presence(
1733
+ sync_result_builder,
1734
+ newly_joined_rooms,
1735
+ newly_joined_or_invited_or_knocked_users,
1736
+ )
1737
+
1738
+ if include_device_list_updates:
1739
+ # include_device_list_updates can only be True if we have a
1740
+ # since token.
1741
+ assert since_token is not None
1742
+
1743
+ device_lists = await self._device_handler.generate_sync_entry_for_device_list(
1744
+ user_id=user_id,
1745
+ since_token=since_token,
1746
+ now_token=sync_result_builder.now_token,
1747
+ joined_room_ids=sync_result_builder.joined_room_ids,
1748
+ newly_joined_rooms=newly_joined_rooms,
1749
+ newly_joined_or_invited_or_knocked_users=newly_joined_or_invited_or_knocked_users,
1750
+ newly_left_rooms=newly_left_rooms,
1751
+ newly_left_users=newly_left_users,
1752
+ )
1753
+
1754
+ logger.debug("Fetching to-device data")
1755
+ await self._generate_sync_entry_for_to_device(sync_result_builder)
1756
+
1757
+ logger.debug("Fetching OTK data")
1758
+ device_id = sync_config.device_id
1759
+ one_time_keys_count: JsonMapping = {}
1760
+ unused_fallback_key_types: list[str] = []
1761
+ if device_id:
1762
+ # TODO: We should have a way to let clients differentiate between the states of:
1763
+ # * no change in OTK count since the provided since token
1764
+ # * the server has zero OTKs left for this device
1765
+ # Spec issue: https://github.com/matrix-org/matrix-doc/issues/3298
1766
+ one_time_keys_count = await self.store.count_e2e_one_time_keys(
1767
+ user_id, device_id
1768
+ )
1769
+ unused_fallback_key_types = list(
1770
+ await self.store.get_e2e_unused_fallback_key_types(user_id, device_id)
1771
+ )
1772
+
1773
+ num_events = 0
1774
+
1775
+ # debug for https://github.com/matrix-org/synapse/issues/9424
1776
+ for joined_room in sync_result_builder.joined:
1777
+ num_events += len(joined_room.timeline.events)
1778
+
1779
+ log_kv(
1780
+ {
1781
+ "joined_rooms_in_result": len(sync_result_builder.joined),
1782
+ "events_in_result": num_events,
1783
+ }
1784
+ )
1785
+
1786
+ logger.debug("Sync response calculation complete")
1787
+ return SyncResult(
1788
+ presence=sync_result_builder.presence,
1789
+ account_data=sync_result_builder.account_data,
1790
+ joined=sync_result_builder.joined,
1791
+ invited=sync_result_builder.invited,
1792
+ knocked=sync_result_builder.knocked,
1793
+ archived=sync_result_builder.archived,
1794
+ to_device=sync_result_builder.to_device,
1795
+ device_lists=device_lists,
1796
+ device_one_time_keys_count=one_time_keys_count,
1797
+ device_unused_fallback_key_types=unused_fallback_key_types,
1798
+ next_batch=sync_result_builder.now_token,
1799
+ )
1800
+
1801
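The `generate_sync_result` docstring above describes the builder pattern used here: helper coroutines receive a mutable `SyncResultBuilder` and write their portion into it, and the finished pieces are copied into an immutable result at the end. A toy illustration of that shape (all names below are hypothetical simplifications, not Synapse APIs):

# NOTE: illustrative sketch only, not part of the packaged module.
import asyncio
from dataclasses import dataclass, field


@dataclass
class MiniBuilder:
    account_data: list[dict] = field(default_factory=list)
    presence: list[dict] = field(default_factory=list)


@dataclass(frozen=True)
class MiniResult:
    account_data: list[dict]
    presence: list[dict]


async def fill_account_data(builder: MiniBuilder) -> None:
    builder.account_data.append({"type": "m.push_rules", "content": {}})


async def fill_presence(builder: MiniBuilder) -> None:
    builder.presence.append({"user_id": "@alice:example.org", "presence": "online"})


async def generate_result() -> MiniResult:
    builder = MiniBuilder()
    await fill_account_data(builder)  # each helper mutates the builder ("inout")
    await fill_presence(builder)
    return MiniResult(account_data=builder.account_data, presence=builder.presence)


if __name__ == "__main__":
    result = asyncio.run(generate_result())
    assert result.presence[0]["presence"] == "online"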
+ async def get_sync_result_builder(
1802
+ self,
1803
+ sync_config: SyncConfig,
1804
+ since_token: StreamToken | None = None,
1805
+ full_state: bool = False,
1806
+ ) -> "SyncResultBuilder":
1807
+ """
1808
+ Assemble a `SyncResultBuilder` with all of the initial context to
1809
+ start building up the sync response:
1810
+
1811
+ - Membership changes between the last sync and the current sync.
1812
+ - Joined room IDs (minus any rooms to exclude).
1813
+ - Rooms that became fully-stated/un-partial stated since the last sync.
1814
+
1815
+ Args:
1816
+ sync_config: Config/info necessary to process the sync request.
1817
+ since_token: The point in the stream to sync from.
1818
+ full_state: Whether to return the full state for each room.
1819
+
1820
+ Returns:
1821
+ `SyncResultBuilder` ready to start generating parts of the sync response.
1822
+ """
1823
+ user_id = sync_config.user.to_string()
1824
+
1825
+ # Note: we get the user's room list *before* we get the `now_token`; this
1826
+ # avoids checking back in history if rooms are joined after the token is fetched.
1827
+ token_before_rooms = self.event_sources.get_current_token()
1828
+ mutable_joined_room_ids = set(await self.store.get_rooms_for_user(user_id))
1829
+
1830
+ # NB: The `now_token` gets changed by some of the `generate_sync_*` methods;
1831
+ # this is due to some of the underlying streams not supporting the ability
1832
+ # to query up to a given point.
1833
+ # Always use the `now_token` in `SyncResultBuilder`
1834
+ now_token = self.event_sources.get_current_token()
1835
+ log_kv({"now_token": now_token})
1836
+
1837
+ # Since we fetched the user's room list before calculating the `now_token` (see
1838
+ # above), there's a small window during which membership events may have been
1839
+ # persisted, so we fetch these now and modify the joined room list for any
1840
+ # changes between the get_rooms_for_user call and the get_current_token call.
1841
+ membership_change_events = []
1842
+ if since_token:
1843
+ membership_change_events = await self.store.get_membership_changes_for_user(
1844
+ user_id,
1845
+ since_token.room_key,
1846
+ now_token.room_key,
1847
+ self.rooms_to_exclude_globally,
1848
+ )
1849
+
1850
+ last_membership_change_by_room_id: dict[str, EventBase] = {}
1851
+ for event in membership_change_events:
1852
+ last_membership_change_by_room_id[event.room_id] = event
1853
+
1854
+ # For the latest membership event in each room found, add/remove the room ID
1855
+ # from the joined room list accordingly. In this case we only care if the
1856
+ # latest change is JOIN.
1857
+
1858
+ for room_id, event in last_membership_change_by_room_id.items():
1859
+ assert event.internal_metadata.stream_ordering
1860
+ # As a shortcut, skip any events that happened before we got our
1861
+ # `get_rooms_for_user()` snapshot (any changes are already represented
1862
+ # in that list).
1863
+ if (
1864
+ event.internal_metadata.stream_ordering
1865
+ < token_before_rooms.room_key.stream
1866
+ ):
1867
+ continue
1868
+
1869
+ logger.info(
1870
+ "User membership change between getting rooms and current token: %s %s %s",
1871
+ user_id,
1872
+ event.membership,
1873
+ room_id,
1874
+ )
1875
+ # User joined a room - we have to then check the room state to ensure we
1876
+ # respect any bans if there's a race between the join and ban events.
1877
+ if event.membership == Membership.JOIN:
1878
+ user_ids_in_room = await self.store.get_users_in_room(room_id)
1879
+ if user_id in user_ids_in_room:
1880
+ mutable_joined_room_ids.add(room_id)
1881
+ # The user left the room, or left and was re-invited but not joined yet
1882
+ else:
1883
+ mutable_joined_room_ids.discard(room_id)
1884
+
1885
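The loop above reconciles the joined-room snapshot (taken before `now_token`) with any membership events persisted in the gap, keeping only the latest change per room. A condensed sketch of that reconciliation; the `Change` type is a hypothetical stand-in for membership events, and the real handler additionally re-checks current room membership on joins to respect ban races:

# NOTE: illustrative sketch only, not part of the packaged module.
from dataclasses import dataclass


@dataclass
class Change:
    room_id: str
    membership: str       # e.g. "join", "leave"
    stream_ordering: int


def reconcile_joined_rooms(
    snapshot: set[str], snapshot_stream: int, changes: list[Change]
) -> set[str]:
    """Apply the most recent membership change per room on top of `snapshot`,
    skipping changes that are already reflected in the snapshot."""
    latest_by_room: dict[str, Change] = {}
    for change in changes:  # assumed to be in stream order
        latest_by_room[change.room_id] = change

    joined = set(snapshot)
    for room_id, change in latest_by_room.items():
        if change.stream_ordering < snapshot_stream:
            continue  # already represented in the snapshot
        if change.membership == "join":
            # (The real handler also re-checks get_users_in_room here to guard
            # against a racing ban.)
            joined.add(room_id)
        else:
            joined.discard(room_id)
    return joined


if __name__ == "__main__":
    snapshot = {"!a:example.org"}
    changes = [Change("!b:example.org", "join", 12), Change("!a:example.org", "leave", 13)]
    assert reconcile_joined_rooms(snapshot, 10, changes) == {"!b:example.org"}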
+ # Tweak the set of rooms to return to the client for eager (non-lazy) syncs.
1886
+ mutable_rooms_to_exclude = set(self.rooms_to_exclude_globally)
1887
+ if not sync_config.filter_collection.lazy_load_members():
1888
+ # Non-lazy syncs should never include partially stated rooms.
1889
+ # Exclude all partially stated rooms from this sync.
1890
+ results = await self.store.is_partial_state_room_batched(
1891
+ mutable_joined_room_ids
1892
+ )
1893
+ mutable_rooms_to_exclude.update(
1894
+ room_id
1895
+ for room_id, is_partial_state in results.items()
1896
+ if is_partial_state
1897
+ )
1898
+ membership_change_events = [
1899
+ event
1900
+ for event in membership_change_events
1901
+ if not results.get(event.room_id, False)
1902
+ ]
1903
+
1904
+ # Incremental eager syncs should additionally include rooms that
1905
+ # - we are joined to
1906
+ # - are full-stated
1907
+ # - became fully-stated at some point during the sync period
1908
+ # (These rooms will have been omitted during a previous eager sync.)
1909
+ forced_newly_joined_room_ids: set[str] = set()
1910
+ if since_token and not sync_config.filter_collection.lazy_load_members():
1911
+ un_partial_stated_rooms = (
1912
+ await self.store.get_un_partial_stated_rooms_between(
1913
+ since_token.un_partial_stated_rooms_key,
1914
+ now_token.un_partial_stated_rooms_key,
1915
+ mutable_joined_room_ids,
1916
+ )
1917
+ )
1918
+ results = await self.store.is_partial_state_room_batched(
1919
+ un_partial_stated_rooms
1920
+ )
1921
+ forced_newly_joined_room_ids.update(
1922
+ room_id
1923
+ for room_id, is_partial_state in results.items()
1924
+ if not is_partial_state
1925
+ )
1926
+
1927
+ # Now we have our list of joined room IDs, exclude as configured and freeze
1928
+ joined_room_ids = frozenset(
1929
+ room_id
1930
+ for room_id in mutable_joined_room_ids
1931
+ if room_id not in mutable_rooms_to_exclude
1932
+ )
1933
+
1934
+ sync_result_builder = SyncResultBuilder(
1935
+ sync_config,
1936
+ full_state,
1937
+ since_token=since_token,
1938
+ now_token=now_token,
1939
+ joined_room_ids=joined_room_ids,
1940
+ excluded_room_ids=frozenset(mutable_rooms_to_exclude),
1941
+ forced_newly_joined_room_ids=frozenset(forced_newly_joined_room_ids),
1942
+ membership_change_events=membership_change_events,
1943
+ )
1944
+
1945
+ return sync_result_builder
1946
+
1947
+ @trace
1948
+ async def _generate_sync_entry_for_to_device(
1949
+ self, sync_result_builder: "SyncResultBuilder"
1950
+ ) -> None:
1951
+ """Generates the portion of the sync response. Populates
1952
+ `sync_result_builder` with the result.
1953
+ """
1954
+ user_id = sync_result_builder.sync_config.user.to_string()
1955
+ device_id = sync_result_builder.sync_config.device_id
1956
+ now_token = sync_result_builder.now_token
1957
+ since_stream_id = 0
1958
+ if sync_result_builder.since_token is not None:
1959
+ since_stream_id = int(sync_result_builder.since_token.to_device_key)
1960
+
1961
+ if device_id is not None and since_stream_id != int(now_token.to_device_key):
1962
+ messages, stream_id = await self.store.get_messages_for_device(
1963
+ user_id, device_id, since_stream_id, now_token.to_device_key
1964
+ )
1965
+
1966
+ for message in messages:
1967
+ log_kv(
1968
+ {
1969
+ "event": "to_device_message",
1970
+ "sender": message["sender"],
1971
+ "type": message["type"],
1972
+ EventContentFields.TO_DEVICE_MSGID: message["content"].get(
1973
+ EventContentFields.TO_DEVICE_MSGID
1974
+ ),
1975
+ }
1976
+ )
1977
+
1978
+ if messages and issue9533_logger.isEnabledFor(logging.DEBUG):
1979
+ issue9533_logger.debug(
1980
+ "Returning to-device messages with stream_ids (%d, %d]; now: %d;"
1981
+ " msgids: %s",
1982
+ since_stream_id,
1983
+ stream_id,
1984
+ now_token.to_device_key,
1985
+ [
1986
+ message["content"].get(EventContentFields.TO_DEVICE_MSGID)
1987
+ for message in messages
1988
+ ],
1989
+ )
1990
+ sync_result_builder.now_token = now_token.copy_and_replace(
1991
+ StreamKeyType.TO_DEVICE, stream_id
1992
+ )
1993
+ sync_result_builder.to_device = messages
1994
+ else:
1995
+ sync_result_builder.to_device = []
1996
+
1997
+ async def _generate_sync_entry_for_account_data(
1998
+ self, sync_result_builder: "SyncResultBuilder"
1999
+ ) -> None:
2000
+ """Generates the global account data portion of the sync response.
2001
+
2002
+ Account data (called "Client Config" in the spec) can be set either globally
2003
+ or for a specific room. Account data consists of a list of events which
2004
+ accumulate state, much like a room.
2005
+
2006
+ This function retrieves global account data and writes it to the given
2007
+ `sync_result_builder`. See `_generate_sync_entry_for_rooms` for handling
2008
+ of per-room account data.
2009
+
2010
+ Args:
2011
+ sync_result_builder
2012
+ """
2013
+ sync_config = sync_result_builder.sync_config
2014
+ user_id = sync_result_builder.sync_config.user.to_string()
2015
+ since_token = sync_result_builder.since_token
2016
+
2017
+ if since_token and not sync_result_builder.full_state:
2018
+ global_account_data = (
2019
+ await self.store.get_updated_global_account_data_for_user(
2020
+ user_id, since_token.account_data_key
2021
+ )
2022
+ )
2023
+
2024
+ push_rules_changed = await self.store.have_push_rules_changed_for_user(
2025
+ user_id, int(since_token.push_rules_key)
2026
+ )
2027
+
2028
+ if push_rules_changed:
2029
+ global_account_data = dict(global_account_data)
2030
+ global_account_data[
2031
+ AccountDataTypes.PUSH_RULES
2032
+ ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
2033
+ else:
2034
+ all_global_account_data = await self.store.get_global_account_data_for_user(
2035
+ user_id
2036
+ )
2037
+
2038
+ global_account_data = dict(all_global_account_data)
2039
+ global_account_data[
2040
+ AccountDataTypes.PUSH_RULES
2041
+ ] = await self._push_rules_handler.push_rules_for_user(sync_config.user)
2042
+
2043
+ account_data_for_user = (
2044
+ await sync_config.filter_collection.filter_global_account_data(
2045
+ [
2046
+ {"type": account_data_type, "content": content}
2047
+ for account_data_type, content in global_account_data.items()
2048
+ ]
2049
+ )
2050
+ )
2051
+
2052
+ sync_result_builder.account_data = account_data_for_user
2053
+
2054
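On an incremental sync, the method above returns only the global account data that changed, and splices in freshly-computed push rules as a synthetic `m.push_rules` entry when they changed. A small sketch of that merge step with hypothetical names:

# NOTE: illustrative sketch only, not part of the packaged module.
def merge_global_account_data(
    changed: dict[str, dict],
    push_rules_changed: bool,
    current_push_rules: dict,
) -> list[dict]:
    """Turn a map of changed account-data types into the event list sent to clients."""
    merged = dict(changed)
    if push_rules_changed:
        merged["m.push_rules"] = current_push_rules
    return [{"type": data_type, "content": content} for data_type, content in merged.items()]


if __name__ == "__main__":
    events = merge_global_account_data(
        {"m.direct": {"@bob:example.org": ["!dm:example.org"]}},
        push_rules_changed=True,
        current_push_rules={"global": {}},
    )
    assert {e["type"] for e in events} == {"m.direct", "m.push_rules"}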
+ async def _generate_sync_entry_for_presence(
2055
+ self,
2056
+ sync_result_builder: "SyncResultBuilder",
2057
+ newly_joined_rooms: AbstractSet[str],
2058
+ newly_joined_or_invited_users: AbstractSet[str],
2059
+ ) -> None:
2060
+ """Generates the presence portion of the sync response. Populates the
2061
+ `sync_result_builder` with the result.
2062
+
2063
+ Args:
2064
+ sync_result_builder
2065
+ newly_joined_rooms: Set of rooms that the user has joined since
2066
+ the last sync (or empty if an initial sync)
2067
+ newly_joined_or_invited_users: Set of users that have joined or
2068
+ been invited to rooms since the last sync (or empty if an
2069
+ initial sync)
2070
+ """
2071
+ now_token = sync_result_builder.now_token
2072
+ sync_config = sync_result_builder.sync_config
2073
+ user = sync_result_builder.sync_config.user
2074
+
2075
+ presence_source = self.event_sources.sources.presence
2076
+
2077
+ since_token = sync_result_builder.since_token
2078
+ presence_key = None
2079
+ include_offline = False
2080
+ if since_token and not sync_result_builder.full_state:
2081
+ presence_key = since_token.presence_key
2082
+ include_offline = True
2083
+
2084
+ presence, presence_key = await presence_source.get_new_events(
2085
+ user=user,
2086
+ from_key=presence_key,
2087
+ is_guest=sync_config.is_guest,
2088
+ include_offline=(
2089
+ True
2090
+ if self.hs_config.server.presence_include_offline_users_on_sync
2091
+ else include_offline
2092
+ ),
2093
+ )
2094
+ assert presence_key
2095
+ sync_result_builder.now_token = now_token.copy_and_replace(
2096
+ StreamKeyType.PRESENCE, presence_key
2097
+ )
2098
+
2099
+ extra_users_ids = set(newly_joined_or_invited_users)
2100
+ for room_id in newly_joined_rooms:
2101
+ users = await self.store.get_users_in_room(room_id)
2102
+ extra_users_ids.update(users)
2103
+ extra_users_ids.discard(user.to_string())
2104
+
2105
+ if extra_users_ids:
2106
+ states = await self.presence_handler.get_states(extra_users_ids)
2107
+ presence.extend(states)
2108
+
2109
+ # Deduplicate the presence entries so that there's at most one per user
2110
+ presence = list({p.user_id: p for p in presence}.values())
2111
+
2112
+ presence = await sync_config.filter_collection.filter_presence(presence)
2113
+
2114
+ sync_result_builder.presence = presence
2115
+
2116
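The dict-comprehension above keeps at most one presence entry per user, with later entries winning over earlier ones. The same trick spelled out with a hypothetical `Presence` type:

# NOTE: illustrative sketch only, not part of the packaged module.
from dataclasses import dataclass


@dataclass
class Presence:
    user_id: str
    state: str


def dedupe_presence(entries: list[Presence]) -> list[Presence]:
    """Keep one entry per user_id; a later entry overwrites an earlier one."""
    return list({p.user_id: p for p in entries}.values())


if __name__ == "__main__":
    entries = [Presence("@a:hs", "online"), Presence("@a:hs", "unavailable"), Presence("@b:hs", "online")]
    deduped = dedupe_presence(entries)
    assert len(deduped) == 2
    assert next(p for p in deduped if p.user_id == "@a:hs").state == "unavailable"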
+ async def _generate_sync_entry_for_rooms(
2117
+ self, sync_result_builder: "SyncResultBuilder"
2118
+ ) -> tuple[AbstractSet[str], AbstractSet[str]]:
2119
+ """Generates the rooms portion of the sync response. Populates the
2120
+ `sync_result_builder` with the result.
2121
+
2122
+ In the response that reaches the client, rooms are divided into four categories:
2123
+ `invite`, `join`, `knock`, `leave`. These aren't the same as the two sets of
2124
+ room ids returned by this function.
2125
+
2126
+ Args:
2127
+ sync_result_builder
2128
+
2129
+ Returns:
2130
+ Returns a 2-tuple describing rooms the user has joined or left.
2131
+
2132
+ Its entries are:
2133
+ - newly_joined_rooms
2134
+ - newly_left_rooms
2135
+ """
2136
+
2137
+ since_token = sync_result_builder.since_token
2138
+ user_id = sync_result_builder.sync_config.user.to_string()
2139
+
2140
+ blocks_all_rooms = (
2141
+ sync_result_builder.sync_config.filter_collection.blocks_all_rooms()
2142
+ )
2143
+
2144
+ # 0. Start by fetching room account data (if required).
2145
+ if (
2146
+ blocks_all_rooms
2147
+ or sync_result_builder.sync_config.filter_collection.blocks_all_room_account_data()
2148
+ ):
2149
+ account_data_by_room: Mapping[str, Mapping[str, JsonMapping]] = {}
2150
+ elif since_token and not sync_result_builder.full_state:
2151
+ account_data_by_room = (
2152
+ await self.store.get_updated_room_account_data_for_user(
2153
+ user_id, since_token.account_data_key
2154
+ )
2155
+ )
2156
+ else:
2157
+ account_data_by_room = await self.store.get_room_account_data_for_user(
2158
+ user_id
2159
+ )
2160
+
2161
+ # 1. Start by fetching all ephemeral events in rooms we've joined (if required).
2162
+ block_all_room_ephemeral = (
2163
+ blocks_all_rooms
2164
+ or sync_result_builder.sync_config.filter_collection.blocks_all_room_ephemeral()
2165
+ )
2166
+ if block_all_room_ephemeral:
2167
+ ephemeral_by_room: dict[str, list[JsonDict]] = {}
2168
+ else:
2169
+ now_token, ephemeral_by_room = await self.ephemeral_by_room(
2170
+ sync_result_builder,
2171
+ now_token=sync_result_builder.now_token,
2172
+ since_token=sync_result_builder.since_token,
2173
+ )
2174
+ sync_result_builder.now_token = now_token
2175
+
2176
+ # 2. We check up front if anything has changed, if it hasn't then there is
2177
+ # no point in going further.
2178
+ if not sync_result_builder.full_state:
2179
+ if since_token and not ephemeral_by_room and not account_data_by_room:
2180
+ have_changed = await self._have_rooms_changed(sync_result_builder)
2181
+ log_kv({"rooms_have_changed": have_changed})
2182
+ if not have_changed:
2183
+ tags_by_room = await self.store.get_updated_tags(
2184
+ user_id, since_token.account_data_key
2185
+ )
2186
+ if not tags_by_room:
2187
+ logger.debug("no-oping sync")
2188
+ return set(), set()
2189
+
2190
+ # 3. Work out which rooms need reporting in the sync response.
2191
+ ignored_users = await self.store.ignored_users(user_id)
2192
+ if since_token:
2193
+ room_changes = await self._get_room_changes_for_incremental_sync(
2194
+ sync_result_builder, ignored_users
2195
+ )
2196
+ tags_by_room = await self.store.get_updated_tags(
2197
+ user_id, since_token.account_data_key
2198
+ )
2199
+ else:
2200
+ room_changes = await self._get_room_changes_for_initial_sync(
2201
+ sync_result_builder, ignored_users
2202
+ )
2203
+ tags_by_room = await self.store.get_tags_for_user(user_id)
2204
+
2205
+ log_kv({"rooms_changed": len(room_changes.room_entries)})
2206
+
2207
+ room_entries = room_changes.room_entries
2208
+ invited = room_changes.invited
2209
+ knocked = room_changes.knocked
2210
+ newly_joined_rooms = room_changes.newly_joined_rooms
2211
+ newly_left_rooms = room_changes.newly_left_rooms
2212
+
2213
+ # 4. We need to apply further processing to `room_entries` (rooms considered
2214
+ # joined or archived).
2215
+ async def handle_room_entries(room_entry: "RoomSyncResultBuilder") -> None:
2216
+ logger.debug("Generating room entry for %s", room_entry.room_id)
2217
+ # Note that this mutates sync_result_builder.{joined,archived}.
2218
+ await self._generate_room_entry(
2219
+ sync_result_builder,
2220
+ room_entry,
2221
+ ephemeral=ephemeral_by_room.get(room_entry.room_id, []),
2222
+ tags=tags_by_room.get(room_entry.room_id),
2223
+ account_data=account_data_by_room.get(room_entry.room_id, {}),
2224
+ always_include=sync_result_builder.full_state,
2225
+ )
2226
+ logger.debug("Generated room entry for %s", room_entry.room_id)
2227
+
2228
+ with start_active_span("sync.generate_room_entries"):
2229
+ await concurrently_execute(handle_room_entries, room_entries, 10)
2230
+
2231
+ sync_result_builder.invited.extend(invited)
2232
+ sync_result_builder.knocked.extend(knocked)
2233
+
2234
+ return set(newly_joined_rooms), set(newly_left_rooms)
2235
+
2236
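`concurrently_execute(handle_room_entries, room_entries, 10)` above caps how many room entries are generated in parallel. A rough asyncio-only analogue of that bounded-concurrency helper (Synapse's own version is Twisted-based; this just shows the idea):

# NOTE: illustrative sketch only, not part of the packaged module.
import asyncio
from collections.abc import Awaitable, Callable, Iterable
from typing import TypeVar

T = TypeVar("T")


async def bounded_concurrently_execute(
    func: Callable[[T], Awaitable[None]], items: Iterable[T], limit: int
) -> None:
    """Run `func` over `items`, with at most `limit` invocations in flight at once."""
    semaphore = asyncio.Semaphore(limit)

    async def run_one(item: T) -> None:
        async with semaphore:
            await func(item)

    await asyncio.gather(*(run_one(item) for item in items))


if __name__ == "__main__":
    async def handle(room_id: str) -> None:
        await asyncio.sleep(0.01)
        print("generated entry for", room_id)

    asyncio.run(bounded_concurrently_execute(handle, [f"!room{i}:hs" for i in range(25)], 10))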
+ async def _have_rooms_changed(
2237
+ self, sync_result_builder: "SyncResultBuilder"
2238
+ ) -> bool:
2239
+ """Returns whether there may be any new events that should be sent down
2240
+ the sync. Returns True if there are.
2241
+
2242
+ Does not modify the `sync_result_builder`.
2243
+ """
2244
+ since_token = sync_result_builder.since_token
2245
+ membership_change_events = sync_result_builder.membership_change_events
2246
+
2247
+ assert since_token
2248
+
2249
+ if membership_change_events or sync_result_builder.forced_newly_joined_room_ids:
2250
+ return True
2251
+
2252
+ stream_id = since_token.room_key.stream
2253
+ for room_id in sync_result_builder.joined_room_ids:
2254
+ if self.store.has_room_changed_since(room_id, stream_id):
2255
+ return True
2256
+ return False
2257
+
2258
+ async def _get_room_changes_for_incremental_sync(
2259
+ self,
2260
+ sync_result_builder: "SyncResultBuilder",
2261
+ ignored_users: frozenset[str],
2262
+ ) -> _RoomChanges:
2263
+ """Determine the changes in rooms to report to the user.
2264
+
2265
+ This function is a first pass at generating the rooms part of the sync response.
2266
+ It determines which rooms have changed during the sync period, and categorises
2267
+ them into four buckets: "knock", "invite", "join" and "leave". It also excludes
2268
+ from that list any room that appears in the list of rooms to exclude from sync
2269
+ results in the server configuration.
2270
+
2271
+ 1. Finds all membership changes for the user in the sync period (from
2272
+ `since_token` up to `now_token`).
2273
+ 2. Uses those to place the room in one of the four categories above.
2274
+ 3. Builds a `_RoomChanges` struct to record this, and returns that struct.
2275
+
2276
+ For rooms classified as "knock", "invite" or "leave", we just need to report
2277
+ a single membership event in the eventual /sync response. For "join" we need
2278
+ to fetch additional non-membership events, e.g. messages in the room. That is
2279
+ more complicated, so instead we report an intermediary `RoomSyncResultBuilder`
2280
+ struct, and leave the additional work to `_generate_room_entry`.
2281
+
2282
+ The sync_result_builder is not modified by this function.
2283
+ """
2284
+ user_id = sync_result_builder.sync_config.user.to_string()
2285
+ since_token = sync_result_builder.since_token
2286
+ now_token = sync_result_builder.now_token
2287
+ sync_config = sync_result_builder.sync_config
2288
+ membership_change_events = sync_result_builder.membership_change_events
2289
+
2290
+ assert since_token
2291
+
2292
+ mem_change_events_by_room_id: dict[str, list[EventBase]] = {}
2293
+ for event in membership_change_events:
2294
+ mem_change_events_by_room_id.setdefault(event.room_id, []).append(event)
2295
+
2296
+ newly_joined_rooms: list[str] = list(
2297
+ sync_result_builder.forced_newly_joined_room_ids
2298
+ )
2299
+ newly_left_rooms: list[str] = []
2300
+ room_entries: list[RoomSyncResultBuilder] = []
2301
+ invited: list[InvitedSyncResult] = []
2302
+ knocked: list[KnockedSyncResult] = []
2303
+ invite_config = await self.store.get_invite_config_for_user(user_id)
2304
+ for room_id, events in mem_change_events_by_room_id.items():
2305
+ # The body of this loop will add this room to at least one of the five lists
2306
+ # above. Things get messy if you've e.g. joined, left, joined then left the
2307
+ # room all in the same sync period.
2308
+ logger.debug(
2309
+ "Membership changes in %s: [%s]",
2310
+ room_id,
2311
+ ", ".join("%s (%s)" % (e.event_id, e.membership) for e in events),
2312
+ )
2313
+
2314
+ non_joins = [e for e in events if e.membership != Membership.JOIN]
2315
+ has_join = len(non_joins) != len(events)
2316
+
2317
+ # We want to figure out if we joined the room at some point since
2318
+ # the last sync (even if we have since left). This is to make sure
2319
+ # we do send down the room, and with full state, where necessary
2320
+
2321
+ old_state_ids = None
2322
+ if room_id in sync_result_builder.joined_room_ids and non_joins:
2323
+ # Always include if the user (re)joined the room, especially
2324
+ # important so that device list changes are calculated correctly.
2325
+ # If there are non-join member events, but we are still in the room,
2326
+ # then the user must have left and joined
2327
+ newly_joined_rooms.append(room_id)
2328
+
2329
+ # User is in the room so we don't need to do the invite/leave checks
2330
+ continue
2331
+
2332
+ if room_id in sync_result_builder.joined_room_ids or has_join:
2333
+ old_state_ids = await self._state_storage_controller.get_state_ids_at(
2334
+ room_id,
2335
+ since_token,
2336
+ state_filter=StateFilter.from_types([(EventTypes.Member, user_id)]),
2337
+ )
2338
+ old_mem_ev_id = old_state_ids.get((EventTypes.Member, user_id), None)
2339
+ old_mem_ev = None
2340
+ if old_mem_ev_id:
2341
+ old_mem_ev = await self.store.get_event(
2342
+ old_mem_ev_id, allow_none=True
2343
+ )
2344
+
2345
+ if not old_mem_ev or old_mem_ev.membership != Membership.JOIN:
2346
+ newly_joined_rooms.append(room_id)
2347
+
2348
+ # If user is in the room then we don't need to do the invite/leave checks
2349
+ if room_id in sync_result_builder.joined_room_ids:
2350
+ continue
2351
+
2352
+ if not non_joins:
2353
+ continue
2354
+ last_non_join = non_joins[-1]
2355
+
2356
+ # Check if we have left the room. This can either be because we were
2357
+ # joined before *or* because we have since joined and then left.
2358
+ if events[-1].membership != Membership.JOIN:
2359
+ if has_join:
2360
+ newly_left_rooms.append(room_id)
2361
+ else:
2362
+ if not old_state_ids:
2363
+ old_state_ids = (
2364
+ await self._state_storage_controller.get_state_ids_at(
2365
+ room_id,
2366
+ since_token,
2367
+ state_filter=StateFilter.from_types(
2368
+ [(EventTypes.Member, user_id)]
2369
+ ),
2370
+ )
2371
+ )
2372
+ old_mem_ev_id = old_state_ids.get(
2373
+ (EventTypes.Member, user_id), None
2374
+ )
2375
+ old_mem_ev = None
2376
+ if old_mem_ev_id:
2377
+ old_mem_ev = await self.store.get_event(
2378
+ old_mem_ev_id, allow_none=True
2379
+ )
2380
+ if old_mem_ev and old_mem_ev.membership == Membership.JOIN:
2381
+ newly_left_rooms.append(room_id)
2382
+
2383
+ # Only bother if we're still currently invited
2384
+ should_invite = last_non_join.membership == Membership.INVITE
2385
+ if should_invite:
2386
+ if (
2387
+ last_non_join.sender not in ignored_users
2388
+ and invite_config.get_invite_rule(last_non_join.sender)
2389
+ != InviteRule.IGNORE
2390
+ ):
2391
+ invite_room_sync = InvitedSyncResult(room_id, invite=last_non_join)
2392
+ if invite_room_sync:
2393
+ invited.append(invite_room_sync)
2394
+
2395
+ # Only bother if our latest membership in the room is knock (and we haven't
2396
+ # been accepted/rejected in the meantime).
2397
+ should_knock = last_non_join.membership == Membership.KNOCK
2398
+ if should_knock:
2399
+ knock_room_sync = KnockedSyncResult(room_id, knock=last_non_join)
2400
+ if knock_room_sync:
2401
+ knocked.append(knock_room_sync)
2402
+
2403
+ # Always include leave/ban events. Just take the last one.
2404
+ # TODO: How do we handle ban -> leave in same batch?
2405
+ leave_events = [
2406
+ e
2407
+ for e in non_joins
2408
+ if e.membership in (Membership.LEAVE, Membership.BAN)
2409
+ ]
2410
+
2411
+ if leave_events:
2412
+ leave_event = leave_events[-1]
2413
+ leave_position = await self.store.get_position_for_event(
2414
+ leave_event.event_id
2415
+ )
2416
+
2417
+ # If the leave event happened before the since token then we
2418
+ # bail.
2419
+ if since_token and not leave_position.persisted_after(
2420
+ since_token.room_key
2421
+ ):
2422
+ continue
2423
+
2424
+ # We can safely convert the position of the leave event into a
2425
+ # stream token as it'll only be used in the context of this
2426
+ # room. (c.f. the docstring of `to_room_stream_token`).
2427
+ leave_token = since_token.copy_and_replace(
2428
+ StreamKeyType.ROOM, leave_position.to_room_stream_token()
2429
+ )
2430
+
2431
+ # If this is an out of band message, like a remote invite
2432
+ # rejection, we include it in the recents batch. Otherwise, we
2433
+ # let _load_filtered_recents handle fetching the correct
2434
+ # batches.
2435
+ #
2436
+ # This is all screaming out for a refactor, as the logic here is
2437
+ # subtle and the moving parts numerous.
2438
+ if leave_event.internal_metadata.is_out_of_band_membership():
2439
+ batch_events: list[EventBase] | None = [leave_event]
2440
+ else:
2441
+ batch_events = None
2442
+
2443
+ room_entries.append(
2444
+ RoomSyncResultBuilder(
2445
+ room_id=room_id,
2446
+ rtype="archived",
2447
+ events=batch_events,
2448
+ newly_joined=room_id in newly_joined_rooms,
2449
+ full_state=False,
2450
+ since_token=since_token,
2451
+ upto_token=leave_token,
2452
+ end_token=leave_token,
2453
+ out_of_band=leave_event.internal_metadata.is_out_of_band_membership(),
2454
+ )
2455
+ )
2456
+
2457
+ timeline_limit = sync_config.filter_collection.timeline_limit()
2458
+
2459
+ # Get all events since the `from_key` in rooms we're currently joined to.
2460
+ # If there are too many, we get the most recent events only. This leaves
2461
+ # a "gap" in the timeline, as described by the spec for /sync.
2462
+ room_to_events = await self.store.get_room_events_stream_for_rooms(
2463
+ room_ids=sync_result_builder.joined_room_ids,
2464
+ from_key=now_token.room_key,
2465
+ to_key=since_token.room_key,
2466
+ limit=timeline_limit + 1,
2467
+ direction=Direction.BACKWARDS,
2468
+ )
2469
+
2470
+ # We loop through all room ids, even if there are no new events, in case
2471
+ # there are non-room events that we need to notify about.
2472
+ for room_id in sync_result_builder.joined_room_ids:
2473
+ room_entry = room_to_events.get(room_id, None)
2474
+
2475
+ newly_joined = room_id in newly_joined_rooms
2476
+ if room_entry:
2477
+ events, start_key, _ = room_entry
2478
+ # We want to return the events in ascending order (the last event is the
2479
+ # most recent).
2480
+ events.reverse()
2481
+
2482
+ prev_batch_token = now_token.copy_and_replace(
2483
+ StreamKeyType.ROOM, start_key
2484
+ )
2485
+
2486
+ entry = RoomSyncResultBuilder(
2487
+ room_id=room_id,
2488
+ rtype="joined",
2489
+ events=events,
2490
+ newly_joined=newly_joined,
2491
+ full_state=False,
2492
+ since_token=None if newly_joined else since_token,
2493
+ upto_token=prev_batch_token,
2494
+ end_token=now_token,
2495
+ )
2496
+ else:
2497
+ entry = RoomSyncResultBuilder(
2498
+ room_id=room_id,
2499
+ rtype="joined",
2500
+ events=[],
2501
+ newly_joined=newly_joined,
2502
+ full_state=False,
2503
+ since_token=since_token,
2504
+ upto_token=since_token,
2505
+ end_token=now_token,
2506
+ )
2507
+
2508
+ room_entries.append(entry)
2509
+
2510
+ return _RoomChanges(
2511
+ room_entries,
2512
+ invited,
2513
+ knocked,
2514
+ newly_joined_rooms,
2515
+ newly_left_rooms,
2516
+ )
2517
+
2518
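The docstring of `_get_room_changes_for_incremental_sync` describes bucketing each room by the user's membership changes during the sync window. A stripped-down sketch of that bucketing, ignoring the ignore-list, ban-race and out-of-band subtleties the real method handles:

# NOTE: illustrative sketch only, not part of the packaged module.
def categorise_room(memberships: list[str]) -> set[str]:
    """Given the user's membership changes in one room during the sync window
    (oldest first), return the buckets the room belongs to."""
    buckets: set[str] = set()
    if not memberships:
        return buckets
    has_join = "join" in memberships
    last = memberships[-1]
    if last == "join":
        # Still joined; report as newly joined so the room is sent with full state.
        buckets.add("newly_joined")
        return buckets
    if has_join:
        # Joined at some point but no longer: newly joined *and* newly left.
        buckets.update({"newly_joined", "newly_left"})
    if last == "invite":
        buckets.add("invited")
    elif last == "knock":
        buckets.add("knocked")
    elif last in ("leave", "ban"):
        buckets.add("archived")
    return buckets


if __name__ == "__main__":
    assert categorise_room(["invite", "join"]) == {"newly_joined"}
    assert categorise_room(["join", "leave"]) == {"newly_joined", "newly_left", "archived"}
    assert categorise_room(["knock"]) == {"knocked"}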
+ async def _get_room_changes_for_initial_sync(
2519
+ self,
2520
+ sync_result_builder: "SyncResultBuilder",
2521
+ ignored_users: frozenset[str],
2522
+ ) -> _RoomChanges:
2523
+ """Returns entries for all rooms for the user.
2524
+
2525
+ Like `_get_room_changes_for_incremental_sync`, but assumes the `since_token` is `None`.
2526
+
2527
+ This function does not modify the sync_result_builder.
2528
+
2529
+ Args:
2530
+ sync_result_builder
2531
+ ignored_users: Set of users ignored by the user.
2532
+ """
2534
+
2535
+ user_id = sync_result_builder.sync_config.user.to_string()
2536
+ since_token = sync_result_builder.since_token
2537
+ now_token = sync_result_builder.now_token
2538
+ sync_config = sync_result_builder.sync_config
2539
+
2540
+ room_list = await self.store.get_rooms_for_local_user_where_membership_is(
2541
+ user_id=user_id,
2542
+ membership_list=Membership.LIST,
2543
+ excluded_rooms=sync_result_builder.excluded_room_ids,
2544
+ )
2545
+ invite_config = await self.store.get_invite_config_for_user(user_id)
2546
+
2547
+ room_entries = []
2548
+ invited = []
2549
+ knocked = []
2550
+
2551
+ for event in room_list:
2552
+ if event.room_version_id not in KNOWN_ROOM_VERSIONS:
2553
+ continue
2554
+
2555
+ if event.membership == Membership.JOIN:
2556
+ room_entries.append(
2557
+ RoomSyncResultBuilder(
2558
+ room_id=event.room_id,
2559
+ rtype="joined",
2560
+ events=None,
2561
+ newly_joined=False,
2562
+ full_state=True,
2563
+ since_token=since_token,
2564
+ upto_token=now_token,
2565
+ end_token=now_token,
2566
+ )
2567
+ )
2568
+ elif event.membership == Membership.INVITE:
2569
+ if event.sender in ignored_users:
2570
+ continue
2571
+ if invite_config.get_invite_rule(event.sender) == InviteRule.IGNORE:
2572
+ continue
2573
+ invite = await self.store.get_event(event.event_id)
2574
+ invited.append(InvitedSyncResult(room_id=event.room_id, invite=invite))
2575
+ elif event.membership == Membership.KNOCK:
2576
+ knock = await self.store.get_event(event.event_id)
2577
+ knocked.append(KnockedSyncResult(room_id=event.room_id, knock=knock))
2578
+ elif event.membership in (Membership.LEAVE, Membership.BAN):
2579
+ # Always send down rooms we were banned from or kicked from.
2580
+ if not sync_config.filter_collection.include_leave:
2581
+ if event.membership == Membership.LEAVE:
2582
+ if user_id == event.sender:
2583
+ continue
2584
+
2585
+ leave_token = now_token.copy_and_replace(
2586
+ StreamKeyType.ROOM, RoomStreamToken(stream=event.event_pos.stream)
2587
+ )
2588
+ room_entries.append(
2589
+ RoomSyncResultBuilder(
2590
+ room_id=event.room_id,
2591
+ rtype="archived",
2592
+ events=None,
2593
+ newly_joined=False,
2594
+ full_state=True,
2595
+ since_token=since_token,
2596
+ upto_token=leave_token,
2597
+ end_token=leave_token,
2598
+ )
2599
+ )
2600
+
2601
+ return _RoomChanges(room_entries, invited, knocked, [], [])
2602
+
2603
+ async def _generate_room_entry(
2604
+ self,
2605
+ sync_result_builder: "SyncResultBuilder",
2606
+ room_builder: "RoomSyncResultBuilder",
2607
+ ephemeral: list[JsonDict],
2608
+ tags: Mapping[str, JsonMapping] | None,
2609
+ account_data: Mapping[str, JsonMapping],
2610
+ always_include: bool = False,
2611
+ ) -> None:
2612
+ """Populates the `joined` and `archived` section of `sync_result_builder`
2613
+ based on the `room_builder`.
2614
+
2615
+ Ideally, we want to report all events whose stream ordering `s` lies in the
2616
+ range `since_token < s <= now_token`, where the two tokens are read from the
2617
+ sync_result_builder.
2618
+
2619
+ If there are too many events in that range to report, things get complicated.
2620
+ In this situation we return a truncated list of the most recent events, and
2621
+ indicate in the response that there is a "gap" of omitted events. Lots of this
2622
+ is handled in `_load_filtered_recents`, but some of it is handled in this method.
2623
+
2624
+ Additionally:
2625
+ - we include a "state_delta", to describe the changes in state over the gap,
2626
+ - we include all membership events applying to the user making the request,
2627
+ even those in the gap.
2628
+
2629
+ See the spec for the rationale:
2630
+ https://spec.matrix.org/v1.1/client-server-api/#syncing
2631
+
2632
+ Args:
2633
+ sync_result_builder
2634
+ room_builder
2635
+ ephemeral: List of new ephemeral events for room
2636
+ tags: List of *all* tags for room, or None if there has been
2637
+ no change.
2638
+ account_data: List of new account data for room
2639
+ always_include: Always include this room in the sync response,
2640
+ even if empty.
2641
+ """
2642
+        newly_joined = room_builder.newly_joined
+        full_state = (
+            room_builder.full_state or newly_joined or sync_result_builder.full_state
+        )
+        events = room_builder.events
+
+        # We want to shortcut out as early as possible.
+        if not (always_include or account_data or ephemeral or full_state):
+            if events == [] and tags is None:
+                return
+
+        now_token = sync_result_builder.now_token
+        sync_config = sync_result_builder.sync_config
+
+        room_id = room_builder.room_id
+        since_token = room_builder.since_token
+        upto_token = room_builder.upto_token
+
+        with start_active_span("sync.generate_room_entry"):
+            set_tag("room_id", room_id)
+            log_kv({"events": len(events or ())})
+
+            log_kv(
+                {
+                    "since_token": since_token,
+                    "upto_token": upto_token,
+                    "end_token": room_builder.end_token,
+                }
+            )
+
+            batch = await self._load_filtered_recents(
+                room_id,
+                sync_result_builder,
+                sync_config,
+                upto_token=upto_token,
+                since_token=since_token,
+                potential_recents=events,
+                newly_joined_room=newly_joined,
+            )
+            log_kv(
+                {
+                    "batch_events": len(batch.events),
+                    "prev_batch": batch.prev_batch,
+                    "batch_limited": batch.limited,
+                }
+            )
+
+            # Note: `batch` can be both empty and limited here in the case where
+            # `_load_filtered_recents` can't find any events the user should see
+            # (e.g. due to having ignored the sender of the last 50 events).
+
+            # When we join the room (or the client requests full_state), we should
+            # send down any existing tags. Usually the user won't have tags in a
+            # newly joined room, unless either a) they've joined before or b) the
+            # tag was added by synapse e.g. for server notice rooms.
+            if full_state:
+                user_id = sync_result_builder.sync_config.user.to_string()
+                tags = await self.store.get_tags_for_room(user_id, room_id)
+
+                # If there aren't any tags, don't send the empty tags list down
+                # sync
+                if not tags:
+                    tags = None
+
+            account_data_events = []
+            if tags is not None:
+                account_data_events.append(
+                    {"type": AccountDataTypes.TAG, "content": {"tags": tags}}
+                )
+
+            for account_data_type, content in account_data.items():
+                account_data_events.append(
+                    {"type": account_data_type, "content": content}
+                )
+
+            account_data_events = (
+                await sync_config.filter_collection.filter_room_account_data(
+                    account_data_events
+                )
+            )
+
+            ephemeral = [
+                # per spec, ephemeral events (typing notifications and read receipts)
+                # should not have a `room_id` field when sent to clients
+                # refs:
+                # - https://spec.matrix.org/v1.16/client-server-api/#mtyping
+                # - https://spec.matrix.org/v1.16/client-server-api/#mreceipt
+                {k: v for (k, v) in event.items() if k != "room_id"}
+                for event in await sync_config.filter_collection.filter_room_ephemeral(
+                    ephemeral
+                )
+            ]
+
+            if not (
+                always_include
+                or batch
+                or account_data_events
+                or ephemeral
+                or full_state
+            ):
+                return
+
+            if not room_builder.out_of_band:
+                state = await self.compute_state_delta(
+                    room_id,
+                    batch,
+                    sync_config,
+                    since_token,
+                    room_builder.end_token,
+                    full_state=full_state,
+                    joined=room_builder.rtype == "joined",
+                )
+            else:
+                # An out of band room won't have any state changes.
+                state = {}
+
+            summary: JsonDict | None = {}
+
+            # we include a summary in room responses when we're lazy loading
+            # members (as the client otherwise doesn't have enough info to form
+            # the name itself).
+            if (
+                not room_builder.out_of_band
+                and sync_config.filter_collection.lazy_load_members()
+                and (
+                    # we recalculate the summary:
+                    # if there are membership changes in the timeline, or
+                    # if membership has changed during a gappy sync, or
+                    # if this is an initial sync.
+                    any(ev.type == EventTypes.Member for ev in batch.events)
+                    or (
+                        # XXX: this may include false positives in the form of LL
+                        # members which have snuck into state
+                        batch.limited
+                        and any(t == EventTypes.Member for (t, k) in state)
+                    )
+                    or since_token is None
+                )
+            ):
+                summary = await self.compute_summary(
+                    room_id, sync_config, batch, state, now_token
+                )
+
+            if room_builder.rtype == "joined":
+                unread_notifications: dict[str, int] = {}
+                room_sync = JoinedSyncResult(
+                    room_id=room_id,
+                    timeline=batch,
+                    state=state,
+                    ephemeral=ephemeral,
+                    account_data=account_data_events,
+                    unread_notifications=unread_notifications,
+                    unread_thread_notifications={},
+                    summary=summary,
+                    unread_count=0,
+                )
+
+                if room_sync or always_include:
+                    notifs = await self.unread_notifs_for_room_id(room_id, sync_config)
+
+                    # Notifications for the main timeline.
+                    notify_count = notifs.main_timeline.notify_count
+                    highlight_count = notifs.main_timeline.highlight_count
+                    unread_count = notifs.main_timeline.unread_count
+
+                    # Check the sync configuration.
+                    if sync_config.filter_collection.unread_thread_notifications():
+                        # And add info for each thread.
+                        room_sync.unread_thread_notifications = {
+                            thread_id: {
+                                "notification_count": thread_notifs.notify_count,
+                                "highlight_count": thread_notifs.highlight_count,
+                            }
+                            for thread_id, thread_notifs in notifs.threads.items()
+                            if thread_id is not None
+                        }
+
+                    else:
+                        # Combine the unread counts for all threads and main timeline.
+                        for thread_notifs in notifs.threads.values():
+                            notify_count += thread_notifs.notify_count
+                            highlight_count += thread_notifs.highlight_count
+                            unread_count += thread_notifs.unread_count
+
+                    unread_notifications["notification_count"] = notify_count
+                    unread_notifications["highlight_count"] = highlight_count
+                    room_sync.unread_count = unread_count
+
+                sync_result_builder.joined.append(room_sync)
+
+                if batch.limited and since_token:
+                    user_id = sync_result_builder.sync_config.user.to_string()
+                    logger.debug(
+                        "Incremental gappy sync of %s for user %s with %d state events",
+                        room_id,
+                        user_id,
+                        len(state),
+                    )
+            elif room_builder.rtype == "archived":
+                archived_room_sync = ArchivedSyncResult(
+                    room_id=room_id,
+                    timeline=batch,
+                    state=state,
+                    account_data=account_data_events,
+                )
+                if archived_room_sync or always_include:
+                    sync_result_builder.archived.append(archived_room_sync)
+            else:
+                raise Exception("Unrecognized rtype: %r", room_builder.rtype)
+
+
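For orientation, this is roughly the shape of the "gappy" room entry that the method above builds up and that a client ultimately receives under `rooms.join.<room_id>` in the /sync response. This is an illustrative sketch only: the field names follow the Matrix client-server spec referenced in the docstring, while the token strings, counts, and event placeholders are made up.

# Illustrative only: placeholder tokens/events; field names per the client-server spec.
gappy_room_entry = {
    "timeline": {
        "events": ["...the most recent timeline events..."],
        "limited": True,             # there is a gap of omitted events
        "prev_batch": "t123-456",    # placeholder token for backfilling the gap
    },
    "state": {"events": ["...the state delta covering the gap..."]},
    "ephemeral": {"events": []},     # typing/read receipts, stripped of room_id
    "account_data": {"events": []},  # includes m.tag when tags changed
    "unread_notifications": {
        "notification_count": 2,
        "highlight_count": 0,
    },
}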
+def _action_has_highlight(actions: list[JsonDict]) -> bool:
+    for action in actions:
+        try:
+            if action.get("set_tweak", None) == "highlight":
+                return action.get("value", True)
+        except AttributeError:
+            pass
+
+    return False
+
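A quick usage sketch for `_action_has_highlight`: the actions come from push-rule evaluation, and a `set_tweak: highlight` tweak (whose `value` defaults to true) marks the notification as a highlight. The action lists below are illustrative, not taken from the module.

# Illustrative push-rule action lists; only the "set_tweak"/"value" shape matters here.
assert _action_has_highlight([{"set_tweak": "highlight"}]) is True
assert _action_has_highlight([{"set_tweak": "highlight", "value": False}]) is False
# Plain string actions (e.g. "notify") raise AttributeError on .get() and are skipped.
assert _action_has_highlight(["notify", {"set_tweak": "sound", "value": "default"}]) is False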
+
+def _calculate_state(
+    timeline_contains: StateMap[str],
+    timeline_start: StateMap[str],
+    timeline_end: StateMap[str],
+    previous_timeline_end: StateMap[str],
+    lazy_load_members: bool,
+) -> StateMap[str]:
+    """Works out what state to include in a sync response.
+
+    Args:
+        timeline_contains: state in the timeline
+        timeline_start: state at the start of the timeline
+        timeline_end: state at the end of the timeline
+        previous_timeline_end: state at the end of the previous sync (or empty dict
+            if this is an initial sync)
+        lazy_load_members: whether to return members from timeline_start
+            or not. Assumes that timeline_start has already been filtered to
+            include only the members the client needs to know about.
+    """
+    event_id_to_state_key = {
+        event_id: state_key
+        for state_key, event_id in itertools.chain(
+            timeline_contains.items(),
+            timeline_start.items(),
+            timeline_end.items(),
+            previous_timeline_end.items(),
+        )
+    }
+
+    timeline_end_ids = set(timeline_end.values())
+    timeline_start_ids = set(timeline_start.values())
+    previous_timeline_end_ids = set(previous_timeline_end.values())
+    timeline_contains_ids = set(timeline_contains.values())
+
+    # If we are lazyloading room members, we explicitly add the membership events
+    # for the senders in the timeline into the state block returned by /sync,
+    # as we may not have sent them to the client before. We find these membership
+    # events by filtering them out of timeline_start, which has already been filtered
+    # to only include membership events for the senders in the timeline.
+    # In practice, we can do this by removing them from the previous_timeline_end_ids
+    # list, which is the list of relevant state we know we have already sent to the
+    # client.
+    # see https://github.com/matrix-org/synapse/pull/2970/files/efcdacad7d1b7f52f879179701c7e0d9b763511f#r204732809
+
+    if lazy_load_members:
+        previous_timeline_end_ids.difference_update(
+            e for t, e in timeline_start.items() if t[0] == EventTypes.Member
+        )
+
+    # Naively, we would just return the difference between the state at the start
+    # of the timeline (`timeline_start_ids`) and that at the end of the previous sync
+    # (`previous_timeline_end_ids`). However, that fails in the presence of forks in
+    # the DAG.
+    #
+    # For example, consider a DAG such as the following:
+    #
+    #       E1
+    #     ↗    ↖
+    #    |      S2
+    #    |      ↑
+    # --|------|----
+    #    |      |
+    #    E3     |
+    #     ↖    /
+    #       E4
+    #
+    # ... and a filter that means we only return 2 events, represented by the dashed
+    # horizontal line. Assuming S2 was *not* included in the previous sync, we need to
+    # include it in the `state` section.
+    #
+    # Note that the state at the start of the timeline (E3) does not include S2. So,
+    # to make sure it gets included in the calculation here, we actually look at
+    # the state at the *end* of the timeline, and subtract any events that are present
+    # in the timeline.
+    #
+    # ----------
+    #
+    # Aside 1: You may then wonder if we need to include `timeline_start` in the
+    # calculation. Consider a linear DAG:
+    #
+    #      E1
+    #      ↑
+    #      S2
+    #      ↑
+    # ----|------
+    #      |
+    #      E3
+    #      ↑
+    #      S4
+    #      ↑
+    #      E5
+    #
+    # ... where S2 and S4 change the same piece of state; and where we have a filter
+    # that returns 3 events (E3, S4, E5). We still need to tell the client about S2,
+    # because it might affect the display of E3. However, the state at the end of the
+    # timeline only tells us about S4; if we don't inspect `timeline_start` we won't
+    # find out about S2.
+    #
+    # (There are yet more complicated cases in which a state event is excluded from the
+    # timeline, but whose effect actually lands in the DAG in the *middle* of the
+    # timeline. We have no way to represent that in the /sync response, and we don't
+    # even try; it is either omitted or plonked into `state` as if it were at the start
+    # of the timeline, depending on what else is in the timeline.)
+
+    state_ids = (
+        (timeline_end_ids | timeline_start_ids)
+        - previous_timeline_end_ids
+        - timeline_contains_ids
+    )
+
+    return {event_id_to_state_key[e]: e for e in state_ids}
+
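A worked example of the set arithmetic above, mirroring the forked-DAG case in the comment: S2 is visible in the state at the *end* of the timeline but not at its start, and was not sent in the previous sync, so it ends up in the returned state delta. The event IDs and state keys below are made up for illustration.

# Hypothetical state maps mirroring the forked-DAG example above.
name = ("m.room.name", "")
topic = ("m.room.topic", "")

delta = _calculate_state(
    timeline_contains={},  # E3/E4 in the timeline are not state events
    timeline_start={name: "$S1", topic: "$S0"},
    timeline_end={name: "$S1", topic: "$S2"},
    previous_timeline_end={name: "$S1", topic: "$S0"},
    lazy_load_members=False,
)
assert delta == {topic: "$S2"}  # only the change the client hasn't seen yet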
+
+@attr.s(slots=True, auto_attribs=True)
+class SyncResultBuilder:
+    """Used to help build up a new SyncResult for a user
+
+    Attributes:
+        sync_config
+        full_state: The full_state flag as specified by user
+        since_token: The token supplied by user, or None.
+        now_token: The token to sync up to.
+        joined_room_ids: List of rooms the user is joined to
+        excluded_room_ids: Set of room ids we should omit from the /sync response.
+        forced_newly_joined_room_ids:
+            Rooms that should be presented in the /sync response as if they were
+            newly joined during the sync period, even if that's not the case.
+            (This is useful if the room was previously excluded from a /sync response,
+            and now the client should be made aware of it.)
+            Only used by incremental syncs.
+
+        # The following mirror the fields in a sync response
+        presence
+        account_data
+        joined
+        invited
+        knocked
+        archived
+        to_device
+    """
+
+    sync_config: SyncConfig
+    full_state: bool
+    since_token: StreamToken | None
+    now_token: StreamToken
+    joined_room_ids: frozenset[str]
+    excluded_room_ids: frozenset[str]
+    forced_newly_joined_room_ids: frozenset[str]
+    membership_change_events: list[EventBase]
+
+    presence: list[UserPresenceState] = attr.Factory(list)
+    account_data: list[JsonDict] = attr.Factory(list)
+    joined: list[JoinedSyncResult] = attr.Factory(list)
+    invited: list[InvitedSyncResult] = attr.Factory(list)
+    knocked: list[KnockedSyncResult] = attr.Factory(list)
+    archived: list[ArchivedSyncResult] = attr.Factory(list)
+    to_device: list[JsonDict] = attr.Factory(list)
+
+    def calculate_user_changes(self) -> tuple[AbstractSet[str], AbstractSet[str]]:
+        """Work out which other users have joined or left rooms we are joined to.
+
+        This data is only useful for an incremental sync.
+
+        The SyncResultBuilder is not modified by this function.
+        """
+        newly_joined_or_invited_or_knocked_users = set()
+        newly_left_users = set()
+        if self.since_token:
+            for joined_sync in self.joined:
+                it = itertools.chain(
+                    joined_sync.state.values(), joined_sync.timeline.events
+                )
+                for event in it:
+                    if event.type == EventTypes.Member:
+                        if (
+                            event.membership == Membership.JOIN
+                            or event.membership == Membership.INVITE
+                            or event.membership == Membership.KNOCK
+                        ):
+                            newly_joined_or_invited_or_knocked_users.add(
+                                event.state_key
+                            )
+                            # If the user left and rejoined in the same batch, they
+                            # count as a newly-joined user, *not* a newly-left user.
+                            newly_left_users.discard(event.state_key)
+                        else:
+                            prev_content = event.unsigned.get("prev_content", {})
+                            prev_membership = prev_content.get("membership", None)
+                            if prev_membership == Membership.JOIN:
+                                newly_left_users.add(event.state_key)
+                            # If the user joined and left in the same batch, they
+                            # count as a newly-left user, not a newly-joined user.
+                            newly_joined_or_invited_or_knocked_users.discard(
+                                event.state_key
+                            )
+
+        return newly_joined_or_invited_or_knocked_users, newly_left_users
+
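The join/leave bookkeeping in `calculate_user_changes` can be fiddly to follow. The stand-alone sketch below replays the same add/discard logic over plain (membership, prev_membership, user_id) tuples instead of real EventBase objects; it is purely illustrative and the user IDs are made up.

# Simplified stand-in for the bookkeeping above.
def _replay(membership_events):
    newly_joined_or_invited_or_knocked = set()
    newly_left = set()
    for membership, prev_membership, user_id in membership_events:
        if membership in ("join", "invite", "knock"):
            newly_joined_or_invited_or_knocked.add(user_id)
            # left-then-rejoined in the same batch counts as newly joined
            newly_left.discard(user_id)
        else:
            if prev_membership == "join":
                newly_left.add(user_id)
            # joined-then-left in the same batch counts as newly left
            newly_joined_or_invited_or_knocked.discard(user_id)
    return newly_joined_or_invited_or_knocked, newly_left

# @alice joins then leaves; @bob leaves then rejoins, all within one sync batch.
joined, left = _replay(
    [
        ("join", None, "@alice:example.org"),
        ("leave", "join", "@alice:example.org"),
        ("leave", "join", "@bob:example.org"),
        ("join", "leave", "@bob:example.org"),
    ]
)
assert joined == {"@bob:example.org"}
assert left == {"@alice:example.org"}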
+
+@attr.s(slots=True, auto_attribs=True)
+class RoomSyncResultBuilder:
+    """Stores information needed to create either a `JoinedSyncResult` or
+    `ArchivedSyncResult`.
+
+    Attributes:
+        room_id
+
+        rtype: One of `"joined"` or `"archived"`
+
+        events: List of events to include in the room (more events may be added
+            when generating result).
+
+        newly_joined: If the user has newly joined the room
+
+        full_state: Whether the full state should be sent in result
+
+        since_token: Earliest point to return events from, or None
+
+        upto_token: Latest point to return events from. If `events` is populated,
+            this is set to the token at the start of `events`
+
+        end_token: The last point in the timeline that the client should see events
+            from. Normally this will be the same as the global `now_token`, but in
+            the case of rooms where the user has left the room, this will be the point
+            just after their leave event.
+
+            This is used in the calculation of the state which is returned in `state`:
+            any state changes *up to* `end_token` (and not beyond!) which are not
+            reflected in the timeline need to be returned in `state`.
+
+        out_of_band: whether the events in the room are "out of band" events
+            and the server isn't in the room.
+    """
+
+    room_id: str
+    rtype: str
+    events: list[EventBase] | None
+    newly_joined: bool
+    full_state: bool
+    since_token: StreamToken | None
+    upto_token: StreamToken
+    end_token: StreamToken
+    out_of_band: bool = False
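As a side note on the attrs pattern used by both builders: `auto_attribs=True` turns the annotated class attributes into constructor arguments, `slots=True` prevents setting undeclared attributes, and trailing fields with defaults (such as `out_of_band` above) may be omitted at construction time. A minimal stand-alone sketch using a toy class rather than the real builders (which require Synapse stream tokens and sync config):

import attr

@attr.s(slots=True, auto_attribs=True)
class _ToyBuilder:
    room_id: str
    rtype: str
    out_of_band: bool = False

b = _ToyBuilder(room_id="!room:example.org", rtype="joined")
assert b.out_of_band is False
b.out_of_band = True  # declared fields remain settable
# b.extra = 1         # would raise AttributeError because of slots=True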