matrix-synapse 1.139.2__cp39-abi3-macosx_11_0_arm64.whl → 1.140.0rc1__cp39-abi3-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of matrix-synapse might be problematic.
Files changed (158)
  1. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/METADATA +5 -3
  2. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/RECORD +157 -154
  3. synapse/_scripts/generate_workers_map.py +6 -1
  4. synapse/_scripts/synapse_port_db.py +0 -2
  5. synapse/_scripts/update_synapse_database.py +1 -6
  6. synapse/api/auth/base.py +1 -3
  7. synapse/api/auth/mas.py +6 -8
  8. synapse/api/auth/msc3861_delegated.py +6 -8
  9. synapse/api/errors.py +3 -0
  10. synapse/app/_base.py +101 -39
  11. synapse/app/admin_cmd.py +2 -4
  12. synapse/app/appservice.py +1 -1
  13. synapse/app/client_reader.py +1 -1
  14. synapse/app/event_creator.py +1 -1
  15. synapse/app/federation_reader.py +1 -1
  16. synapse/app/federation_sender.py +1 -1
  17. synapse/app/frontend_proxy.py +1 -1
  18. synapse/app/generic_worker.py +17 -11
  19. synapse/app/homeserver.py +85 -47
  20. synapse/app/media_repository.py +1 -1
  21. synapse/app/phone_stats_home.py +16 -14
  22. synapse/app/pusher.py +1 -1
  23. synapse/app/synchrotron.py +1 -1
  24. synapse/app/user_dir.py +1 -1
  25. synapse/appservice/__init__.py +29 -2
  26. synapse/appservice/scheduler.py +8 -8
  27. synapse/config/_base.py +32 -14
  28. synapse/config/_base.pyi +5 -3
  29. synapse/config/experimental.py +3 -0
  30. synapse/config/homeserver.py +27 -1
  31. synapse/config/logger.py +3 -4
  32. synapse/config/matrixrtc.py +67 -0
  33. synapse/crypto/keyring.py +18 -4
  34. synapse/events/auto_accept_invites.py +0 -1
  35. synapse/federation/federation_client.py +39 -0
  36. synapse/federation/federation_server.py +1 -1
  37. synapse/federation/send_queue.py +3 -0
  38. synapse/federation/sender/__init__.py +24 -8
  39. synapse/federation/sender/per_destination_queue.py +31 -8
  40. synapse/federation/sender/transaction_manager.py +12 -0
  41. synapse/federation/transport/client.py +29 -0
  42. synapse/handlers/account_validity.py +2 -4
  43. synapse/handlers/appservice.py +5 -7
  44. synapse/handlers/deactivate_account.py +2 -3
  45. synapse/handlers/delayed_events.py +10 -13
  46. synapse/handlers/device.py +14 -14
  47. synapse/handlers/e2e_keys.py +4 -3
  48. synapse/handlers/federation.py +7 -11
  49. synapse/handlers/federation_event.py +5 -6
  50. synapse/handlers/message.py +16 -10
  51. synapse/handlers/pagination.py +3 -7
  52. synapse/handlers/presence.py +21 -25
  53. synapse/handlers/profile.py +1 -1
  54. synapse/handlers/read_marker.py +3 -1
  55. synapse/handlers/register.py +8 -1
  56. synapse/handlers/room.py +13 -4
  57. synapse/handlers/room_member.py +11 -7
  58. synapse/handlers/room_policy.py +96 -2
  59. synapse/handlers/sso.py +1 -1
  60. synapse/handlers/stats.py +5 -3
  61. synapse/handlers/sync.py +20 -13
  62. synapse/handlers/typing.py +5 -10
  63. synapse/handlers/user_directory.py +12 -11
  64. synapse/handlers/worker_lock.py +19 -15
  65. synapse/http/client.py +18 -13
  66. synapse/http/federation/matrix_federation_agent.py +6 -1
  67. synapse/http/federation/well_known_resolver.py +3 -1
  68. synapse/http/matrixfederationclient.py +50 -11
  69. synapse/http/proxy.py +2 -2
  70. synapse/http/server.py +36 -2
  71. synapse/http/site.py +109 -17
  72. synapse/logging/context.py +165 -63
  73. synapse/logging/opentracing.py +30 -6
  74. synapse/logging/scopecontextmanager.py +161 -0
  75. synapse/media/_base.py +2 -1
  76. synapse/media/media_repository.py +20 -6
  77. synapse/media/url_previewer.py +5 -6
  78. synapse/metrics/_gc.py +3 -1
  79. synapse/metrics/background_process_metrics.py +128 -24
  80. synapse/metrics/common_usage_metrics.py +3 -5
  81. synapse/module_api/__init__.py +42 -5
  82. synapse/notifier.py +10 -3
  83. synapse/push/emailpusher.py +5 -4
  84. synapse/push/httppusher.py +6 -6
  85. synapse/push/pusherpool.py +3 -8
  86. synapse/replication/http/devices.py +0 -41
  87. synapse/replication/tcp/client.py +8 -5
  88. synapse/replication/tcp/handler.py +2 -3
  89. synapse/replication/tcp/protocol.py +14 -7
  90. synapse/replication/tcp/redis.py +16 -11
  91. synapse/replication/tcp/resource.py +5 -4
  92. synapse/replication/tcp/streams/__init__.py +2 -0
  93. synapse/res/providers.json +6 -5
  94. synapse/rest/__init__.py +2 -0
  95. synapse/rest/admin/__init__.py +4 -0
  96. synapse/rest/admin/events.py +69 -0
  97. synapse/rest/admin/media.py +70 -2
  98. synapse/rest/client/matrixrtc.py +52 -0
  99. synapse/rest/client/push_rule.py +1 -1
  100. synapse/rest/client/room.py +2 -3
  101. synapse/rest/client/sync.py +1 -0
  102. synapse/rest/client/transactions.py +1 -1
  103. synapse/server.py +271 -38
  104. synapse/server_notices/server_notices_manager.py +1 -0
  105. synapse/state/__init__.py +4 -1
  106. synapse/storage/_base.py +1 -1
  107. synapse/storage/background_updates.py +8 -3
  108. synapse/storage/controllers/persist_events.py +4 -3
  109. synapse/storage/controllers/purge_events.py +2 -3
  110. synapse/storage/controllers/state.py +5 -5
  111. synapse/storage/database.py +12 -7
  112. synapse/storage/databases/main/__init__.py +7 -2
  113. synapse/storage/databases/main/cache.py +4 -3
  114. synapse/storage/databases/main/censor_events.py +1 -1
  115. synapse/storage/databases/main/client_ips.py +9 -8
  116. synapse/storage/databases/main/deviceinbox.py +7 -6
  117. synapse/storage/databases/main/devices.py +4 -4
  118. synapse/storage/databases/main/end_to_end_keys.py +6 -3
  119. synapse/storage/databases/main/event_federation.py +7 -6
  120. synapse/storage/databases/main/event_push_actions.py +13 -13
  121. synapse/storage/databases/main/events_bg_updates.py +1 -1
  122. synapse/storage/databases/main/events_worker.py +6 -8
  123. synapse/storage/databases/main/lock.py +17 -13
  124. synapse/storage/databases/main/media_repository.py +2 -2
  125. synapse/storage/databases/main/metrics.py +6 -6
  126. synapse/storage/databases/main/monthly_active_users.py +3 -4
  127. synapse/storage/databases/main/receipts.py +1 -1
  128. synapse/storage/databases/main/registration.py +18 -19
  129. synapse/storage/databases/main/roommember.py +1 -1
  130. synapse/storage/databases/main/session.py +3 -3
  131. synapse/storage/databases/main/sliding_sync.py +2 -2
  132. synapse/storage/databases/main/transactions.py +3 -3
  133. synapse/storage/databases/state/store.py +2 -0
  134. synapse/synapse_rust/http_client.pyi +4 -0
  135. synapse/synapse_rust.abi3.so +0 -0
  136. synapse/util/async_helpers.py +36 -24
  137. synapse/util/batching_queue.py +16 -6
  138. synapse/util/caches/__init__.py +1 -1
  139. synapse/util/caches/deferred_cache.py +4 -0
  140. synapse/util/caches/descriptors.py +14 -2
  141. synapse/util/caches/dictionary_cache.py +6 -1
  142. synapse/util/caches/expiringcache.py +16 -5
  143. synapse/util/caches/lrucache.py +14 -26
  144. synapse/util/caches/response_cache.py +11 -1
  145. synapse/util/clock.py +215 -39
  146. synapse/util/constants.py +2 -0
  147. synapse/util/daemonize.py +5 -1
  148. synapse/util/distributor.py +9 -5
  149. synapse/util/metrics.py +35 -6
  150. synapse/util/ratelimitutils.py +4 -1
  151. synapse/util/retryutils.py +7 -4
  152. synapse/util/task_scheduler.py +11 -14
  153. synapse/logging/filter.py +0 -38
  154. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/AUTHORS.rst +0 -0
  155. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/LICENSE-AGPL-3.0 +0 -0
  156. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/LICENSE-COMMERCIAL +0 -0
  157. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/WHEEL +0 -0
  158. {matrix_synapse-1.139.2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/entry_points.txt +0 -0
synapse/replication/tcp/client.py CHANGED
@@ -32,7 +32,6 @@ from synapse.api.constants import EventTypes, Membership, ReceiptTypes
 from synapse.federation import send_queue
 from synapse.federation.sender import FederationSender
 from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.tcp.streams import (
     AccountDataStream,
     DeviceListsStream,
@@ -344,7 +343,9 @@ class ReplicationDataHandler:
         # to wedge here forever.
         deferred: "Deferred[None]" = Deferred()
         deferred = timeout_deferred(
-            deferred, _WAIT_FOR_REPLICATION_TIMEOUT_SECONDS, self._reactor
+            deferred=deferred,
+            timeout=_WAIT_FOR_REPLICATION_TIMEOUT_SECONDS,
+            clock=self._clock,
         )
 
         waiting_list = self._streams_to_waiters.setdefault(
@@ -436,7 +437,9 @@ class FederationSenderHandler:
         # to. This is always set before we use it.
         self.federation_position: Optional[int] = None
 
-        self._fed_position_linearizer = Linearizer(name="_fed_position_linearizer")
+        self._fed_position_linearizer = Linearizer(
+            name="_fed_position_linearizer", clock=hs.get_clock()
+        )
 
     async def process_replication_rows(
         self, stream_name: str, token: int, rows: list
@@ -511,8 +514,8 @@ class FederationSenderHandler:
             # no need to queue up another task.
             return
 
-        run_as_background_process(
-            "_save_and_send_ack", self.server_name, self._save_and_send_ack
+        self._hs.run_as_background_process(
+            "_save_and_send_ack", self._save_and_send_ack
         )
 
     async def _save_and_send_ack(self) -> None:
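Note: the client.py hunks above, and several of the files below, share the same refactor: the module-level run_as_background_process(desc, server_name, func) helper is replaced by a method on the HomeServer object, so call sites no longer have to thread server_name through. The following is a simplified, standalone sketch of that shape; FakeHomeServer and the asyncio wiring are illustrative stand-ins only, not Synapse's actual (Twisted-based) implementation.

import asyncio
from typing import Any, Awaitable, Callable


class FakeHomeServer:
    """Stand-in for synapse.server.HomeServer, for illustration only."""

    def __init__(self, hostname: str) -> None:
        self.hostname = hostname

    def run_as_background_process(
        self, desc: str, func: Callable[..., Awaitable[Any]], *args: Any, **kwargs: Any
    ) -> "asyncio.Task[Any]":
        # The homeserver already knows its own name, so call sites only pass a
        # description and the coroutine function to run.
        print(f"[{self.hostname}] starting background process: {desc}")
        return asyncio.create_task(func(*args, **kwargs))


async def save_and_send_ack() -> None:
    await asyncio.sleep(0)  # pretend to persist the federation ack position


async def main() -> None:
    hs = FakeHomeServer("example.org")
    # Before: run_as_background_process("_save_and_send_ack", server_name, fn)
    # After:  hs.run_as_background_process("_save_and_send_ack", fn)
    await hs.run_as_background_process("_save_and_send_ack", save_and_send_ack)


asyncio.run(main())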
synapse/replication/tcp/handler.py CHANGED
@@ -41,7 +41,6 @@ from prometheus_client import Counter
 from twisted.internet.protocol import ReconnectingClientFactory
 
 from synapse.metrics import SERVER_NAME_LABEL, LaterGauge
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.tcp.commands import (
     ClearUserSyncsCommand,
     Command,
@@ -132,6 +131,7 @@ class ReplicationCommandHandler:
 
     def __init__(self, hs: "HomeServer"):
         self.server_name = hs.hostname
+        self.hs = hs
         self._replication_data_handler = hs.get_replication_data_handler()
         self._presence_handler = hs.get_presence_handler()
         self._store = hs.get_datastores().main
@@ -361,9 +361,8 @@ class ReplicationCommandHandler:
             return
 
        # fire off a background process to start processing the queue.
-        run_as_background_process(
+        self.hs.run_as_background_process(
            "process-replication-data",
-            self.server_name,
            self._unsafe_process_queue,
            stream_name,
        )
synapse/replication/tcp/protocol.py CHANGED
@@ -42,7 +42,6 @@ from synapse.logging.context import PreserveLoggingContext
 from synapse.metrics import SERVER_NAME_LABEL, LaterGauge
 from synapse.metrics.background_process_metrics import (
     BackgroundProcessLoggingContext,
-    run_as_background_process,
 )
 from synapse.replication.tcp.commands import (
     VALID_CLIENT_COMMANDS,
@@ -140,9 +139,14 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
     max_line_buffer = 10000
 
     def __init__(
-        self, server_name: str, clock: Clock, handler: "ReplicationCommandHandler"
+        self,
+        hs: "HomeServer",
+        server_name: str,
+        clock: Clock,
+        handler: "ReplicationCommandHandler",
     ):
         self.server_name = server_name
+        self.hs = hs
         self.clock = clock
         self.command_handler = handler
 
@@ -290,9 +294,8 @@ class BaseReplicationStreamProtocol(LineOnlyReceiver):
         # if so.
 
         if isawaitable(res):
-            run_as_background_process(
+            self.hs.run_as_background_process(
                 "replication-" + cmd.get_logcontext_id(),
-                self.server_name,
                 lambda: res,
             )
 
@@ -470,9 +473,13 @@ class ServerReplicationStreamProtocol(BaseReplicationStreamProtocol):
     VALID_OUTBOUND_COMMANDS = VALID_SERVER_COMMANDS
 
     def __init__(
-        self, server_name: str, clock: Clock, handler: "ReplicationCommandHandler"
+        self,
+        hs: "HomeServer",
+        server_name: str,
+        clock: Clock,
+        handler: "ReplicationCommandHandler",
     ):
-        super().__init__(server_name, clock, handler)
+        super().__init__(hs, server_name, clock, handler)
 
         self.server_name = server_name
 
@@ -497,7 +504,7 @@ class ClientReplicationStreamProtocol(BaseReplicationStreamProtocol):
         clock: Clock,
         command_handler: "ReplicationCommandHandler",
     ):
-        super().__init__(server_name, clock, command_handler)
+        super().__init__(hs, server_name, clock, command_handler)
 
         self.client_name = client_name
         self.server_name = server_name
synapse/replication/tcp/redis.py CHANGED
@@ -40,7 +40,6 @@ from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
 from synapse.metrics import SERVER_NAME_LABEL
 from synapse.metrics.background_process_metrics import (
     BackgroundProcessLoggingContext,
-    run_as_background_process,
     wrap_as_background_process,
 )
 from synapse.replication.tcp.commands import (
@@ -109,6 +108,7 @@ class RedisSubscriber(SubscriberProtocol):
     """
 
     server_name: str
+    hs: "HomeServer"
     synapse_handler: "ReplicationCommandHandler"
     synapse_stream_prefix: str
     synapse_channel_names: List[str]
@@ -146,9 +146,7 @@ class RedisSubscriber(SubscriberProtocol):
     def connectionMade(self) -> None:
         logger.info("Connected to redis")
         super().connectionMade()
-        run_as_background_process(
-            "subscribe-replication", self.server_name, self._send_subscribe
-        )
+        self.hs.run_as_background_process("subscribe-replication", self._send_subscribe)
 
     async def _send_subscribe(self) -> None:
         # it's important to make sure that we only send the REPLICATE command once we
@@ -223,8 +221,8 @@ class RedisSubscriber(SubscriberProtocol):
         # if so.
 
         if isawaitable(res):
-            run_as_background_process(
-                "replication-" + cmd.get_logcontext_id(), self.server_name, lambda: res
+            self.hs.run_as_background_process(
+                "replication-" + cmd.get_logcontext_id(), lambda: res
             )
 
     def connectionLost(self, reason: Failure) -> None:  # type: ignore[override]
@@ -245,11 +243,17 @@ class RedisSubscriber(SubscriberProtocol):
         Args:
             cmd: The command to send
         """
-        run_as_background_process(
+        self.hs.run_as_background_process(
            "send-cmd",
-            self.server_name,
            self._async_send_command,
            cmd,
+            # We originally started tracing background processes to avoid `There was no
+            # active span` errors but this change meant we started generating 15x the
+            # number of spans than before (this is one of the most heavily called
+            # instances of `run_as_background_process`).
+            #
+            # Since we don't log or tag a tracing span in the downstream
+            # code, we can safely disable this.
            bg_start_span=False,
        )
 
@@ -310,9 +314,8 @@ class SynapseRedisFactory(RedisFactory):
             convertNumbers=convertNumbers,
         )
 
-        self.server_name = (
-            hs.hostname
-        )  # nb must be called this for @wrap_as_background_process
+        self.hs = hs  # nb must be called this for @wrap_as_background_process
+        self.server_name = hs.hostname
 
         hs.get_clock().looping_call(self._send_ping, 30 * 1000)
 
@@ -390,6 +393,7 @@ class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory):
         )
 
         self.server_name = hs.hostname
+        self.hs = hs
         self.synapse_handler = hs.get_replication_command_handler()
         self.synapse_stream_prefix = hs.hostname
         self.synapse_channel_names = channel_names
@@ -405,6 +409,7 @@ class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory):
         # the base method does some other things than just instantiating the
         # protocol.
         p.server_name = self.server_name
+        p.hs = self.hs
         p.synapse_handler = self.synapse_handler
         p.synapse_outbound_redis_connection = self.synapse_outbound_redis_connection
         p.synapse_stream_prefix = self.synapse_stream_prefix
synapse/replication/tcp/resource.py CHANGED
@@ -30,7 +30,6 @@ from twisted.internet.interfaces import IAddress
 from twisted.internet.protocol import ServerFactory
 
 from synapse.metrics import SERVER_NAME_LABEL
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.tcp.commands import PositionCommand
 from synapse.replication.tcp.protocol import ServerReplicationStreamProtocol
 from synapse.replication.tcp.streams import EventsStream
@@ -55,6 +54,7 @@ class ReplicationStreamProtocolFactory(ServerFactory):
     def __init__(self, hs: "HomeServer"):
         self.command_handler = hs.get_replication_command_handler()
         self.clock = hs.get_clock()
+        self.hs = hs
         self.server_name = hs.config.server.server_name
 
         # If we've created a `ReplicationStreamProtocolFactory` then we're
@@ -69,7 +69,7 @@ class ReplicationStreamProtocolFactory(ServerFactory):
 
     def buildProtocol(self, addr: IAddress) -> ServerReplicationStreamProtocol:
         return ServerReplicationStreamProtocol(
-            self.server_name, self.clock, self.command_handler
+            self.hs, self.server_name, self.clock, self.command_handler
         )
 
 
@@ -82,6 +82,7 @@ class ReplicationStreamer:
 
     def __init__(self, hs: "HomeServer"):
         self.server_name = hs.hostname
+        self.hs = hs
         self.store = hs.get_datastores().main
         self.clock = hs.get_clock()
         self.notifier = hs.get_notifier()
@@ -147,8 +148,8 @@ class ReplicationStreamer:
             logger.debug("Notifier poke loop already running")
             return
 
-        run_as_background_process(
-            "replication_notifier", self.server_name, self._run_notifier_loop
+        self.hs.run_as_background_process(
+            "replication_notifier", self._run_notifier_loop
        )
 
    async def _run_notifier_loop(self) -> None:
synapse/replication/tcp/streams/__init__.py CHANGED
@@ -77,6 +77,7 @@ STREAMS_MAP = {
 __all__ = [
     "STREAMS_MAP",
     "Stream",
+    "EventsStream",
     "BackfillStream",
     "PresenceStream",
     "PresenceFederationStream",
@@ -87,6 +88,7 @@ __all__ = [
     "CachesStream",
     "DeviceListsStream",
     "ToDeviceStream",
+    "FederationStream",
     "AccountDataStream",
     "ThreadSubscriptionsStream",
     "UnPartialStatedRoomStream",
synapse/res/providers.json CHANGED
@@ -1,14 +1,15 @@
 [
     {
-        "provider_name": "Twitter",
-        "provider_url": "http://www.twitter.com/",
+        "provider_name": "X",
+        "provider_url": "https://x.com/",
         "endpoints": [
             {
                 "schemes": [
-                    "https://twitter.com/*/moments/*",
-                    "https://*.twitter.com/*/moments/*"
+                    "https://x.com/*",
+                    "https://x.com/*/status/*",
+                    "https://*.x.com/*/status/*"
                 ],
-                "url": "https://publish.twitter.com/oembed"
+                "url": "https://publish.x.com/oembed"
             }
         ]
     },
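For context, Synapse's URL previewer consults these oEmbed provider entries and only calls a provider's endpoint when the previewed URL matches one of the declared scheme globs. The snippet below is a rough, standalone illustration of that kind of glob matching using fnmatch; it is not Synapse's exact matching code.

import fnmatch

# Scheme globs copied from the updated provider entry above.
SCHEMES = [
    "https://x.com/*",
    "https://x.com/*/status/*",
    "https://*.x.com/*/status/*",
]


def matches_oembed_provider(url: str) -> bool:
    """Return True if the URL matches any of the provider's scheme globs."""
    return any(fnmatch.fnmatch(url, scheme) for scheme in SCHEMES)


print(matches_oembed_provider("https://x.com/someuser/status/123"))  # True
print(matches_oembed_provider("https://example.com/someuser/123"))   # False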
synapse/rest/__init__.py CHANGED
@@ -42,6 +42,7 @@ from synapse.rest.client import (
     login,
     login_token_request,
     logout,
+    matrixrtc,
     mutual_rooms,
     notifications,
     openid,
@@ -89,6 +90,7 @@ CLIENT_SERVLET_FUNCTIONS: Tuple[RegisterServletsFunc, ...] = (
     presence.register_servlets,
     directory.register_servlets,
     voip.register_servlets,
+    matrixrtc.register_servlets,
     pusher.register_servlets,
     push_rule.register_servlets,
     logout.register_servlets,
synapse/rest/admin/__init__.py CHANGED
@@ -57,6 +57,9 @@ from synapse.rest.admin.event_reports import (
     EventReportDetailRestServlet,
     EventReportsRestServlet,
 )
+from synapse.rest.admin.events import (
+    EventRestServlet,
+)
 from synapse.rest.admin.experimental_features import ExperimentalFeaturesRestServlet
 from synapse.rest.admin.federation import (
     DestinationMembershipRestServlet,
@@ -339,6 +342,7 @@ def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
     ExperimentalFeaturesRestServlet(hs).register(http_server)
     SuspendAccountRestServlet(hs).register(http_server)
     ScheduledTasksRestServlet(hs).register(http_server)
+    EventRestServlet(hs).register(http_server)
 
 
 def register_servlets_for_client_rest_resource(
synapse/rest/admin/events.py ADDED
@@ -0,0 +1,69 @@
+from http import HTTPStatus
+from typing import TYPE_CHECKING, Tuple
+
+from synapse.api.errors import NotFoundError
+from synapse.events.utils import (
+    SerializeEventConfig,
+    format_event_raw,
+    serialize_event,
+)
+from synapse.http.servlet import RestServlet
+from synapse.http.site import SynapseRequest
+from synapse.rest.admin import admin_patterns
+from synapse.rest.admin._base import assert_user_is_admin
+from synapse.storage.databases.main.events_worker import EventRedactBehaviour
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+
+class EventRestServlet(RestServlet):
+    """
+    Get an event that is known to the homeserver.
+    The requester must have administrator access in Synapse.
+
+    GET /_synapse/admin/v1/fetch_event/<event_id>
+    returns:
+        200 OK with event json if the event is known to the homeserver. Otherwise raises
+        a NotFound error.
+
+    Args:
+        event_id: the id of the requested event.
+    Returns:
+        JSON blob of the event
+    """
+
+    PATTERNS = admin_patterns("/fetch_event/(?P<event_id>[^/]*)$")
+
+    def __init__(self, hs: "HomeServer"):
+        self._auth = hs.get_auth()
+        self._store = hs.get_datastores().main
+        self._clock = hs.get_clock()
+
+    async def on_GET(
+        self, request: SynapseRequest, event_id: str
+    ) -> Tuple[int, JsonDict]:
+        requester = await self._auth.get_user_by_req(request)
+        await assert_user_is_admin(self._auth, requester)
+
+        event = await self._store.get_event(
+            event_id,
+            EventRedactBehaviour.as_is,
+            allow_none=True,
+        )
+
+        if event is None:
+            raise NotFoundError("Event not found")
+
+        config = SerializeEventConfig(
+            as_client_event=False,
+            event_format=format_event_raw,
+            requester=requester,
+            only_event_fields=None,
+            include_stripped_room_state=True,
+            include_admin_metadata=True,
+        )
+        res = {"event": serialize_event(event, self._clock.time_msec(), config=config)}
+
+        return HTTPStatus.OK, res
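A quick sketch of exercising the new admin endpoint from outside the server, assuming the third-party requests library, a reachable homeserver URL, and an admin access token (all placeholder values below):

import requests  # assumed third-party dependency for this sketch
from urllib.parse import quote

HOMESERVER = "https://matrix.example.org"  # placeholder
ADMIN_TOKEN = "YOUR_ADMIN_ACCESS_TOKEN"    # placeholder
EVENT_ID = "$someEventId"                  # placeholder

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/fetch_event/{quote(EVENT_ID, safe='')}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
# The servlet wraps the serialized event under an "event" key.
print(resp.json()["event"])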
synapse/rest/admin/media.py CHANGED
@@ -18,7 +18,6 @@
 # [This file includes modifications made by New Vector Limited]
 #
 #
-
 import logging
 from http import HTTPStatus
 from typing import TYPE_CHECKING, Optional, Tuple
@@ -41,7 +40,9 @@ from synapse.rest.admin._base import (
     assert_requester_is_admin,
     assert_user_is_admin,
 )
-from synapse.storage.databases.main.media_repository import MediaSortOrder
+from synapse.storage.databases.main.media_repository import (
+    MediaSortOrder,
+)
 from synapse.types import JsonDict, UserID
 
 if TYPE_CHECKING:
@@ -50,6 +51,72 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+class QueryMediaById(RestServlet):
+    """
+    Fetch info about a piece of local or cached remote media.
+    """
+
+    PATTERNS = admin_patterns("/media/(?P<server_name>[^/]*)/(?P<media_id>[^/]*)$")
+
+    def __init__(self, hs: "HomeServer"):
+        self.store = hs.get_datastores().main
+        self.auth = hs.get_auth()
+        self.server_name = hs.hostname
+        self.hs = hs
+        self.media_repo = hs.get_media_repository()
+
+    async def on_GET(
+        self, request: SynapseRequest, server_name: str, media_id: str
+    ) -> Tuple[int, JsonDict]:
+        requester = await self.auth.get_user_by_req(request)
+        await assert_user_is_admin(self.auth, requester)
+
+        if not self.hs.is_mine_server_name(server_name):
+            remote_media_info = await self.media_repo.get_cached_remote_media_info(
+                server_name, media_id
+            )
+            if remote_media_info is None:
+                raise NotFoundError("Unknown media")
+            resp = {
+                "media_origin": remote_media_info.media_origin,
+                "user_id": None,
+                "media_id": remote_media_info.media_id,
+                "media_type": remote_media_info.media_type,
+                "media_length": remote_media_info.media_length,
+                "upload_name": remote_media_info.upload_name,
+                "created_ts": remote_media_info.created_ts,
+                "filesystem_id": remote_media_info.filesystem_id,
+                "url_cache": None,
+                "last_access_ts": remote_media_info.last_access_ts,
+                "quarantined_by": remote_media_info.quarantined_by,
+                "authenticated": remote_media_info.authenticated,
+                "safe_from_quarantine": None,
+                "sha256": remote_media_info.sha256,
+            }
+        else:
+            local_media_info = await self.store.get_local_media(media_id)
+            if local_media_info is None:
+                raise NotFoundError("Unknown media")
+            resp = {
+                "media_origin": None,
+                "user_id": local_media_info.user_id,
+                "media_id": local_media_info.media_id,
+                "media_type": local_media_info.media_type,
+                "media_length": local_media_info.media_length,
+                "upload_name": local_media_info.upload_name,
+                "created_ts": local_media_info.created_ts,
+                "filesystem_id": None,
+                "url_cache": local_media_info.url_cache,
+                "last_access_ts": local_media_info.last_access_ts,
+                "quarantined_by": local_media_info.quarantined_by,
+                "authenticated": local_media_info.authenticated,
+                "safe_from_quarantine": local_media_info.safe_from_quarantine,
+                "sha256": local_media_info.sha256,
+            }
+
+        return HTTPStatus.OK, {"media_info": resp}
+
+
 class QuarantineMediaInRoom(RestServlet):
     """Quarantines all media in a room so that no one can download it via
     this server.
@@ -470,3 +537,4 @@ def register_servlets_for_media_repo(hs: "HomeServer", http_server: HttpServer)
     DeleteMediaByDateSize(hs).register(http_server)
     DeleteMediaByID(hs).register(http_server)
     UserMediaRestServlet(hs).register(http_server)
+    QueryMediaById(hs).register(http_server)
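The new QueryMediaById servlet can be exercised the same way; a sketch with placeholder values (requests library, homeserver URL, admin token, and the origin/media ID parts of an mxc:// URI):

import requests  # assumed third-party dependency for this sketch

HOMESERVER = "https://matrix.example.org"  # placeholder
ADMIN_TOKEN = "YOUR_ADMIN_ACCESS_TOKEN"    # placeholder
ORIGIN = "matrix.example.org"              # server part of the mxc:// URI (placeholder)
MEDIA_ID = "abcdefghijklmnop"              # media ID part of the mxc:// URI (placeholder)

resp = requests.get(
    f"{HOMESERVER}/_synapse/admin/v1/media/{ORIGIN}/{MEDIA_ID}",
    headers={"Authorization": f"Bearer {ADMIN_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
# The response nests the fields shown in the diff under "media_info".
info = resp.json()["media_info"]
print(info["media_type"], info["media_length"], info["quarantined_by"])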
synapse/rest/client/matrixrtc.py ADDED
@@ -0,0 +1,52 @@
+#
+# This file is licensed under the Affero General Public License (AGPL) version 3.
+#
+# Copyright (C) 2025 New Vector, Ltd
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# See the GNU Affero General Public License for more details:
+# <https://www.gnu.org/licenses/agpl-3.0.html>.
+#
+# [This file includes modifications made by New Vector Limited]
+#
+#
+
+from typing import TYPE_CHECKING, Tuple
+
+from synapse.http.server import HttpServer
+from synapse.http.servlet import RestServlet
+from synapse.http.site import SynapseRequest
+from synapse.rest.client._base import client_patterns
+from synapse.types import JsonDict
+
+if TYPE_CHECKING:
+    from synapse.server import HomeServer
+
+
+class MatrixRTCRestServlet(RestServlet):
+    PATTERNS = client_patterns(r"/org\.matrix\.msc4143/rtc/transports$", releases=())
+    CATEGORY = "Client API requests"
+
+    def __init__(self, hs: "HomeServer"):
+        super().__init__()
+        self._hs = hs
+        self._auth = hs.get_auth()
+        self._transports = hs.config.matrix_rtc.transports
+
+    async def on_GET(self, request: SynapseRequest) -> Tuple[int, JsonDict]:
+        # Require authentication for this endpoint.
+        await self._auth.get_user_by_req(request)
+
+        if self._transports:
+            return 200, {"rtc_transports": self._transports}
+
+        return 200, {}
+
+
+def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
+    if hs.config.experimental.msc4143_enabled:
+        MatrixRTCRestServlet(hs).register(http_server)
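A sketch of calling the new endpoint once MSC4143 support is enabled. Because the servlet is registered with releases=(), it should only be reachable under the unstable client prefix, and only when the msc4143_enabled experimental flag is set; the URL and token below are placeholders.

import requests  # assumed third-party dependency for this sketch

HOMESERVER = "https://matrix.example.org"  # placeholder
ACCESS_TOKEN = "YOUR_ACCESS_TOKEN"         # placeholder (any authenticated user)

resp = requests.get(
    f"{HOMESERVER}/_matrix/client/unstable/org.matrix.msc4143/rtc/transports",
    headers={"Authorization": f"Bearer {ACCESS_TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
# {"rtc_transports": [...]} when transports are configured, otherwise {}.
print(resp.json())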
synapse/rest/client/push_rule.py CHANGED
@@ -65,7 +65,7 @@ class PushRuleRestServlet(RestServlet):
             hs.get_instance_name() in hs.config.worker.writers.push_rules
         )
         self._push_rules_handler = hs.get_push_rules_handler()
-        self._push_rule_linearizer = Linearizer(name="push_rules")
+        self._push_rule_linearizer = Linearizer(name="push_rules", clock=hs.get_clock())
 
     async def on_PUT(self, request: SynapseRequest, path: str) -> Tuple[int, JsonDict]:
         if not self._is_push_worker:
synapse/rest/client/room.py CHANGED
@@ -66,7 +66,6 @@ from synapse.http.site import SynapseRequest
 from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.logging.opentracing import set_tag
 from synapse.metrics import SERVER_NAME_LABEL
-from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.rest.client._base import client_patterns
 from synapse.rest.client.transactions import HttpTransactionCache
 from synapse.state import CREATE_KEY, POWER_KEY
@@ -1225,6 +1224,7 @@ class RoomRedactEventRestServlet(TransactionRestServlet):
     def __init__(self, hs: "HomeServer"):
         super().__init__(hs)
         self.server_name = hs.hostname
+        self.hs = hs
         self.event_creation_handler = hs.get_event_creation_handler()
         self.auth = hs.get_auth()
         self._store = hs.get_datastores().main
@@ -1307,9 +1307,8 @@ class RoomRedactEventRestServlet(TransactionRestServlet):
         )
 
         if with_relations:
-            run_as_background_process(
+            self.hs.run_as_background_process(
                 "redact_related_events",
-                self.server_name,
                 self._relation_handler.redact_events_related_to,
                 requester=requester,
                 event_id=event_id,
synapse/rest/client/sync.py CHANGED
@@ -126,6 +126,7 @@ class SyncRestServlet(RestServlet):
 
         self._json_filter_cache: LruCache[str, bool] = LruCache(
             max_size=1000,
+            clock=self.clock,
             cache_name="sync_valid_filter",
             server_name=self.server_name,
         )
synapse/rest/client/transactions.py CHANGED
@@ -56,7 +56,7 @@ class HttpTransactionCache:
         ] = {}
         # Try to clean entries every 30 mins. This means entries will exist
         # for at *LEAST* 30 mins, and at *MOST* 60 mins.
-        self.cleaner = self.clock.looping_call(self._cleanup, CLEANUP_PERIOD_MS)
+        self.clock.looping_call(self._cleanup, CLEANUP_PERIOD_MS)
 
     def _get_transaction_key(self, request: IRequest, requester: Requester) -> Hashable:
         """A helper function which returns a transaction key that can be used