matrix-synapse 1.139.0rc2__cp39-abi3-musllinux_1_2_aarch64.whl → 1.140.0rc1__cp39-abi3-musllinux_1_2_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of matrix-synapse might be problematic.
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/METADATA +5 -3
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/RECORD +158 -155
- synapse/_scripts/generate_workers_map.py +6 -1
- synapse/_scripts/synapse_port_db.py +0 -2
- synapse/_scripts/update_synapse_database.py +1 -6
- synapse/api/auth/base.py +1 -3
- synapse/api/auth/mas.py +6 -8
- synapse/api/auth/msc3861_delegated.py +6 -8
- synapse/api/errors.py +3 -0
- synapse/app/_base.py +101 -39
- synapse/app/admin_cmd.py +2 -4
- synapse/app/appservice.py +1 -1
- synapse/app/client_reader.py +1 -1
- synapse/app/event_creator.py +1 -1
- synapse/app/federation_reader.py +1 -1
- synapse/app/federation_sender.py +1 -1
- synapse/app/frontend_proxy.py +1 -1
- synapse/app/generic_worker.py +17 -11
- synapse/app/homeserver.py +85 -47
- synapse/app/media_repository.py +1 -1
- synapse/app/phone_stats_home.py +16 -14
- synapse/app/pusher.py +1 -1
- synapse/app/synchrotron.py +1 -1
- synapse/app/user_dir.py +1 -1
- synapse/appservice/__init__.py +29 -2
- synapse/appservice/scheduler.py +8 -8
- synapse/config/_base.py +32 -14
- synapse/config/_base.pyi +5 -3
- synapse/config/experimental.py +3 -0
- synapse/config/homeserver.py +27 -1
- synapse/config/logger.py +3 -4
- synapse/config/matrixrtc.py +67 -0
- synapse/crypto/keyring.py +18 -4
- synapse/events/auto_accept_invites.py +0 -1
- synapse/federation/federation_client.py +39 -0
- synapse/federation/federation_server.py +1 -1
- synapse/federation/send_queue.py +3 -0
- synapse/federation/sender/__init__.py +24 -8
- synapse/federation/sender/per_destination_queue.py +31 -8
- synapse/federation/sender/transaction_manager.py +12 -0
- synapse/federation/transport/client.py +29 -0
- synapse/handlers/account_validity.py +2 -4
- synapse/handlers/appservice.py +5 -7
- synapse/handlers/deactivate_account.py +2 -3
- synapse/handlers/delayed_events.py +10 -13
- synapse/handlers/device.py +14 -14
- synapse/handlers/e2e_keys.py +16 -11
- synapse/handlers/federation.py +7 -11
- synapse/handlers/federation_event.py +5 -6
- synapse/handlers/message.py +16 -10
- synapse/handlers/pagination.py +3 -7
- synapse/handlers/presence.py +21 -25
- synapse/handlers/profile.py +1 -1
- synapse/handlers/read_marker.py +3 -1
- synapse/handlers/register.py +8 -1
- synapse/handlers/room.py +13 -4
- synapse/handlers/room_member.py +11 -7
- synapse/handlers/room_policy.py +96 -2
- synapse/handlers/sso.py +1 -1
- synapse/handlers/stats.py +5 -3
- synapse/handlers/sync.py +20 -13
- synapse/handlers/typing.py +5 -10
- synapse/handlers/user_directory.py +12 -11
- synapse/handlers/worker_lock.py +19 -15
- synapse/http/client.py +18 -13
- synapse/http/federation/matrix_federation_agent.py +6 -1
- synapse/http/federation/well_known_resolver.py +3 -1
- synapse/http/matrixfederationclient.py +50 -11
- synapse/http/proxy.py +2 -2
- synapse/http/server.py +36 -2
- synapse/http/site.py +109 -17
- synapse/logging/context.py +201 -110
- synapse/logging/opentracing.py +30 -6
- synapse/logging/scopecontextmanager.py +161 -0
- synapse/media/_base.py +2 -1
- synapse/media/media_repository.py +20 -6
- synapse/media/url_previewer.py +5 -6
- synapse/metrics/_gc.py +3 -1
- synapse/metrics/background_process_metrics.py +128 -24
- synapse/metrics/common_usage_metrics.py +3 -5
- synapse/module_api/__init__.py +42 -5
- synapse/notifier.py +10 -3
- synapse/push/emailpusher.py +5 -4
- synapse/push/httppusher.py +6 -6
- synapse/push/pusherpool.py +3 -8
- synapse/replication/http/devices.py +0 -41
- synapse/replication/tcp/client.py +8 -5
- synapse/replication/tcp/handler.py +2 -3
- synapse/replication/tcp/protocol.py +14 -7
- synapse/replication/tcp/redis.py +16 -11
- synapse/replication/tcp/resource.py +5 -4
- synapse/replication/tcp/streams/__init__.py +2 -0
- synapse/res/providers.json +6 -5
- synapse/rest/__init__.py +2 -0
- synapse/rest/admin/__init__.py +4 -0
- synapse/rest/admin/events.py +69 -0
- synapse/rest/admin/media.py +70 -2
- synapse/rest/client/keys.py +147 -3
- synapse/rest/client/matrixrtc.py +52 -0
- synapse/rest/client/push_rule.py +1 -1
- synapse/rest/client/room.py +2 -3
- synapse/rest/client/sync.py +1 -3
- synapse/rest/client/transactions.py +1 -1
- synapse/server.py +271 -38
- synapse/server_notices/server_notices_manager.py +1 -0
- synapse/state/__init__.py +4 -1
- synapse/storage/_base.py +1 -1
- synapse/storage/background_updates.py +8 -3
- synapse/storage/controllers/persist_events.py +4 -3
- synapse/storage/controllers/purge_events.py +2 -3
- synapse/storage/controllers/state.py +5 -5
- synapse/storage/database.py +12 -7
- synapse/storage/databases/main/__init__.py +7 -2
- synapse/storage/databases/main/cache.py +4 -3
- synapse/storage/databases/main/censor_events.py +1 -1
- synapse/storage/databases/main/client_ips.py +9 -8
- synapse/storage/databases/main/deviceinbox.py +7 -6
- synapse/storage/databases/main/devices.py +4 -4
- synapse/storage/databases/main/end_to_end_keys.py +6 -3
- synapse/storage/databases/main/event_federation.py +7 -6
- synapse/storage/databases/main/event_push_actions.py +13 -13
- synapse/storage/databases/main/events_bg_updates.py +1 -1
- synapse/storage/databases/main/events_worker.py +6 -8
- synapse/storage/databases/main/lock.py +17 -13
- synapse/storage/databases/main/media_repository.py +2 -2
- synapse/storage/databases/main/metrics.py +6 -6
- synapse/storage/databases/main/monthly_active_users.py +3 -4
- synapse/storage/databases/main/receipts.py +1 -1
- synapse/storage/databases/main/registration.py +18 -19
- synapse/storage/databases/main/roommember.py +1 -1
- synapse/storage/databases/main/session.py +3 -3
- synapse/storage/databases/main/sliding_sync.py +2 -2
- synapse/storage/databases/main/transactions.py +3 -3
- synapse/storage/databases/state/store.py +2 -0
- synapse/synapse_rust/http_client.pyi +4 -0
- synapse/synapse_rust.abi3.so +0 -0
- synapse/util/async_helpers.py +36 -24
- synapse/util/batching_queue.py +16 -6
- synapse/util/caches/__init__.py +1 -1
- synapse/util/caches/deferred_cache.py +4 -0
- synapse/util/caches/descriptors.py +14 -2
- synapse/util/caches/dictionary_cache.py +6 -1
- synapse/util/caches/expiringcache.py +16 -5
- synapse/util/caches/lrucache.py +14 -26
- synapse/util/caches/response_cache.py +11 -1
- synapse/util/clock.py +215 -39
- synapse/util/constants.py +2 -0
- synapse/util/daemonize.py +5 -1
- synapse/util/distributor.py +9 -5
- synapse/util/metrics.py +35 -6
- synapse/util/ratelimitutils.py +4 -1
- synapse/util/retryutils.py +7 -4
- synapse/util/task_scheduler.py +11 -14
- synapse/logging/filter.py +0 -38
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/AUTHORS.rst +0 -0
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/LICENSE-AGPL-3.0 +0 -0
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/LICENSE-COMMERCIAL +0 -0
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/WHEEL +0 -0
- {matrix_synapse-1.139.0rc2.dist-info → matrix_synapse-1.140.0rc1.dist-info}/entry_points.txt +0 -0
synapse/http/matrixfederationclient.py  CHANGED

@@ -90,6 +90,7 @@ from synapse.logging.opentracing import set_tag, start_active_span, tags
 from synapse.metrics import SERVER_NAME_LABEL
 from synapse.types import JsonDict
 from synapse.util.async_helpers import AwakenableSleeper, Linearizer, timeout_deferred
+from synapse.util.clock import Clock
 from synapse.util.json import json_decoder
 from synapse.util.metrics import Measure
 from synapse.util.stringutils import parse_and_validate_server_name

@@ -270,6 +271,7 @@ class LegacyJsonSendParser(_BaseJsonParser[Tuple[int, JsonDict]]):


 async def _handle_response(
+    clock: Clock,
     reactor: IReactorTime,
     timeout_sec: float,
     request: MatrixFederationRequest,

@@ -299,7 +301,11 @@ async def _handle_response(
         check_content_type_is(response.headers, parser.CONTENT_TYPE)

         d = read_body_with_max_size(response, parser, max_response_size)
-        d = timeout_deferred(
+        d = timeout_deferred(
+            deferred=d,
+            timeout=timeout_sec,
+            clock=clock,
+        )

         length = await make_deferred_yieldable(d)
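The recurring change in this file is that `timeout_deferred` is now called with keyword arguments and an explicit `clock` rather than a reactor. A minimal sketch of the underlying idea — arming a timeout on a Deferred against an injected clock so tests can drive time deterministically — using only Twisted's own `Deferred.addTimeout` and `task.Clock`, not Synapse's `timeout_deferred` helper:

# Minimal sketch (Twisted APIs only, not Synapse's timeout_deferred): arm a timeout
# on a Deferred against an explicitly injected clock, then drive time from a test.
from twisted.internet.defer import Deferred
from twisted.internet.task import Clock

clock = Clock()          # fake reactor clock; .advance() moves time forward
d: Deferred = Deferred()
d.addTimeout(10, clock)  # cancel the deferred if nothing fires it within 10 seconds

failures = []
d.addErrback(failures.append)  # collect the resulting timeout failure

clock.advance(11)        # simulate 11 seconds passing with no response
assert failures, "the deferred should have timed out"

Passing the clock explicitly is what makes this kind of deterministic test possible; with an implicit global reactor the timeout could only be exercised in real time.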
@@ -411,6 +417,7 @@ class MatrixFederationHttpClient:
         self.server_name = hs.hostname

         self.reactor = hs.get_reactor()
+        self.clock = hs.get_clock()

         user_agent = hs.version_string
         if hs.config.server.user_agent_suffix:

@@ -424,6 +431,7 @@ class MatrixFederationHttpClient:
         federation_agent: IAgent = MatrixFederationAgent(
             server_name=self.server_name,
             reactor=self.reactor,
+            clock=self.clock,
             tls_client_options_factory=tls_client_options_factory,
             user_agent=user_agent.encode("ascii"),
             ip_allowlist=hs.config.server.federation_ip_range_allowlist,

@@ -457,7 +465,6 @@ class MatrixFederationHttpClient:
             ip_blocklist=hs.config.server.federation_ip_range_blocklist,
         )

-        self.clock = hs.get_clock()
         self._store = hs.get_datastores().main
         self.version_string_bytes = hs.version_string.encode("ascii")
         self.default_timeout_seconds = hs.config.federation.client_timeout_ms / 1000

@@ -470,9 +477,9 @@ class MatrixFederationHttpClient:
         self.max_long_retries = hs.config.federation.max_long_retries
         self.max_short_retries = hs.config.federation.max_short_retries

-        self._cooperator = Cooperator(scheduler=_make_scheduler(self.
+        self._cooperator = Cooperator(scheduler=_make_scheduler(self.clock))

-        self._sleeper = AwakenableSleeper(self.
+        self._sleeper = AwakenableSleeper(self.clock)

         self._simple_http_client = SimpleHttpClient(
             hs,

@@ -481,7 +488,13 @@ class MatrixFederationHttpClient:
             use_proxy=True,
         )

-        self.remote_download_linearizer = Linearizer(
+        self.remote_download_linearizer = Linearizer(
+            name="remote_download_linearizer", max_count=6, clock=self.clock
+        )
+        self._is_shutdown = False
+
+    def shutdown(self) -> None:
+        self._is_shutdown = True

     def wake_destination(self, destination: str) -> None:
         """Called when the remote server may have come back online."""
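These hunks thread a single `self.clock = hs.get_clock()` instance through every time-dependent collaborator (the federation agent, the cooperator scheduler, the sleeper, the download linearizer) instead of each helper reaching for the reactor on its own. A hedged, self-contained sketch of that dependency-injection pattern follows; `FakeClock` and `Downloader` are illustrative names, not Synapse classes.

# Illustrative only: one injected clock shared by every helper that needs time,
# so tests can swap in a fake and advance it deterministically.
class FakeClock:
    def __init__(self) -> None:
        self._now = 0.0

    def seconds(self) -> float:
        return self._now

    def advance(self, n: float) -> None:
        self._now += n


class Downloader:
    def __init__(self, clock: FakeClock) -> None:
        self.clock = clock  # same instance the rest of the client would use

    def is_expired(self, fetched_at: float, ttl: float) -> bool:
        return self.clock.seconds() - fetched_at > ttl


clock = FakeClock()
downloader = Downloader(clock)
assert not downloader.is_expired(fetched_at=0.0, ttl=60.0)
clock.advance(120.0)
assert downloader.is_expired(fetched_at=0.0, ttl=60.0)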
@@ -627,6 +640,7 @@ class MatrixFederationHttpClient:
         limiter = await synapse.util.retryutils.get_retry_limiter(
             destination=request.destination,
             our_server_name=self.server_name,
+            hs=self.hs,
             clock=self.clock,
             store=self._store,
             backoff_on_404=backoff_on_404,

@@ -673,7 +687,7 @@ class MatrixFederationHttpClient:
             (b"", b"", path_bytes, None, query_bytes, b"")
         )

-        while
+        while not self._is_shutdown:
            try:
                json = request.get_json()
                if json:

@@ -731,9 +745,9 @@ class MatrixFederationHttpClient:
                        bodyProducer=producer,
                    )
                    request_deferred = timeout_deferred(
-                        request_deferred,
+                        deferred=request_deferred,
                        timeout=_sec_timeout,
-
+                        clock=self.clock,
                    )

                    response = await make_deferred_yieldable(request_deferred)

@@ -791,7 +805,9 @@ class MatrixFederationHttpClient:
                # Update transactions table?
                d = treq.content(response)
                d = timeout_deferred(
-                    d,
+                    deferred=d,
+                    timeout=_sec_timeout,
+                    clock=self.clock,
                )

                try:

@@ -860,6 +876,15 @@ class MatrixFederationHttpClient:
                        delay_seconds,
                    )

+                    if self._is_shutdown:
+                        # Immediately fail sending the request instead of starting a
+                        # potentially long sleep after the server has requested
+                        # shutdown.
+                        # This is the code path followed when the
+                        # `federation_transaction_transmission_loop` has been
+                        # cancelled.
+                        raise
+
                    # Sleep for the calculated delay, or wake up immediately
                    # if we get notified that the server is back up.
                    await self._sleeper.sleep(
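Together with the `_is_shutdown` flag and `shutdown()` method added above, the `while not self._is_shutdown:` condition and the pre-sleep check let the client abort retries promptly once shutdown has been requested, instead of sitting out a long backoff sleep. A rough, self-contained sketch of that pattern follows; the send callable and the `ShutdownRequested` error are placeholders, not Synapse APIs (Synapse re-raises the original request failure).

# Hedged sketch of a shutdown-aware retry loop: stop retrying, and never start a
# long backoff sleep, once shutdown() has been requested.
import time
from typing import Callable


class ShutdownRequested(Exception):
    """Placeholder error for this sketch only."""


class RetryingSender:
    def __init__(self) -> None:
        self._is_shutdown = False

    def shutdown(self) -> None:
        self._is_shutdown = True

    def send_with_retries(self, attempt_send: Callable[[], None], max_retries: int = 3) -> None:
        retries = 0
        while not self._is_shutdown:
            try:
                attempt_send()
                return
            except Exception:
                retries += 1
                if retries > max_retries:
                    raise
                if self._is_shutdown:
                    # Fail fast rather than sleeping after shutdown was requested.
                    raise
                time.sleep(min(2**retries, 30))  # capped exponential backoff
        raise ShutdownRequested("sender was shut down before the request could be sent")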
@@ -1072,6 +1097,7 @@ class MatrixFederationHttpClient:
         parser = cast(ByteParser[T], JsonParser())

         body = await _handle_response(
+            self.clock,
             self.reactor,
             _sec_timeout,
             request,

@@ -1150,7 +1176,13 @@ class MatrixFederationHttpClient:
             _sec_timeout = self.default_timeout_seconds

         body = await _handle_response(
-            self.
+            self.clock,
+            self.reactor,
+            _sec_timeout,
+            request,
+            response,
+            start_ms,
+            parser=JsonParser(),
         )
         return body

@@ -1356,6 +1388,7 @@ class MatrixFederationHttpClient:
         parser = cast(ByteParser[T], JsonParser())

         body = await _handle_response(
+            self.clock,
             self.reactor,
             _sec_timeout,
             request,

@@ -1429,7 +1462,13 @@ class MatrixFederationHttpClient:
             _sec_timeout = self.default_timeout_seconds

         body = await _handle_response(
-            self.
+            self.clock,
+            self.reactor,
+            _sec_timeout,
+            request,
+            response,
+            start_ms,
+            parser=JsonParser(),
         )
         return body
synapse/http/proxy.py  CHANGED

@@ -161,12 +161,12 @@ class ProxyResource(_AsyncResource):
             bodyProducer=QuieterFileBodyProducer(request.content),
         )
         request_deferred = timeout_deferred(
-            request_deferred,
+            deferred=request_deferred,
             # This should be set longer than the timeout in `MatrixFederationHttpClient`
             # so that it has enough time to complete and pass us the data before we give
             # up.
             timeout=90,
-
+            clock=self._clock,
         )

         response = await make_deferred_yieldable(request_deferred)
synapse/http/server.py  CHANGED

@@ -411,8 +411,26 @@ class DirectServeJsonResource(_AsyncResource):
         # Clock is optional as this class is exposed to the module API.
         clock: Optional[Clock] = None,
     ):
+        """
+        Args:
+            canonical_json: TODO
+            extract_context: TODO
+            clock: This is expected to be passed in by any Synapse code.
+                Only optional for the Module API.
+        """
+
         if clock is None:
-
+            # Ideally we wouldn't ignore the linter error here and instead enforce a
+            # required `Clock` be passed into the `__init__` function.
+            # However, this would change the function signature which is currently being
+            # exported to the module api. Since we don't want to break that api, we have
+            # to settle with ignoring the linter error here.
+            # As of the time of writing this, all Synapse internal usages of
+            # `DirectServeJsonResource` pass in the existing homeserver clock instance.
+            clock = Clock(  # type: ignore[multiple-internal-clocks]
+                cast(ISynapseThreadlessReactor, reactor),
+                server_name="synapse_module_running_from_unknown_server",
+            )

         super().__init__(clock, extract_context)
         self.canonical_json = canonical_json

@@ -590,8 +608,24 @@ class DirectServeHtmlResource(_AsyncResource):
         # Clock is optional as this class is exposed to the module API.
         clock: Optional[Clock] = None,
     ):
+        """
+        Args:
+            extract_context: TODO
+            clock: This is expected to be passed in by any Synapse code.
+                Only optional for the Module API.
+        """
         if clock is None:
-
+            # Ideally we wouldn't ignore the linter error here and instead enforce a
+            # required `Clock` be passed into the `__init__` function.
+            # However, this would change the function signature which is currently being
+            # exported to the module api. Since we don't want to break that api, we have
+            # to settle with ignoring the linter error here.
+            # As of the time of writing this, all Synapse internal usages of
+            # `DirectServeHtmlResource` pass in the existing homeserver clock instance.
+            clock = Clock(  # type: ignore[multiple-internal-clocks]
+                cast(ISynapseThreadlessReactor, reactor),
+                server_name="synapse_module_running_from_unknown_server",
+            )

         super().__init__(clock, extract_context)
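Both constructors follow the same backwards-compatible pattern: `clock` stays optional so the published module-API signature does not change, Synapse-internal callers are expected to pass the homeserver clock, and a fallback is built only when nothing was supplied. A generic sketch of that shape, using a stand-in `WallClock` rather than Synapse's `Clock`:

# Illustrative only: keep a newly-required dependency optional so an existing public
# constructor signature does not break, but build a fallback when it is omitted.
import time
from typing import Optional


class WallClock:
    """Stand-in for an injected clock; Synapse's real Clock wraps the reactor."""

    def seconds(self) -> float:
        return time.time()


class SimpleResource:
    def __init__(self, clock: Optional[WallClock] = None) -> None:
        if clock is None:
            # Internal callers should pass a shared clock; this fallback exists only
            # so third-party callers of the old signature keep working.
            clock = WallClock()
        self.clock = clock


internal = SimpleResource(clock=WallClock())  # preferred: explicit, shared instance
legacy = SimpleResource()                     # still works for callers of the old API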
synapse/http/site.py  CHANGED

@@ -22,7 +22,7 @@ import contextlib
 import logging
 import time
 from http import HTTPStatus
-from typing import TYPE_CHECKING, Any, Generator, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, Union

 import attr
 from zope.interface import implementer

@@ -30,6 +30,7 @@ from zope.interface import implementer
 from twisted.internet.address import UNIXAddress
 from twisted.internet.defer import Deferred
 from twisted.internet.interfaces import IAddress
+from twisted.internet.protocol import Protocol
 from twisted.python.failure import Failure
 from twisted.web.http import HTTPChannel
 from twisted.web.resource import IResource, Resource

@@ -302,10 +303,15 @@ class SynapseRequest(Request):
         # this is called once a Resource has been found to serve the request; in our
         # case the Resource in question will normally be a JsonResource.

-        #
+        # Create a LogContext for this request
+        #
+        # We only care about associating logs and tallying up metrics at the per-request
+        # level so we don't worry about setting the `parent_context`; preventing us from
+        # unnecessarily piling up metrics on the main process's context.
         request_id = self.get_request_id()
         self.logcontext = LoggingContext(
-            request_id,
+            name=request_id,
+            server_name=self.our_server_name,
             request=ContextRequest(
                 request_id=request_id,
                 ip_address=self.get_client_ip_if_available(),
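The `LoggingContext` hunk names the per-request context explicitly (`name=request_id`, `server_name=...`) so log lines and metrics are attributed to the request rather than piling up on the main process's context. As a rough, stdlib-only analogy for per-request log attribution (not Synapse's `LoggingContext` machinery), `contextvars` plus a logging filter can achieve a similar effect:

# Illustrative, stdlib-only sketch: tag every log record with the id of the request
# currently being handled, similar in spirit to a per-request logging context.
import contextvars
import logging

current_request_id: contextvars.ContextVar[str] = contextvars.ContextVar(
    "current_request_id", default="-"
)


class RequestIdFilter(logging.Filter):
    def filter(self, record: logging.LogRecord) -> bool:
        record.request_id = current_request_id.get()
        return True


handler = logging.StreamHandler()
handler.addFilter(RequestIdFilter())
handler.setFormatter(logging.Formatter("%(request_id)s %(levelname)s %(message)s"))
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)


def handle_request(request_id: str) -> None:
    token = current_request_id.set(request_id)
    try:
        logger.info("processing request")  # prints: GET-42 INFO processing request
    finally:
        current_request_id.reset(token)


handle_request("GET-42")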
@@ -655,6 +661,70 @@ class _XForwardedForAddress:
     host: str


+class SynapseProtocol(HTTPChannel):
+    """
+    Synapse-specific twisted http Protocol.
+
+    This is a small wrapper around the twisted HTTPChannel so we can track active
+    connections in order to close any outstanding connections on shutdown.
+    """
+
+    def __init__(
+        self,
+        site: "SynapseSite",
+        our_server_name: str,
+        max_request_body_size: int,
+        request_id_header: Optional[str],
+        request_class: type,
+    ):
+        super().__init__()
+        self.factory: SynapseSite = site
+        self.site = site
+        self.our_server_name = our_server_name
+        self.max_request_body_size = max_request_body_size
+        self.request_id_header = request_id_header
+        self.request_class = request_class
+
+    def connectionMade(self) -> None:
+        """
+        Called when a connection is made.
+
+        This may be considered the initializer of the protocol, because
+        it is called when the connection is completed.
+
+        Add the connection to the factory's connection list when it's established.
+        """
+        super().connectionMade()
+        self.factory.addConnection(self)
+
+    def connectionLost(self, reason: Failure) -> None:  # type: ignore[override]
+        """
+        Called when the connection is shut down.
+
+        Clear any circular references here, and any external references to this
+        Protocol. The connection has been closed. In our case, we need to remove the
+        connection from the factory's connection list, when it's lost.
+        """
+        super().connectionLost(reason)
+        self.factory.removeConnection(self)
+
+    def requestFactory(self, http_channel: HTTPChannel, queued: bool) -> SynapseRequest:  # type: ignore[override]
+        """
+        A callable used to build `twisted.web.iweb.IRequest` objects.
+
+        Use our own custom SynapseRequest type instead of the regular
+        twisted.web.server.Request.
+        """
+        return self.request_class(
+            self,
+            self.factory,
+            our_server_name=self.our_server_name,
+            max_request_body_size=self.max_request_body_size,
+            queued=queued,
+            request_id_header=self.request_id_header,
+        )
+
+
 class SynapseSite(ProxySite):
     """
     Synapse-specific twisted http Site

@@ -671,6 +741,7 @@ class SynapseSite(ProxySite):

     def __init__(
         self,
+        *,
         logger_name: str,
         site_tag: str,
         config: ListenerConfig,

@@ -705,23 +776,44 @@ class SynapseSite(ProxySite):

         assert config.http_options is not None
         proxied = config.http_options.x_forwarded
-        request_class = XForwardedForRequest if proxied else SynapseRequest
-
-        request_id_header = config.http_options.request_id_header
-
-        def request_factory(channel: HTTPChannel, queued: bool) -> Request:
-            return request_class(
-                channel,
-                self,
-                our_server_name=self.server_name,
-                max_request_body_size=max_request_body_size,
-                queued=queued,
-                request_id_header=request_id_header,
-            )
+        self.request_class = XForwardedForRequest if proxied else SynapseRequest
+
+        self.request_id_header = config.http_options.request_id_header
+        self.max_request_body_size = max_request_body_size

-        self.requestFactory = request_factory  # type: ignore
         self.access_logger = logging.getLogger(logger_name)
         self.server_version_string = server_version_string.encode("ascii")
+        self.connections: List[Protocol] = []
+
+    def buildProtocol(self, addr: IAddress) -> SynapseProtocol:
+        protocol = SynapseProtocol(
+            self,
+            self.server_name,
+            self.max_request_body_size,
+            self.request_id_header,
+            self.request_class,
+        )
+        return protocol
+
+    def addConnection(self, protocol: Protocol) -> None:
+        self.connections.append(protocol)
+
+    def removeConnection(self, protocol: Protocol) -> None:
+        if protocol in self.connections:
+            self.connections.remove(protocol)
+
+    def stopFactory(self) -> None:
+        super().stopFactory()
+
+        # Shutdown any connections which are still active.
+        # These can be long lived HTTP connections which wouldn't normally be closed
+        # when calling `shutdown` on the respective `Port`.
+        # Closing the connections here is required for us to fully shutdown the
+        # `SynapseHomeServer` in order for it to be garbage collected.
+        for protocol in self.connections[:]:
+            if protocol.transport is not None:
+                protocol.transport.loseConnection()
+        self.connections.clear()

     def log(self, request: SynapseRequest) -> None:  # type: ignore[override]
         pass
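The net effect of `SynapseProtocol` plus the new `SynapseSite` bookkeeping is a factory that knows every live connection and can forcibly close stragglers in `stopFactory`, which is what allows the homeserver to be fully torn down and garbage collected. A generic, hedged sketch of the same connection-tracking idea with plain Twisted classes (not Synapse's `SynapseSite`/`SynapseProtocol`); unlike the real change, it registers connections in `buildProtocol` rather than via `connectionMade`/`connectionLost`:

# Illustrative only: a Twisted factory that remembers the protocols it builds and
# drops any connection still open when the factory is stopped.
from typing import List, Optional

from twisted.internet.interfaces import IAddress
from twisted.internet.protocol import Factory, Protocol


class TrackingFactory(Factory):
    protocol = Protocol  # substitute the real protocol class here

    def __init__(self) -> None:
        self.connections: List[Protocol] = []

    def buildProtocol(self, addr: IAddress) -> Optional[Protocol]:
        proto = super().buildProtocol(addr)
        if proto is not None:
            self.connections.append(proto)
        return proto

    def stopFactory(self) -> None:
        super().stopFactory()
        # Close anything still open so long-lived connections do not outlive the
        # listening port and keep the server object alive.
        for proto in self.connections[:]:
            if proto.transport is not None:
                proto.transport.loseConnection()
        self.connections.clear()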