mongo-charms-single-kernel 1.8.8__py3-none-any.whl → 1.8.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mongo-charms-single-kernel might be problematic.
- {mongo_charms_single_kernel-1.8.8.dist-info → mongo_charms_single_kernel-1.8.9.dist-info}/METADATA +1 -1
- {mongo_charms_single_kernel-1.8.8.dist-info → mongo_charms_single_kernel-1.8.9.dist-info}/RECORD +27 -27
- single_kernel_mongo/config/literals.py +7 -0
- single_kernel_mongo/config/relations.py +2 -1
- single_kernel_mongo/config/statuses.py +127 -20
- single_kernel_mongo/core/operator.py +7 -0
- single_kernel_mongo/core/structured_config.py +2 -0
- single_kernel_mongo/core/workload.py +10 -4
- single_kernel_mongo/events/cluster.py +5 -0
- single_kernel_mongo/events/sharding.py +3 -1
- single_kernel_mongo/events/tls.py +183 -157
- single_kernel_mongo/exceptions.py +0 -8
- single_kernel_mongo/lib/charms/tls_certificates_interface/v4/tls_certificates.py +1995 -0
- single_kernel_mongo/managers/cluster.py +70 -28
- single_kernel_mongo/managers/config.py +14 -8
- single_kernel_mongo/managers/mongo.py +1 -1
- single_kernel_mongo/managers/mongodb_operator.py +44 -22
- single_kernel_mongo/managers/mongos_operator.py +16 -20
- single_kernel_mongo/managers/sharding.py +154 -127
- single_kernel_mongo/managers/tls.py +223 -206
- single_kernel_mongo/state/charm_state.py +39 -16
- single_kernel_mongo/state/cluster_state.py +8 -0
- single_kernel_mongo/state/config_server_state.py +9 -0
- single_kernel_mongo/state/tls_state.py +39 -12
- single_kernel_mongo/utils/helpers.py +4 -19
- single_kernel_mongo/lib/charms/tls_certificates_interface/v3/tls_certificates.py +0 -2123
- {mongo_charms_single_kernel-1.8.8.dist-info → mongo_charms_single_kernel-1.8.9.dist-info}/WHEEL +0 -0
- {mongo_charms_single_kernel-1.8.8.dist-info → mongo_charms_single_kernel-1.8.9.dist-info}/licenses/LICENSE +0 -0
single_kernel_mongo/managers/sharding.py

@@ -17,7 +17,6 @@ from typing import TYPE_CHECKING
 from data_platform_helpers.advanced_statuses.models import StatusObject
 from data_platform_helpers.advanced_statuses.protocol import ManagerStatusProtocol
 from data_platform_helpers.advanced_statuses.types import Scope
-from ops import StatusBase
 from ops.framework import Object
 from ops.model import (
     Relation,
@@ -50,6 +49,7 @@ from single_kernel_mongo.exceptions import (
     NonDeferrableFailedHookChecksError,
     NotDrainedError,
     RemoveLastShardError,
+    SetPasswordError,
     ShardAuthError,
     ShardNotInClusterError,
     ShardNotPlannedForRemovalError,
@@ -126,6 +126,8 @@ class ConfigServerManager(Object, ManagerStatusProtocol):
 
         if int_tls_ca := self.state.tls.get_secret(internal=True, label_name=SECRET_CA_LABEL):
             relation_data[AppShardingComponentKeys.INT_CA_SECRET.value] = int_tls_ca
+        if ext_tls_ca := self.state.tls.get_secret(internal=False, label_name=SECRET_CA_LABEL):
+            relation_data[AppShardingComponentKeys.EXT_CA_SECRET.value] = ext_tls_ca
 
         self.data_interface.update_relation_data(relation.id, relation_data)
         self.data_interface.set_credentials(
@@ -188,7 +190,7 @@ class ConfigServerManager(Object, ManagerStatusProtocol):
             self.dependent.state.statuses.add(status, scope="unit", component=self.dependent.name)
             raise NonDeferrableFailedHookChecksError("relation is not feasible")
         if not self.charm.unit.is_leader():
-            raise NonDeferrableFailedHookChecksError
+            raise NonDeferrableFailedHookChecksError("Not leader")
 
         # Note: we permit this logic based on status since we aren't checking
         # self.charm.unit.status`, instead `get_cluster_mismatched_revision_status` directly
@@ -545,32 +547,21 @@ class ShardManager(Object, ManagerStatusProtocol):
                 "Config-server never set up, no need to process broken event."
             )
 
-
-
-
-
-                    ShardStatuses.REQUIRES_TLS.value, scope="unit", component=self.name
-                )
-                raise DeferrableFailedHookChecksError(
-                    "Config-Server uses TLS but shard does not. Please synchronise encryption method."
-                )
-            case True, False:
-                self.state.statuses.add(
-                    ShardStatuses.REQUIRES_NO_TLS.value, scope="unit", component=self.name
-                )
-                raise DeferrableFailedHookChecksError(
-                    "Shard uses TLS but config-server does not. Please synchronise encryption method."
-                )
-            case _:
-                pass
+        if internal_tls_status := self.get_tls_status(internal=True):
+            self.state.statuses.add(internal_tls_status, scope="unit", component=self.name)
+        if external_tls_status := self.get_tls_status(internal=False):
+            self.state.statuses.add(external_tls_status, scope="unit", component=self.name)
 
-        if
-            raise DeferrableFailedHookChecksError(
-                "Shard is integrated to a different CA than the config server. Please use the same CA for all cluster components.",
-            )
+        if internal_tls_status or external_tls_status:
+            raise DeferrableFailedHookChecksError("Invalid TLS integration, check logs.")
 
         if is_leaving:
             self.dependent.assert_proceed_on_broken_event(relation)
+        else:
+            if not self.state.shard_state.has_received_credentials():
+                # Nothing to do until we receive credentials
+                logger.info("Still waiting for credentials.")
+                raise NonDeferrableFailedHookChecksError("Missing user credentials.")
 
     def prepare_to_add_shard(self) -> None:
         """Sets status and flags in relation data relevant to sharding."""
@@ -594,24 +585,24 @@ class ShardManager(Object, ManagerStatusProtocol):
 
         operator_password = self.state.shard_state.operator_password
         backup_password = self.state.shard_state.backup_password
+
         if not operator_password or not backup_password:
             logger.info("Missing secrets, returning.")
             return
 
         keyfile = self.state.shard_state.keyfile
         tls_ca = self.state.shard_state.internal_ca_secret
+        external_tls_ca = self.state.shard_state.external_ca_secret
 
         if keyfile is None:
             logger.info("Waiting for secrets from config-server")
             raise WaitingForSecretsError("Missing keyfile")
 
-
-
-
-            raise NotReadyError
+        # Let's start by updating the passwords, before any restart so they are in sync already.
+        if self.charm.unit.is_leader():
+            self.sync_cluster_passwords(operator_password, backup_password)
 
-
-        self.state.statuses.set(ShardStatuses.ACTIVE_IDLE.value, scope="unit", component=self.name)
+        self.update_member_auth(keyfile, tls_ca, external_tls_ca)
 
         # Add the certificate if it is present
         if (
@@ -631,17 +622,15 @@ class ShardManager(Object, ManagerStatusProtocol):
             # We updated the configuration, so we restart PBM.
             self.dependent.backup_manager.configure_and_restart(force=True)
 
-        if not self.
-
+        if not self.dependent.mongo_manager.mongod_ready():
+            logger.info("MongoDB is not ready")
+            raise NotReadyError
 
-        #
-
-        logger.info("Repairing missing database field in DB")
-        self.data_requirer.update_relation_data(
-            relation.id, {"database": self.data_requirer.database}
-        )
+        # By setting the status we ensure that the former statuses of this component are removed.
+        self.state.statuses.set(ShardStatuses.ACTIVE_IDLE.value, scope="unit", component=self.name)
 
-        self.
+        if not self.charm.unit.is_leader():
+            return
 
         # We have updated our auth, config-server can add the shard.
         self.data_requirer.update_relation_data(relation.id, {"auth-updated": "true"})
@@ -666,6 +655,8 @@ class ShardManager(Object, ManagerStatusProtocol):
             )
             return
 
+        self.assert_pass_hook_checks(relation, is_leaving=False)
+
         if self.charm.unit.is_leader():
             if self.data_requirer.fetch_my_relation_field(relation.id, "auth-updated") != "true":
                 return
@@ -710,46 +701,50 @@ class ShardManager(Object, ManagerStatusProtocol):
             ShardStatuses.SHARD_DRAINED.value, scope="unit", component=self.name
         )
 
-    def update_member_auth(
+    def update_member_auth(
+        self, keyfile: str, peer_tls_ca: str | None, external_tls_ca: str | None
+    ) -> None:
         """Updates the shard to have the same membership auth as the config-server."""
-        cluster_auth_tls =
-
-
-
-
-        # same in their CSRs. Re-requesting a cert after integrated with the config-server
-        # regenerates the cert with the appropriate configurations needed for sharding.
-        if cluster_auth_tls and tls_integrated and self._should_request_new_certs():
-            logger.info("Cluster implements internal membership auth via certificates")
-            for internal in (True, False):
-                csr = self.dependent.tls_manager.generate_certificate_request(
-                    param=None, internal=internal
-                )
-                self.dependent.tls_events.certs_client.request_certificate_creation(
-                    certificate_signing_request=csr
-                )
-                self.dependent.tls_manager.set_waiting_for_cert_to_update(
-                    internal=internal, waiting=True
-                )
-        else:
-            logger.info("Cluster implements internal membership auth via keyFile")
+        cluster_auth_tls = peer_tls_ca is not None
+        external_auth_tls = external_tls_ca is not None
+        peer_tls_integrated = self.state.peer_tls_relation is not None
+        client_tls_integrated = self.state.client_tls_relation is not None
+        keyfile_changed = self.state.get_keyfile() != keyfile
 
         # Copy over keyfile regardless of whether the cluster uses TLS or or KeyFile for internal
         # membership authentication. If TLS is disabled on the cluster this enables the cluster to
         # have the correct cluster KeyFile readily available.
-
-
-
-
-        self.
+        if keyfile_changed:
+            self.workload.write(path=self.workload.paths.keyfile, content=keyfile)
+
+        # Sets the keyfile anyway
+        if self.charm.unit.is_leader():
+            self.state.set_keyfile(keyfile)
+
+        if not cluster_auth_tls:
+            logger.info("Cluster implements internal auth via keyfile.")
+            if (
+                external_auth_tls
+                and client_tls_integrated
+                and not self.dependent.tls_manager.is_certificate_available(internal=False)
+            ):
+                logger.info("Cluster implements external auth via certificates.")
+                self.dependent.tls_events.refresh_certificates()
+                raise WaitingForCertificatesError()
+            if keyfile_changed:
+                self.dependent.restart_charm_services(force=True)
+            return
 
-        #
-
+        # Edge case: shard has TLS enabled before having connected to the config-server. For TLS in
+        # sharded MongoDB clusters it is necessary that the common name and organisation name are
+        # the same in their CSRs. Re-requesting a cert after integrated with the config-server
+        # regenerates the cert with the appropriate configurations needed for sharding.
+        if peer_tls_integrated and self.dependent.tls_manager.is_waiting_for_a_cert():
+            logger.info("Cluster implements internal membership auth via certificates.")
             logger.info("Waiting for requested certs before restarting and adding to cluster.")
+            self.dependent.tls_events.refresh_certificates()
             raise WaitingForCertificatesError
 
-        self.dependent.restart_charm_services(force=True)
-
     def update_mongos_hosts(self):
         """Updates the hosts for mongos on the relation data."""
         if (hosts := self.state.shard_state.mongos_hosts) != self.state.app_peer_data.mongos_hosts:
@@ -757,6 +752,10 @@ class ShardManager(Object, ManagerStatusProtocol):
 
     def sync_cluster_passwords(self, operator_password: str, backup_password: str) -> None:
         """Update shared cluster passwords."""
+        if not self.should_synchronise_cluster_passwords():
+            logger.debug("No need to update passwords, already correct")
+            return
+
         for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(3), reraise=True):
             with attempt:
                 if self.dependent.primary_unit_name is None:
@@ -765,16 +764,19 @@ class ShardManager(Object, ManagerStatusProtocol):
                     )
                     raise NotReadyError
 
-
-
-
-
-
-
-
-
-
-
+        for user, password in ((OperatorUser, operator_password), (BackupUser, backup_password)):
+            try:
+                self.update_password(user=user, new_password=password)
+            except SetPasswordError:
+                # RelationChangedEvents will only update passwords when the
+                # relation is first joined, otherwise all other password
+                # changes result in a Secret Changed Event.
+                logger.error(
+                    "Failed to sync cluster passwords from config-server to shard. Deferring event and retrying."
+                )
+                raise FailedToUpdateCredentialsError(
+                    f"Failed to update credentials of {user.username}"
+                )
         try:
             # after updating the password of the backup user, restart pbm with correct password
             self.dependent.backup_manager.configure_and_restart()
@@ -786,52 +788,44 @@ class ShardManager(Object, ManagerStatusProtocol):
         if not new_password or not self.charm.unit.is_leader():
             return
 
-        current_password = self.state.get_user_password(user)
-
-        if new_password == current_password:
-            logger.info("Not updating password: password not changed.")
-            return
-
         # updating operator password, usually comes after keyfile was updated, hence, the mongodb
         # service was restarted. Sometimes this requires units getting insync again.
         for attempt in Retrying(stop=stop_after_delay(60), wait=wait_fixed(3), reraise=True):
             with attempt:
-
-
-
-
-
-
-                    )
-                    raise
-                except PyMongoError as e:
-                    logger.error(f"Failed changing the password: {e}")
-                    raise
-        self.state.set_user_password(user, new_password)
-
-    def _should_request_new_certs(self) -> bool:
-        """Returns if the shard has already requested the certificates for internal-membership.
-
-        Sharded components must have the same subject names in their certs.
-        """
-        int_subject = self.state.unit_peer_data.get("int_certs_subject") or None
-        ext_subject = self.state.unit_peer_data.get("ext_certs_subject") or None
-        return {int_subject, ext_subject} != {self.state.config_server_name}
-
-    def tls_status(self) -> tuple[bool, bool]:
-        """Returns the TLS integration status for shard and config-server."""
-        shard_relation = self.state.shard_relation
-        if shard_relation:
-            shard_has_tls = self.state.tls_relation is not None
+                self.dependent.mongo_manager.set_user_password(user, new_password)
+
+    def shard_and_config_server_peer_tls_status(self) -> tuple[bool, bool]:
+        """Returns the peer TLS integration status for shard and config-server."""
+        if self.state.shard_relation:
+            shard_has_tls = self.state.peer_tls_relation is not None
             config_server_has_tls = self.state.shard_state.internal_ca_secret is not None
             return shard_has_tls, config_server_has_tls
 
         return False, False
 
-    def
-        """Returns
-
-
+    def shard_and_config_server_client_tls_status(self) -> tuple[bool, bool]:
+        """Returns the client TLS integration status for shard and config-server."""
+        if self.state.shard_relation:
+            shard_has_tls = self.state.client_tls_relation is not None
+            config_server_has_tls = self.state.shard_state.external_ca_secret is not None
+            return shard_has_tls, config_server_has_tls
+
+        return False, False
+
+    def is_client_ca_compatible(self) -> bool:
+        """Returns true if both the shard and the config server use the same peer CA."""
+        if not self.state.shard_relation:
+            return True
+        config_server_tls_ca = self.state.shard_state.external_ca_secret
+        shard_tls_ca = self.state.tls.get_secret(internal=False, label_name=SECRET_CA_LABEL)
+        if not config_server_tls_ca or not shard_tls_ca:
+            return True
+
+        return config_server_tls_ca == shard_tls_ca
+
+    def is_peer_ca_compatible(self) -> bool:
+        """Returns true if both the shard and the config server use the same peer CA."""
+        if not self.state.shard_relation:
             return True
         config_server_tls_ca = self.state.shard_state.internal_ca_secret
         shard_tls_ca = self.state.tls.get_secret(internal=True, label_name=SECRET_CA_LABEL)
@@ -931,6 +925,14 @@ class ShardManager(Object, ManagerStatusProtocol):
 
         return True
 
+    def should_synchronise_cluster_passwords(self) -> bool:
+        """Decides if we should synchronise cluster passwords or not."""
+        if self.state.shard_state.operator_password != self.state.get_user_password(OperatorUser):
+            return True
+        if self.state.shard_state.backup_password != self.state.get_user_password(BackupUser):
+            return True
+        return False
+
     def _is_shard_aware(self) -> bool:
         """Returns True if provided shard is shard aware."""
         with MongoConnection(self.state.remote_mongos_config) as mongo:
@@ -959,24 +961,49 @@ class ShardManager(Object, ManagerStatusProtocol):
 
         return False
 
-    def get_tls_status(self) ->
-        """
-
-
+    def get_tls_status(self, internal: bool) -> StatusObject | None:
+        """Computes the TLS status for the scope.
+
+        Args:
+            internal: (bool) if true, represents the internal TLS, otherwise external TLS.
+        """
+        if internal:
+            shard_tls, config_server_tls = self.shard_and_config_server_peer_tls_status()
+            is_ca_compatible = self.is_peer_ca_compatible()
+        else:
+            shard_tls, config_server_tls = self.shard_and_config_server_client_tls_status()
+            is_ca_compatible = self.is_client_ca_compatible()
+
+        match (shard_tls, config_server_tls):
             case False, True:
-
+                logger.warning(
+                    "Config-Server uses peer TLS but shard does not. Please synchronise encryption method."
+                )
+                return ShardStatuses.missing_tls(internal=internal)
             case True, False:
-
+                logger.warning(
+                    "Shard uses peer TLS but config-server does not. Please synchronise encryption method."
+                )
+                return ShardStatuses.invalid_tls(internal=internal)
             case _:
                 pass
 
-        if not
+        if not is_ca_compatible:
             logger.error(
                 "Shard is integrated to a different CA than the config server. Please use the same CA for all cluster components."
             )
-            return ShardStatuses.
+            return ShardStatuses.incompatible_ca(internal=internal)
+
         return None
 
+    def tls_statuses(self) -> list[StatusObject]:
+        """All TLS statuses, for both scopes."""
+        statuses = []
+        for internal in True, False:
+            if status := self.get_tls_status(internal=internal):
+                statuses.append(status)
+        return statuses
+
     def get_statuses(self, scope: Scope, recompute: bool = False) -> list[StatusObject]:  # noqa: C901
         """Returns the current status of the shard."""
         charm_statuses: list[StatusObject] = []
@@ -1002,8 +1029,8 @@ class ShardManager(Object, ManagerStatusProtocol):
             # No need to go further if the revision is invalid
             return charm_statuses
 
-        if
-        charm_statuses
+        if tls_statuses := self.tls_statuses():
+            charm_statuses += tls_statuses
             # if TLS is misconfigured we will get redherrings on the remaining messages
             return charm_statuses