mongo-charms-single-kernel 1.8.6-py3-none-any.whl → 1.8.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mongo-charms-single-kernel might be problematic.

Files changed (47)
  1. {mongo_charms_single_kernel-1.8.6.dist-info → mongo_charms_single_kernel-1.8.8.dist-info}/METADATA +2 -1
  2. {mongo_charms_single_kernel-1.8.6.dist-info → mongo_charms_single_kernel-1.8.8.dist-info}/RECORD +41 -40
  3. single_kernel_mongo/abstract_charm.py +8 -0
  4. single_kernel_mongo/config/literals.py +2 -23
  5. single_kernel_mongo/config/models.py +12 -0
  6. single_kernel_mongo/config/relations.py +0 -1
  7. single_kernel_mongo/config/statuses.py +10 -57
  8. single_kernel_mongo/core/abstract_upgrades_v3.py +149 -0
  9. single_kernel_mongo/core/k8s_workload.py +2 -2
  10. single_kernel_mongo/core/kubernetes_upgrades_v3.py +17 -0
  11. single_kernel_mongo/core/machine_upgrades_v3.py +54 -0
  12. single_kernel_mongo/core/operator.py +86 -5
  13. single_kernel_mongo/core/version_checker.py +7 -6
  14. single_kernel_mongo/core/vm_workload.py +30 -13
  15. single_kernel_mongo/core/workload.py +17 -19
  16. single_kernel_mongo/events/backups.py +3 -3
  17. single_kernel_mongo/events/cluster.py +1 -1
  18. single_kernel_mongo/events/database.py +1 -1
  19. single_kernel_mongo/events/lifecycle.py +5 -4
  20. single_kernel_mongo/events/tls.py +7 -4
  21. single_kernel_mongo/exceptions.py +4 -24
  22. single_kernel_mongo/lib/charms/operator_libs_linux/v1/systemd.py +288 -0
  23. single_kernel_mongo/managers/cluster.py +8 -8
  24. single_kernel_mongo/managers/config.py +5 -3
  25. single_kernel_mongo/managers/ldap.py +2 -1
  26. single_kernel_mongo/managers/mongo.py +48 -9
  27. single_kernel_mongo/managers/mongodb_operator.py +199 -96
  28. single_kernel_mongo/managers/mongos_operator.py +97 -35
  29. single_kernel_mongo/managers/sharding.py +4 -4
  30. single_kernel_mongo/managers/tls.py +54 -27
  31. single_kernel_mongo/managers/upgrade_v3.py +452 -0
  32. single_kernel_mongo/managers/upgrade_v3_status.py +133 -0
  33. single_kernel_mongo/state/app_peer_state.py +12 -2
  34. single_kernel_mongo/state/charm_state.py +31 -141
  35. single_kernel_mongo/state/config_server_state.py +0 -33
  36. single_kernel_mongo/state/unit_peer_state.py +10 -0
  37. single_kernel_mongo/templates/enable-transparent-huge-pages.service.j2 +14 -0
  38. single_kernel_mongo/utils/helpers.py +0 -6
  39. single_kernel_mongo/utils/mongo_config.py +32 -8
  40. single_kernel_mongo/core/abstract_upgrades.py +0 -890
  41. single_kernel_mongo/core/kubernetes_upgrades.py +0 -194
  42. single_kernel_mongo/core/machine_upgrades.py +0 -188
  43. single_kernel_mongo/events/upgrades.py +0 -157
  44. single_kernel_mongo/managers/upgrade.py +0 -334
  45. single_kernel_mongo/state/upgrade_state.py +0 -134
  46. {mongo_charms_single_kernel-1.8.6.dist-info → mongo_charms_single_kernel-1.8.8.dist-info}/WHEEL +0 -0
  47. {mongo_charms_single_kernel-1.8.6.dist-info → mongo_charms_single_kernel-1.8.8.dist-info}/licenses/LICENSE +0 -0
@@ -21,6 +21,8 @@ from logging import getLogger
 from pathlib import Path
 from typing import TYPE_CHECKING, ClassVar, TypeAlias
 
+import charm_refresh
+import jinja2
 from data_platform_helpers.advanced_statuses.models import StatusObject
 from data_platform_helpers.advanced_statuses.protocol import ManagerStatusProtocol
 from ops.charm import RelationDepartedEvent
@@ -28,17 +30,26 @@ from ops.framework import Object
 from ops.model import Relation, Unit
 
 from single_kernel_mongo.config.literals import (
+    OS_REQUIREMENTS,
     TRUST_STORE_PATH,
     Scope,
     Substrates,
     TrustStoreFiles,
 )
-from single_kernel_mongo.config.models import CharmSpec, LogRotateConfig
+from single_kernel_mongo.config.models import SNAP_NAME, THP_CONFIG, CharmSpec, LogRotateConfig
 from single_kernel_mongo.events.ldap import LDAPEventHandler
 from single_kernel_mongo.exceptions import (
     DeferrableFailedHookChecksError,
     NonDeferrableFailedHookChecksError,
 )
+from single_kernel_mongo.lib.charms.operator_libs_linux.v0 import sysctl
+from single_kernel_mongo.lib.charms.operator_libs_linux.v1.systemd import (
+    SystemdError,
+    daemon_reload,
+    service_disable,
+    service_enable,
+    service_start,
+)
 from single_kernel_mongo.managers.config import FileBasedConfigManager
 from single_kernel_mongo.managers.mongo import MongoManager
 from single_kernel_mongo.state.charm_state import CharmState
@@ -49,10 +60,11 @@ if TYPE_CHECKING:
     from single_kernel_mongo.abstract_charm import AbstractMongoCharm
     from single_kernel_mongo.events.database import DatabaseEventsHandler
     from single_kernel_mongo.events.tls import TLSEventsHandler
-    from single_kernel_mongo.events.upgrades import UpgradeEventHandler
+    from single_kernel_mongo.lib.charms.operator_libs_linux.v0.sysctl import Config
     from single_kernel_mongo.managers.ldap import LDAPManager
     from single_kernel_mongo.managers.tls import TLSManager
-    from single_kernel_mongo.managers.upgrade import MongoUpgradeManager
+    from single_kernel_mongo.managers.upgrade_v3 import MongoDBUpgradesManager
+    from single_kernel_mongo.managers.upgrade_v3_status import MongoDBUpgradesStatusManager
 
 logger = getLogger(__name__)
 
@@ -79,14 +91,16 @@ class OperatorProtocol(ABC, Object, ManagerStatusProtocol):
     config_manager: FileBasedConfigManager
     tls_manager: TLSManager
     state: CharmState
+    refresh: charm_refresh.Common | None
     mongo_manager: MongoManager
-    upgrade_manager: MongoUpgradeManager
+    upgrades_manager: MongoDBUpgradesManager
+    upgrades_status_manager: MongoDBUpgradesStatusManager
     ldap_manager: LDAPManager
    workload: MainWorkloadType
     client_events: DatabaseEventsHandler
     tls_events: TLSEventsHandler
-    upgrade_events: UpgradeEventHandler
     ldap_events: LDAPEventHandler
+    sysctl_config: Config
 
     if TYPE_CHECKING:
 
@@ -215,6 +229,13 @@ class OperatorProtocol(ABC, Object, ManagerStatusProtocol):
             "Scaling down the application, no need to process removed relation in broken hook."
         )
 
+    @property
+    def refresh_in_progress(self) -> bool:
+        """Check if charm-refresh is currently in progress."""
+        # If charm_refresh raised UnitTearDown or PeerRelationNotReady, self.refresh
+        # is None and we consider a refresh to be in progress.
+        return not self.refresh or self.refresh.in_progress
+
     def handle_licenses(self) -> None:
         """Pull / Push licenses.
 
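The new `refresh_in_progress` property treats a missing `self.refresh` object as an active refresh. A minimal sketch of the wiring this implies, assuming the operator catches the two charm_refresh exceptions named in the comment above when it builds the refresh object (the `_build_refresh` helper is hypothetical, not part of this package):

    # Hedged sketch only -- not the charm's actual construction code.
    try:
        self.refresh = self._build_refresh()  # hypothetical helper around charm_refresh
    except (charm_refresh.UnitTearDown, charm_refresh.PeerRelationNotReady):
        # Leaving refresh as None makes refresh_in_progress return True,
        # so hooks back off while the refresh machinery is unavailable.
        self.refresh = None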
@@ -271,6 +292,17 @@
                 f"{path}",
             ]
         )
+
+        if self.substrate == Substrates.VM:
+            self.workload.exec(
+                [
+                    "chown",
+                    "-R",
+                    f"{self.workload.users.user}:{self.workload.users.group}",
+                    f"{self.workload.paths.common_path}",
+                ]
+            )
+
         for path in (
             self.workload.paths.config_file,
             self.workload.paths.mongos_config_file,
@@ -305,3 +337,52 @@
         self.workload.exec(["update-ca-certificates"])
         # Restart the service
         self.restart_charm_services(force=True)
+
+    def write_thp_config_file(self):
+        """Writes the unit file to enable Transparent Huge Pages."""
+        data = THP_CONFIG.service_template.read_text()
+        template = jinja2.Template(data)
+
+        rendered_template = template.render(
+            service_file=f"snap.{SNAP_NAME}.{self.workload.service}.service"
+        )
+        self.workload.write(path=THP_CONFIG.service_file_path, content=rendered_template)
+        daemon_reload()
+        service_enable(THP_CONFIG.service_name)
+        service_start(THP_CONFIG.service_name)
+
+    def _set_os_config(self) -> None:
+        """Sets sysctl config for mongodb."""
+        try:
+            self.sysctl_config.configure(OS_REQUIREMENTS)
+        except (sysctl.ApplyError, sysctl.ValidationError, sysctl.CommandError) as e:
+            # we allow events to continue in the case that we are not able to correctly configure
+            # sysctl config, since we can still run the workload with wrong sysctl parameters
+            # even if it is not optimal.
+            logger.error(f"Error setting values on sysctl parameters: {e.message}")
+            # containers share the kernel with the host system, and some sysctl parameters are
+            # set at kernel level.
+            logger.warning("sysctl params cannot be set. Is the machine running on a container?")
+        try:
+            self.write_thp_config_file()
+        except SystemdError as e:
+            # we allow events to continue in the case that we are not able to correctly configure
+            # the THP service, since we can still run the workload with wrong kernel parameters
+            # even if it is not optimal.
+            logger.error(f"Error setting values on kernel parameters: {e.args}")
+            # containers share the kernel with the host system, and some sysctl parameters are
+            # set at kernel level.
+            logger.warning("kernel params cannot be set. Is the machine running on a container?")
+            service_disable(THP_CONFIG.service_name)
+
+    def build_local_tls_directory(self) -> None:
+        """On Kubernetes, we need the local configuration directory.
+
+        This will store the certificates locally, which allows constructing the
+        same URIs to connect locally and on the sidecar container running
+        mongodb.
+        """
+        if self.substrate == Substrates.VM:
+            return
+
+        Path(self.state.paths.conf_path).mkdir(exist_ok=True)
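The Jinja2 template referenced by `THP_CONFIG.service_template` ships as single_kernel_mongo/templates/enable-transparent-huge-pages.service.j2 (added in this release) and is not shown in this diff. A small, self-contained sketch of just the rendering step, using an inline stand-in template and an assumed service name, to show how the `service_file` variable is substituted:

    import jinja2

    # Stand-in template body; the real unit file content is not reproduced here.
    template = jinja2.Template("Before={{ service_file }}")
    rendered = template.render(service_file="snap.charmed-mongodb.mongod.service")  # assumed name
    print(rendered)  # -> Before=snap.charmed-mongodb.mongod.service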
@@ -36,19 +36,20 @@ class VersionChecker:
         # revision 88 and a config-server running on revision 110
         current_charms_version = get_charm_revision(
             self.charm.unit,
-            local_version=self.dependent.workload.get_internal_revision(),
+            local_version=self.dependent.workload.get_charm_revision(),
         )
         local_identifier = (
             "-locally built" if self.version_checker.is_local_charm(self.charm.app.name) else ""
         )
         try:
-            # This part needs some explanation: If we are running this during
-            # the pre-refresh hook that happens after the upgrade, we want to
-            # check our version against the already upgraded config server, so
+            # This part needs some explanation: On VM, if we are running this
+            # during the pre-refresh hook that happens after the upgrade, all
+            # of the charm code has already been refreshed.
+            # We want to check our version against the already upgraded config server, so
             # we use the current revision that stores the revision of the
-            # former charm until the charm is fully upgraded.
+            # former charm until that unit is fully upgraded.
             old_version = self.version_checker.version
-            self.version_checker.version = self.state.unit_upgrade_peer_data.current_revision
+            self.version_checker.version = self.state.unit_peer_data.current_revision
             if self.version_checker.are_related_apps_valid():
                 return None
         except NoVersionError as e:
@@ -9,18 +9,18 @@ from collections.abc import Mapping
 from itertools import chain
 from logging import getLogger
 from pathlib import Path
+from platform import machine
 from shutil import copyfile
 
 from ops import Container
-from tenacity import retry, retry_if_result, stop_after_attempt, wait_fixed
+from tenacity import retry, retry_if_exception_type, retry_if_result, stop_after_attempt, wait_fixed
 from typing_extensions import override
 
 from single_kernel_mongo.config.literals import (
     CRON_FILE,
-    SNAP,
     VmUser,
 )
-from single_kernel_mongo.config.models import CharmSpec
+from single_kernel_mongo.config.models import SNAP_NAME, CharmSpec
 from single_kernel_mongo.core.workload import WorkloadBase
 from single_kernel_mongo.exceptions import (
     WorkloadExecError,
@@ -41,8 +41,7 @@ class VMWorkload(WorkloadBase):
 
     def __init__(self, role: CharmSpec, container: Container | None) -> None:
         super().__init__(role, container)
-        self.snap = SNAP
-        self.mongod_snap = snap.SnapCache()[self.snap.name]
+        self.mongod_snap = snap.SnapCache()[SNAP_NAME]
 
     @property
     @override
@@ -186,23 +185,41 @@
         stop=stop_after_attempt(20),
         wait=wait_fixed(1),
         reraise=True,
+        retry=retry_if_exception_type(WorkloadServiceError),
     )
-    def install(self) -> None:
-        """Loads the MongoDB snap from LP.
+    def install(self, revision: str | None = None, retry_and_raise: bool = True) -> bool:
+        """Install the charmed-mongodb snap from the snap store.
+
+        Args:
+            revision (str | None): the snap revision to install. Will be loaded from the
+                `refresh_versions.toml` file if None.
+            retry_and_raise (bool): whether to retry in case of errors. Will raise if the error
+                persists.
 
         Returns:
-            True if successfully installed. False otherwise.
+            True if successfully installed, False if errors occur and `retry_and_raise` is False.
         """
         try:
+            if not revision:
+                versions = self.load_toml_file(Path("refresh_versions.toml"))
+                revision = versions["snap"]["revisions"][machine()]
+
             self.mongod_snap.ensure(
-                snap.SnapState.Latest,
-                channel=self.snap.channel,
-                revision=self.snap.revision,
+                snap.SnapState.Present,
+                revision=revision,
             )
             self.mongod_snap.hold()
+            return True
         except snap.SnapError as err:
-            logger.error(f"Failed to install {self.snap.name}. Reason: {err}.")
-            raise WorkloadNotReadyError("Failed to install mongodb")
+            logger.error(f"Failed to install {SNAP_NAME}. Reason: {err}.")
+            if retry_and_raise:
+                raise WorkloadNotReadyError("Failed to install mongodb")
+            return False
+
+    @override
+    def snap_revision(self) -> str:
+        """The currently installed snap_revision."""
+        return self.mongod_snap.revision
 
     @override
     def setup_cron(self, lines: list[str]) -> None:  # pragma: nocover
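When `install()` is called without an explicit revision, it resolves the snap revision from `refresh_versions.toml`, keyed by the machine architecture; `get_charm_revision()` (further down) reads the `charm` key from the same kind of file. A sketch of that lookup with an inline sample; the keys mirror the accesses in the diff (`versions["snap"]["revisions"][machine()]` and `versions["charm"]`), but the concrete values are illustrative assumptions, not the file shipped with the charm:

    import tomllib
    from platform import machine

    # Hypothetical refresh_versions.toml content (values are made up).
    sample = """
    charm = "1.8.8"

    [snap.revisions]
    x86_64 = "130"
    aarch64 = "131"
    """

    versions = tomllib.loads(sample)
    snap_revision = versions["snap"]["revisions"][machine()]  # e.g. "130" on an amd64 machine
    charm_version = versions["charm"]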
@@ -9,12 +9,13 @@ import string
 from abc import ABC, abstractmethod
 from itertools import chain
 from pathlib import Path
-from typing import ClassVar
+from typing import Any, ClassVar
 
+import tomllib
 from ops import Container
 from ops.pebble import Layer
 
-from single_kernel_mongo.config.literals import WorkloadUser
+from single_kernel_mongo.config.literals import VERSIONS_FILE, WorkloadUser
 from single_kernel_mongo.config.models import CharmSpec
 
 
@@ -149,7 +150,7 @@ class WorkloadBase(ABC): # pragma: nocover
         self.role = role
 
     @abstractmethod
-    def install(self) -> None:
+    def install(self, revision: str | None = None, retry_and_raise: bool = True) -> bool:
         """Installs the workload snap or raises an error.
 
         VM-only: on k8s, just returns None.
@@ -283,6 +284,17 @@
         """
         ...
 
+    def snap_revision(self) -> str:
+        """The currently installed snap_revision."""
+        return ""
+
+    def load_toml_file(self, file: Path) -> dict[str, Any]:
+        """Loads a TOML file to a dictionary."""
+        if not file.exists():
+            return {}
+
+        return tomllib.loads(file.read_text())
+
     def get_version(self) -> str:
         """Get the workload version.
 
@@ -295,21 +307,6 @@
             version = ""
         return version
 
-    def get_internal_revision(self) -> str:
-        """Get the internal revision.
-
-        Note: This should be removed soon because we're moving away from `charm
-        version` + `internal revision` to `charm_version+git hash`.
-
-        Returns:
-            String of charm internal revision
-        """
-        try:
-            version = Path("charm_version").read_text().strip()
-        except:  # noqa: E722
-            version = ""
-        return version
-
     def get_charm_revision(self) -> str:
         """Get the charm revision.
 
@@ -317,7 +314,8 @@
             String of charm revision
         """
         try:
-            version = Path("charm_version").read_text().strip()
+            versions = self.load_toml_file(VERSIONS_FILE)
+            version = versions["charm"]
         except:  # noqa: E722
             version = ""
         return version
@@ -79,7 +79,7 @@ class BackupEventsHandler(Object):
 
     def _on_s3_relation_joined(self, event: RelationJoinedEvent) -> None:
         """Checks for valid integration for s3-integrations."""
-        if self.dependent.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             logger.warning(
                 "Adding s3-relations is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -97,7 +97,7 @@
 
     def _on_s3_credential_changed(self, event: CredentialsChangedEvent) -> None:  # noqa: C901
         action = "configure-pbm"
-        if self.dependent.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             logger.warning(
                 "Changing s3-credentials is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -285,7 +285,7 @@
             )
             return
 
-        if self.dependent.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             fail_action_with_error_log(
                 logger,
                 event,
@@ -72,7 +72,7 @@ class ClusterConfigServerEventHandler(Object):
         Calls the manager to share the secrets with mongos charm.
         """
         try:
-            self.manager.share_secret_to_mongos(event.relation)
+            self.manager.share_secret_to_mongos(event.relation, initial_event=True)
         except DeferrableFailedHookChecksError as e:
             logger.info("Skipping database requested event: hook checks did not pass.")
             defer_event_with_info_log(logger, event, str(type(event)), str(e))
@@ -138,7 +138,7 @@ class DatabaseEventsHandler(Object):
         if not self.charm.unit.is_leader():
             return False
 
-        if self.dependent.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             logger.warning(
                 "Adding relations is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -130,9 +130,10 @@ class LifecycleEventsHandler(Object):
         """Start event."""
         try:
             self.dependent.prepare_for_startup()
-        except (ContainerNotReadyError, WorkloadServiceError):
-            logger.info("Not ready to start.")
-            event.defer()
+        except (ContainerNotReadyError, WorkloadServiceError) as e:
+            defer_event_with_info_log(
+                logger, event, "start", f"Not ready to start: {e.__class__.__name__}({e})"
+            )
             return
         except InvalidConfigRoleError:
             logger.info("Missing a valid role.")
@@ -222,7 +223,7 @@
                 secret_id=event.secret.id or "",
             )
         except (WorkloadServiceError, ChangeError) as err:
-            logger.info("Failed to restart services", err, exc_info=True)
+            logger.info("Failed to restart services: %s", err, exc_info=True)
             self.dependent.state.statuses.add(
                 CharmStatuses.FAILED_SERVICES_START.value,
                 scope="unit",
@@ -77,7 +77,7 @@ class TLSEventsHandler(Object):
             )
             event.fail("Mongos cannot set TLS keys until integrated to config-server.")
             return
-        if self.manager.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             fail_action_with_error_log(
                 logger,
                 event,
@@ -106,7 +106,8 @@
             )
             event.defer()
             return
-        if self.manager.state.upgrade_in_progress:
+
+        if self.dependent.refresh_in_progress:
             logger.warning(
                 "Enabling TLS is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -131,7 +132,7 @@
             event.defer()
             return
 
-        if self.manager.state.upgrade_in_progress:
+        if self.dependent.refresh_in_progress:
             logger.warning(
                 "Disabling TLS is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -162,7 +163,9 @@
             logger.info(f"Deferring {str(type(event))}: db is not initialised")
             event.defer()
             return
-        if self.manager.state.upgrade_in_progress:
+        # If a refresh is in progress and this is the initial integration, delay.
+        # Otherwise it is a certificate rotation and we are safe to continue.
+        if self.dependent.refresh_in_progress and self.manager.initial_integration():
             logger.warning(
                 "Enabling TLS is not supported during an upgrade. The charm may be in a broken, unrecoverable state."
             )
@@ -4,10 +4,6 @@
 
 """All general exceptions."""
 
-from data_platform_helpers.advanced_statuses.models import StatusObject
-
-from single_kernel_mongo.config.statuses import UpgradeStatuses
-
 
 class InvalidCharmKindError(Exception):
     """Raised when calling a function on the wrong charm kind."""
@@ -122,6 +118,10 @@ class UpgradeInProgressError(Exception):
     """Raised when an upgrade is in progress."""
 
 
+class MongoDBUpgradeError(Exception):
+    """Raised when the snap upgrade fails."""
+
+
 class OpenPortFailedError(Exception):
     """Raised when we fail to open ports."""
 
@@ -209,22 +209,6 @@ class EarlyRemovalOfConfigServerError(Exception):
     """Raised when we try to remove config server while it still has shards."""
 
 
-class StatusError(Exception):
-    """Exception with ops status."""
-
-    def __init__(self, status: StatusObject) -> None:
-        super().__init__(status.message)
-        self.status = status
-
-
-class PrecheckFailedError(StatusError):
-    """App is not ready to upgrade."""
-
-    def __init__(self, message: str):
-        self.message = message
-        super().__init__(UpgradeStatuses.REFRESH_IN_PROGRESS.value)
-
-
 class FailedToElectNewPrimaryError(Exception):
     """Raised when a new primary isn't elected after stepping down."""
 
@@ -249,10 +233,6 @@ class ActionFailedError(Exception):
     """Raised when we failed an action."""
 
 
-class UnhealthyUpgradeError(Exception):
-    """Raised when the upgrade is unhealthy during an post upgrade check."""
-
-
 class WaitingForLdapDataError(DeferrableError):
     """Raised when the charm hasn't received data from ldap yet."""
 