qontract-reconcile 0.10.1rc879__py3-none-any.whl → 0.10.1rc894__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qontract_reconcile-0.10.1rc879.dist-info → qontract_reconcile-0.10.1rc894.dist-info}/METADATA +1 -1
- {qontract_reconcile-0.10.1rc879.dist-info → qontract_reconcile-0.10.1rc894.dist-info}/RECORD +291 -284
- reconcile/acs_rbac.py +1 -2
- reconcile/aus/advanced_upgrade_service.py +14 -14
- reconcile/aus/aus_label_source.py +1 -2
- reconcile/aus/base.py +23 -26
- reconcile/aus/cluster_version_data.py +4 -4
- reconcile/aus/models.py +2 -3
- reconcile/aus/version_gate_approver.py +2 -6
- reconcile/aus/version_gates/__init__.py +1 -3
- reconcile/aus/version_gates/sts_version_gate_handler.py +2 -3
- reconcile/aws_account_manager/integration.py +9 -14
- reconcile/aws_account_manager/reconciler.py +51 -1
- reconcile/aws_account_manager/utils.py +3 -0
- reconcile/aws_ami_cleanup/integration.py +3 -4
- reconcile/aws_iam_password_reset.py +2 -5
- reconcile/aws_version_sync/integration.py +2 -2
- reconcile/blackbox_exporter_endpoint_monitoring.py +2 -5
- reconcile/change_owners/approver.py +4 -5
- reconcile/change_owners/bundle.py +20 -22
- reconcile/change_owners/change_types.py +23 -24
- reconcile/change_owners/changes.py +13 -16
- reconcile/change_owners/decision.py +2 -5
- reconcile/change_owners/diff.py +11 -15
- reconcile/change_owners/self_service_roles.py +1 -2
- reconcile/change_owners/tester.py +7 -10
- reconcile/checkpoint.py +2 -5
- reconcile/cli.py +26 -12
- reconcile/closedbox_endpoint_monitoring_base.py +8 -11
- reconcile/cluster_deployment_mapper.py +2 -5
- reconcile/cna/assets/asset.py +4 -7
- reconcile/cna/assets/null.py +2 -5
- reconcile/cna/integration.py +2 -3
- reconcile/cna/state.py +2 -5
- reconcile/dashdotdb_base.py +8 -11
- reconcile/dashdotdb_cso.py +3 -6
- reconcile/dashdotdb_dora.py +10 -14
- reconcile/dashdotdb_dvo.py +10 -13
- reconcile/dashdotdb_slo.py +5 -8
- reconcile/database_access_manager.py +5 -6
- reconcile/dynatrace_token_provider/integration.py +3 -6
- reconcile/dynatrace_token_provider/integration_v2.py +20 -0
- reconcile/dynatrace_token_provider/meta.py +1 -0
- reconcile/external_resources/integration.py +1 -1
- reconcile/external_resources/manager.py +4 -4
- reconcile/external_resources/model.py +3 -3
- reconcile/external_resources/secrets_sync.py +5 -5
- reconcile/external_resources/state.py +5 -5
- reconcile/gabi_authorized_users.py +3 -6
- reconcile/gcr_mirror.py +1 -1
- reconcile/github_org.py +1 -3
- reconcile/github_repo_invites.py +2 -5
- reconcile/gitlab_housekeeping.py +7 -11
- reconcile/gitlab_labeler.py +1 -2
- reconcile/gitlab_members.py +2 -5
- reconcile/gitlab_permissions.py +1 -3
- reconcile/glitchtip/integration.py +5 -8
- reconcile/glitchtip_project_alerts/integration.py +57 -33
- reconcile/glitchtip_project_dsn/integration.py +8 -11
- reconcile/gql_definitions/aws_account_manager/aws_accounts.py +6 -0
- reconcile/gql_definitions/fragments/aws_account_managed.py +8 -0
- reconcile/gql_definitions/glitchtip/glitchtip_project.py +4 -4
- reconcile/gql_definitions/glitchtip_project_alerts/glitchtip_project.py +27 -7
- reconcile/integrations_manager.py +5 -8
- reconcile/jenkins/types.py +5 -6
- reconcile/jenkins_job_builder.py +9 -12
- reconcile/jenkins_roles.py +1 -1
- reconcile/jira_watcher.py +2 -2
- reconcile/ldap_groups/integration.py +2 -5
- reconcile/ocm/types.py +21 -26
- reconcile/ocm_addons_upgrade_tests_trigger.py +3 -6
- reconcile/ocm_clusters.py +8 -8
- reconcile/ocm_internal_notifications/integration.py +1 -2
- reconcile/ocm_labels/integration.py +2 -5
- reconcile/ocm_machine_pools.py +11 -15
- reconcile/ocm_upgrade_scheduler_org_updater.py +2 -5
- reconcile/openshift_base.py +29 -30
- reconcile/openshift_groups.py +15 -20
- reconcile/openshift_namespace_labels.py +8 -14
- reconcile/openshift_namespaces.py +5 -8
- reconcile/openshift_network_policies.py +2 -4
- reconcile/openshift_resources_base.py +19 -29
- reconcile/openshift_saas_deploy.py +9 -10
- reconcile/openshift_saas_deploy_change_tester.py +7 -10
- reconcile/openshift_saas_deploy_trigger_base.py +4 -7
- reconcile/openshift_saas_deploy_trigger_cleaner.py +5 -8
- reconcile/openshift_saas_deploy_trigger_configs.py +1 -2
- reconcile/openshift_saas_deploy_trigger_images.py +1 -2
- reconcile/openshift_saas_deploy_trigger_moving_commits.py +1 -2
- reconcile/openshift_saas_deploy_trigger_upstream_jobs.py +1 -2
- reconcile/openshift_tekton_resources.py +7 -11
- reconcile/openshift_upgrade_watcher.py +10 -13
- reconcile/openshift_users.py +8 -11
- reconcile/oum/base.py +3 -4
- reconcile/oum/labelset.py +1 -2
- reconcile/oum/metrics.py +2 -2
- reconcile/oum/models.py +1 -2
- reconcile/oum/standalone.py +2 -3
- reconcile/prometheus_rules_tester/integration.py +6 -9
- reconcile/quay_membership.py +1 -2
- reconcile/quay_mirror.py +12 -13
- reconcile/quay_mirror_org.py +10 -10
- reconcile/queries.py +4 -7
- reconcile/resource_scraper.py +3 -4
- reconcile/rhidp/common.py +2 -2
- reconcile/saas_auto_promotions_manager/integration.py +5 -6
- reconcile/saas_auto_promotions_manager/merge_request_manager/batcher.py +1 -2
- reconcile/saas_auto_promotions_manager/publisher.py +5 -6
- reconcile/saas_auto_promotions_manager/subscriber.py +36 -15
- reconcile/saas_auto_promotions_manager/utils/saas_files_inventory.py +8 -0
- reconcile/saas_file_validator.py +2 -5
- reconcile/signalfx_endpoint_monitoring.py +2 -5
- reconcile/skupper_network/integration.py +3 -6
- reconcile/skupper_network/models.py +3 -5
- reconcile/slack_base.py +4 -7
- reconcile/slack_usergroups.py +15 -17
- reconcile/sql_query.py +5 -9
- reconcile/status_board.py +4 -5
- reconcile/statuspage/atlassian.py +14 -15
- reconcile/statuspage/integrations/maintenances.py +3 -3
- reconcile/statuspage/page.py +8 -8
- reconcile/statuspage/state.py +4 -5
- reconcile/statuspage/status.py +7 -8
- reconcile/templating/lib/rendering.py +8 -8
- reconcile/templating/renderer.py +10 -11
- reconcile/templating/validator.py +4 -4
- reconcile/terraform_aws_route53.py +3 -6
- reconcile/terraform_cloudflare_dns.py +9 -12
- reconcile/terraform_cloudflare_resources.py +9 -11
- reconcile/terraform_cloudflare_users.py +8 -11
- reconcile/terraform_init/integration.py +2 -2
- reconcile/terraform_repo.py +11 -14
- reconcile/terraform_resources.py +20 -21
- reconcile/terraform_tgw_attachments.py +32 -36
- reconcile/terraform_users.py +6 -7
- reconcile/terraform_vpc_resources/integration.py +6 -6
- reconcile/test/conftest.py +7 -10
- reconcile/test/fixtures.py +1 -1
- reconcile/test/saas_auto_promotions_manager/conftest.py +3 -2
- reconcile/test/saas_auto_promotions_manager/merge_request_manager/renderer/conftest.py +2 -2
- reconcile/test/test_database_access_manager.py +3 -6
- reconcile/test/test_gitlab_labeler.py +2 -5
- reconcile/test/test_jump_host.py +5 -8
- reconcile/test/test_ocm_machine_pools.py +1 -4
- reconcile/test/test_openshift_base.py +3 -6
- reconcile/test/test_openshift_cluster_bots.py +5 -5
- reconcile/test/test_openshift_namespace_labels.py +2 -3
- reconcile/test/test_openshift_saas_deploy_trigger_cleaner.py +2 -2
- reconcile/test/test_saasherder.py +9 -12
- reconcile/test/test_slack_base.py +4 -6
- reconcile/test/test_status_board.py +4 -7
- reconcile/test/test_terraform_tgw_attachments.py +14 -20
- reconcile/typed_queries/alerting_services_settings.py +1 -2
- reconcile/typed_queries/app_interface_custom_messages.py +2 -3
- reconcile/typed_queries/app_interface_deadmanssnitch_settings.py +1 -3
- reconcile/typed_queries/app_interface_repo_url.py +1 -2
- reconcile/typed_queries/app_interface_state_settings.py +1 -3
- reconcile/typed_queries/app_interface_vault_settings.py +1 -2
- reconcile/typed_queries/aws_vpc_requests.py +1 -3
- reconcile/typed_queries/aws_vpcs.py +1 -3
- reconcile/typed_queries/clusters.py +2 -4
- reconcile/typed_queries/clusters_minimal.py +1 -3
- reconcile/typed_queries/clusters_with_dms.py +1 -3
- reconcile/typed_queries/dynatrace_environments.py +14 -0
- reconcile/typed_queries/external_resources.py +3 -4
- reconcile/typed_queries/pagerduty_instances.py +1 -2
- reconcile/typed_queries/repos.py +2 -3
- reconcile/typed_queries/reserved_networks.py +1 -3
- reconcile/typed_queries/saas_files.py +49 -59
- reconcile/typed_queries/slo_documents.py +1 -3
- reconcile/typed_queries/status_board.py +3 -7
- reconcile/typed_queries/tekton_pipeline_providers.py +1 -2
- reconcile/typed_queries/terraform_namespaces.py +1 -2
- reconcile/typed_queries/terraform_tgw_attachments/aws_accounts.py +1 -3
- reconcile/utils/acs/base.py +2 -3
- reconcile/utils/acs/notifiers.py +3 -3
- reconcile/utils/acs/policies.py +3 -3
- reconcile/utils/aggregated_list.py +1 -1
- reconcile/utils/amtool.py +1 -2
- reconcile/utils/aws_api.py +28 -31
- reconcile/utils/aws_api_typed/account.py +23 -0
- reconcile/utils/aws_api_typed/api.py +20 -9
- reconcile/utils/binary.py +1 -3
- reconcile/utils/clusterhealth/providerbase.py +1 -2
- reconcile/utils/clusterhealth/telemeter.py +2 -2
- reconcile/utils/deadmanssnitch_api.py +1 -2
- reconcile/utils/disabled_integrations.py +4 -6
- reconcile/utils/environ.py +1 -1
- reconcile/utils/expiration.py +3 -7
- reconcile/utils/external_resource_spec.py +3 -4
- reconcile/utils/external_resources.py +4 -7
- reconcile/utils/filtering.py +1 -2
- reconcile/utils/git.py +3 -9
- reconcile/utils/git_secrets.py +5 -5
- reconcile/utils/github_api.py +5 -9
- reconcile/utils/gitlab_api.py +2 -3
- reconcile/utils/glitchtip/client.py +2 -4
- reconcile/utils/glitchtip/models.py +8 -11
- reconcile/utils/gql.py +26 -35
- reconcile/utils/grouping.py +1 -3
- reconcile/utils/imap_client.py +2 -5
- reconcile/utils/internal_groups/client.py +1 -2
- reconcile/utils/internal_groups/models.py +8 -9
- reconcile/utils/jenkins_api.py +4 -4
- reconcile/utils/jinja2/extensions.py +1 -1
- reconcile/utils/jinja2/filters.py +4 -4
- reconcile/utils/jinja2/utils.py +16 -16
- reconcile/utils/jira_client.py +10 -11
- reconcile/utils/jjb_client.py +14 -17
- reconcile/utils/jobcontroller/controller.py +5 -5
- reconcile/utils/jobcontroller/models.py +2 -2
- reconcile/utils/jsonpath.py +4 -5
- reconcile/utils/jump_host.py +7 -8
- reconcile/utils/keycloak.py +3 -7
- reconcile/utils/ldap_client.py +2 -3
- reconcile/utils/lean_terraform_client.py +13 -17
- reconcile/utils/membershipsources/app_interface_resolver.py +1 -1
- reconcile/utils/membershipsources/models.py +19 -22
- reconcile/utils/metrics.py +13 -15
- reconcile/utils/mr/base.py +7 -11
- reconcile/utils/mr/glitchtip_access_reporter.py +2 -2
- reconcile/utils/mr/notificator.py +1 -2
- reconcile/utils/oc.py +38 -38
- reconcile/utils/oc_connection_parameters.py +24 -25
- reconcile/utils/oc_filters.py +2 -3
- reconcile/utils/oc_map.py +9 -15
- reconcile/utils/ocm/addons.py +7 -10
- reconcile/utils/ocm/base.py +38 -39
- reconcile/utils/ocm/clusters.py +6 -9
- reconcile/utils/ocm/label_sources.py +1 -2
- reconcile/utils/ocm/labels.py +3 -6
- reconcile/utils/ocm/ocm.py +11 -14
- reconcile/utils/ocm/products.py +1 -3
- reconcile/utils/ocm/search_filters.py +16 -17
- reconcile/utils/ocm/service_log.py +2 -3
- reconcile/utils/ocm/sre_capability_labels.py +4 -8
- reconcile/utils/ocm/subscriptions.py +1 -3
- reconcile/utils/ocm/syncsets.py +2 -4
- reconcile/utils/ocm/upgrades.py +5 -9
- reconcile/utils/ocm_base_client.py +13 -16
- reconcile/utils/openshift_resource.py +5 -11
- reconcile/utils/output.py +2 -3
- reconcile/utils/pagerduty_api.py +4 -5
- reconcile/utils/prometheus.py +2 -2
- reconcile/utils/promotion_state.py +4 -5
- reconcile/utils/promtool.py +2 -8
- reconcile/utils/quay_api.py +12 -22
- reconcile/utils/raw_github_api.py +3 -5
- reconcile/utils/rosa/rosa_cli.py +6 -6
- reconcile/utils/rosa/session.py +6 -7
- reconcile/utils/runtime/desired_state_diff.py +3 -8
- reconcile/utils/runtime/environment.py +4 -7
- reconcile/utils/runtime/integration.py +4 -4
- reconcile/utils/runtime/meta.py +1 -2
- reconcile/utils/runtime/runner.py +7 -10
- reconcile/utils/runtime/sharding.py +22 -27
- reconcile/utils/saasherder/interfaces.py +63 -69
- reconcile/utils/saasherder/models.py +30 -35
- reconcile/utils/saasherder/saasherder.py +39 -54
- reconcile/utils/secret_reader.py +17 -19
- reconcile/utils/slack_api.py +15 -17
- reconcile/utils/smtp_client.py +1 -2
- reconcile/utils/sqs_gateway.py +1 -3
- reconcile/utils/state.py +1 -2
- reconcile/utils/terraform/config_client.py +4 -5
- reconcile/utils/terraform_client.py +12 -8
- reconcile/utils/terrascript/cloudflare_client.py +4 -10
- reconcile/utils/terrascript/cloudflare_resources.py +10 -13
- reconcile/utils/terrascript/models.py +2 -3
- reconcile/utils/terrascript/resources.py +1 -2
- reconcile/utils/terrascript_aws_client.py +50 -38
- reconcile/utils/unleash/client.py +4 -7
- reconcile/utils/unleash/server.py +2 -2
- reconcile/utils/vault.py +8 -11
- reconcile/utils/vaultsecretref.py +2 -3
- reconcile/utils/vcs.py +7 -8
- reconcile/vault_replication.py +4 -8
- reconcile/vpc_peerings_validator.py +4 -9
- release/version.py +6 -7
- tools/app_interface_reporter.py +2 -2
- tools/cli_commands/gpg_encrypt.py +3 -6
- tools/cli_commands/systems_and_tools.py +4 -7
- tools/qontract_cli.py +105 -17
- tools/saas_promotion_state/__init__.py +0 -0
- tools/saas_promotion_state/saas_promotion_state.py +105 -0
- tools/template_validation.py +1 -1
- tools/test/conftest.py +45 -6
- tools/test/test_saas_promotion_state.py +187 -0
- {qontract_reconcile-0.10.1rc879.dist-info → qontract_reconcile-0.10.1rc894.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.1rc879.dist-info → qontract_reconcile-0.10.1rc894.dist-info}/entry_points.txt +0 -0
- {qontract_reconcile-0.10.1rc879.dist-info → qontract_reconcile-0.10.1rc894.dist-info}/top_level.txt +0 -0
reconcile/openshift_namespace_labels.py

@@ -5,11 +5,7 @@ from collections.abc import (
     Generator,
 )
 from threading import Lock
-from typing import (
-    Any,
-    Optional,
-    Union,
-)
+from typing import Any
 
 from kubernetes.client.exceptions import ApiException
 from sretoolbox.utils import threaded
@@ -47,9 +43,9 @@ CURRENT = "current"
 CHANGED = "changed"
 UPDATED_MANAGED = "updated-managed"
 
-Labels = dict[str, Optional[str]]
+Labels = dict[str, str | None]
 LabelKeys = list[str]
-LabelsOrKeys = Union[Labels, LabelKeys]
+LabelsOrKeys = Labels | LabelKeys
 Types = dict[str, LabelsOrKeys]
 
 InternalLabelInventory = dict[str, dict[str, Types]]
@@ -104,8 +100,8 @@ class LabelInventory:
         cluster: str,
         namespace: str,
         type: str,
-        default: Optional[LabelsOrKeys] = None,
-    ) -> Optional[LabelsOrKeys]:
+        default: LabelsOrKeys | None = None,
+    ) -> LabelsOrKeys | None:
         """Get the labels or keys for the given cluster / namespace / type"""
         return self._inv.get(cluster, {}).get(namespace, {}).get(type, default)
 
@@ -293,9 +289,7 @@ def get_managed(inventory: LabelInventory, state: State) -> None:
     inventory.set(cluster=cluster, namespace=ns_name, type=MANAGED, labels=managed)
 
 
-def lookup_namespaces(
-    cluster: str, oc_map: OCMap
-) -> tuple[str, Optional[dict[str, Any]]]:
+def lookup_namespaces(cluster: str, oc_map: OCMap) -> tuple[str, dict[str, Any] | None]:
     """
     Retrieve all namespaces from the given cluster
     """
@@ -410,9 +404,9 @@ class NamespaceLabelError(Exception):
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
-    defer: Optional[Callable] = None,
+    defer: Callable | None = None,
     raise_errors: bool = False,
 ) -> None:
     _LOG.debug("Collecting GQL data ...")
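Most hunks in this diff apply the same typing cleanup: `typing.Optional` and `typing.Union` annotations are rewritten as PEP 604 unions (`X | None`, `X | Y`), so the `typing` imports shrink to just `Any`. A minimal before/after sketch of the pattern, using a hypothetical function that is not taken from the package:

# Before: typing.Optional / typing.Union spellings.
# from typing import Optional, Union
#
# def get_label(labels: dict[str, Optional[str]], key: str) -> Union[str, None]:
#     return labels.get(key)


# After: PEP 604 union syntax (Python 3.10+), no typing imports needed.
def get_label(labels: dict[str, str | None], key: str) -> str | None:
    return labels.get(key)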
@@ -6,10 +6,7 @@ from collections.abc import (
|
|
6
6
|
Mapping,
|
7
7
|
Sequence,
|
8
8
|
)
|
9
|
-
from typing import
|
10
|
-
Any,
|
11
|
-
Optional,
|
12
|
-
)
|
9
|
+
from typing import Any
|
13
10
|
|
14
11
|
from sretoolbox.utils import threaded
|
15
12
|
|
@@ -145,11 +142,11 @@ def check_results(
|
|
145
142
|
def run(
|
146
143
|
dry_run: bool,
|
147
144
|
thread_pool_size: int = 10,
|
148
|
-
internal:
|
145
|
+
internal: bool | None = None,
|
149
146
|
use_jump_host: bool = True,
|
150
|
-
cluster_name:
|
151
|
-
namespace_name:
|
152
|
-
defer:
|
147
|
+
cluster_name: Sequence[str] | None = None,
|
148
|
+
namespace_name: Sequence[str] | None = None,
|
149
|
+
defer: Callable | None = None,
|
153
150
|
) -> None:
|
154
151
|
all_namespaces = get_namespaces_minimal()
|
155
152
|
shard_namespaces, duplicates = get_shard_namespaces(all_namespaces)
|
reconcile/openshift_network_policies.py

@@ -94,12 +94,10 @@ def fetch_desired_state(namespaces, ri, oc_map):
         source_cluster = source_namespace_info["cluster"]["name"]
         if cluster != source_cluster:
             ri.register_error()
-            msg = "[{}/{}] Network Policy from cluster '{}' not allowed.".format(
-                cluster, namespace, source_cluster
-            )
+            msg = f"[{cluster}/{namespace}] Network Policy from cluster '{source_cluster}' not allowed."
             logging.error(msg)
             continue
-        resource_name = "allow-from-{}-namespace".format(source_namespace)
+        resource_name = f"allow-from-{source_namespace}-namespace"
         oc_resource = construct_oc_resource(resource_name, source_namespace)
         ri.add_desired(
             cluster,
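The other recurring cleanup is converting `str.format()` calls to f-strings, as in the network-policy message above. A minimal sketch of the pattern with illustrative values (not taken from the package):

cluster, namespace, source_cluster = "prod", "app-ns", "stage"

# Before: positional str.format() spread over several lines.
msg_old = "[{}/{}] Network Policy from cluster '{}' not allowed.".format(
    cluster, namespace, source_cluster
)

# After: the same message as an f-string, interpolating the variables inline.
msg_new = f"[{cluster}/{namespace}] Network Policy from cluster '{source_cluster}' not allowed."

assert msg_old == msg_new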
reconcile/openshift_resources_base.py

@@ -20,9 +20,7 @@ from textwrap import indent
 from threading import Lock
 from typing import (
     Any,
-    Optional,
     Protocol,
-    Tuple,
 )
 from unittest.mock import DEFAULT, patch
 
@@ -479,9 +477,7 @@ def fetch_provider_route(
             tls[k] = v
             continue
 
-        msg = "Route secret '{}' key '{}' not in valid keys {}".format(
-            tls_path, k, valid_keys
-        )
+        msg = f"Route secret '{tls_path}' key '{k}' not in valid keys {valid_keys}"
         _locked_info_log(msg)
 
     host = openshift_resource.body["spec"].get("host")
@@ -504,7 +500,7 @@ def fetch_openshift_resource(
     provider = resource["provider"]
     if provider == "resource":
         path = resource["resource"]["path"]
-        _locked_debug_log("Processing {}: {}".format(provider, path))
+        _locked_debug_log(f"Processing {provider}: {path}")
         validate_json = resource.get("validate_json") or False
         add_path_to_prom_rules = resource.get("add_path_to_prom_rules", True)
         validate_alertmanager_config = (
@@ -524,7 +520,7 @@ def fetch_openshift_resource(
         )
     elif provider == "resource-template":
         path = resource["resource"]["path"]
-        _locked_debug_log("Processing {}: {}".format(provider, path))
+        _locked_debug_log(f"Processing {provider}: {path}")
         add_path_to_prom_rules = resource.get("add_path_to_prom_rules", True)
         validate_alertmanager_config = (
             resource.get("validate_alertmanager_config") or False
@@ -557,12 +553,12 @@ def fetch_openshift_resource(
                 settings=settings,
             )
         except Exception as e:
-            msg = "could not render template at path {}\n{}".format(path, e)
+            msg = f"could not render template at path {path}\n{e}"
             raise ResourceTemplateRenderError(msg)
     elif provider == "vault-secret":
         path = resource["path"]
         version = resource["version"]
-        _locked_debug_log("Processing {}: {} - {}".format(provider, path, version))
+        _locked_debug_log(f"Processing {provider}: {path} - {version}")
         rn = resource["name"]
         name = path.split("/")[-1] if rn is None else rn
         rl = resource["labels"]
@@ -595,7 +591,7 @@ def fetch_openshift_resource(
             raise FetchSecretError(e)
     elif provider == "route":
         path = resource["resource"]["path"]
-        _locked_debug_log("Processing {}: {}".format(provider, path))
+        _locked_debug_log(f"Processing {provider}: {path}")
         tls_path = resource["vault_tls_secret_path"]
         tls_version = resource["vault_tls_secret_version"]
         openshift_resource = fetch_provider_route(
@@ -603,7 +599,7 @@ def fetch_openshift_resource(
         )
     elif provider == "prometheus-rule":
         path = resource["resource"]["path"]
-        _locked_debug_log("Processing {}: {}".format(provider, path))
+        _locked_debug_log(f"Processing {provider}: {path}")
         add_path_to_prom_rules = resource.get("add_path_to_prom_rules", True)
         tv = {}
         if resource["variables"]:
@@ -631,7 +627,7 @@ def fetch_openshift_resource(
                 settings=settings,
             )
         except Exception as e:
-            msg = "could not render template at path {}\n{}".format(path, e)
+            msg = f"could not render template at path {path}\n{e}"
             raise ResourceTemplateRenderError(msg)
 
     else:
@@ -684,7 +680,7 @@ def fetch_desired_state(
         UnknownProviderError,
     ) as e:
         ri.register_error()
-        msg = "[{}/{}] {}".format(cluster, namespace, str(e))
+        msg = f"[{cluster}/{namespace}] {str(e)}"
         _locked_error_log(msg)
         return
 
@@ -702,9 +698,7 @@ def fetch_desired_state(
         # combination was not initialized, meaning that it shouldn't be
         # managed. But someone is trying to add it via app-interface
         ri.register_error()
-        msg = "[{}/{}] unknown kind: {}. hint: is it missing from managedResourceTypes?".format(
-            cluster, namespace, openshift_resource.kind
-        )
+        msg = f"[{cluster}/{namespace}] unknown kind: {openshift_resource.kind}. hint: is it missing from managedResourceTypes?"
         _locked_error_log(msg)
         return
     except ResourceKeyExistsError:
@@ -712,18 +706,14 @@ def fetch_desired_state(
         # a desired resource with the same name and
         # the same type was already added previously
         ri.register_error()
-        msg = "[{}/{}] desired item already exists: {}/{}.".format(
-            cluster, namespace, openshift_resource.kind, openshift_resource.name
-        )
+        msg = f"[{cluster}/{namespace}] desired item already exists: {openshift_resource.kind}/{openshift_resource.name}."
         _locked_error_log(msg)
         return
     except ResourceNotManagedError:
         # This is failing because the resource name is
         # not in the list of resource names that are managed
         ri.register_error()
-        msg = "[{}/{}] desired item is not managed: {}/{}.".format(
-            cluster, namespace, openshift_resource.kind, openshift_resource.name
-        )
+        msg = f"[{cluster}/{namespace}] desired item is not managed: {openshift_resource.kind}/{openshift_resource.name}."
         _locked_error_log(msg)
         return
 
@@ -1000,7 +990,7 @@ class CheckClusterScopedResourceNames:
 @dataclass
 class CheckClusterScopedResourceDuplicates:
     oc_map: OC_Map
-    all_namespaces: Optional[Iterable[Mapping]] = None
+    all_namespaces: Iterable[Mapping] | None = None
 
     def check(self) -> list[Exception]:
         errors: list[Exception] = []
@@ -1022,13 +1012,13 @@ class CheckClusterScopedResourceDuplicates:
 
     def _find_resource_duplicates(
         self, cluster_cs_resources: dict[str, dict[str, dict[str, list[str]]]]
-    ) -> list[Tuple[str, str, str, list[str]]]:
+    ) -> list[tuple[str, str, str, list[str]]]:
         # ) -> dict[Tuple[str, str, str], list[str]]:
         """Finds cluster resource duplicates by kind/name.
         :param cluster_cs_resources
         :return: duplicates as [(cluster, kind, name, [namespaces])]
         """
-        duplicates: list[Tuple[str, str, str, list[str]]] = []
+        duplicates: list[tuple[str, str, str, list[str]]] = []
 
         for cluster, cluster_resources in cluster_cs_resources.items():
             _kind_name: dict[str, dict[str, list[str]]] = {}
@@ -1048,7 +1038,7 @@ def check_cluster_scoped_resources(
     oc_map: OC_Map,
     ri: ResourceInventory,
     namespaces: Iterable[Mapping[str, Any]],
-    all_namespaces: Optional[Iterable[Mapping[str, Any]]] = None,
+    all_namespaces: Iterable[Mapping[str, Any]] | None = None,
 ) -> bool:
     checks = [
         CheckClusterScopedResourceNames(oc_map, ri, namespaces),
@@ -1069,7 +1059,7 @@ def check_cluster_scoped_resources(
 def get_cluster_scoped_resources(
     oc_map: OC_Map,
     clusters: Iterable[str],
-    namespaces: Optional[Iterable[Mapping[str, Any]]] = None,
+    namespaces: Iterable[Mapping[str, Any]] | None = None,
     thread_pool_size: int = 10,
 ) -> dict[str, dict[str, dict[str, list[str]]]]:
     """Returns cluster scoped resources for a list of clusters
@@ -1106,7 +1096,7 @@ def get_cluster_scoped_resources(
 def _get_namespace_cluster_scoped_resources(
     namespace: Mapping,
     oc_map: OC_Map,
-) -> Tuple[str, str, dict[str, dict[str, Any]]]:
+) -> tuple[str, str, dict[str, dict[str, Any]]]:
     """Returns all non-namespaced resources defined in a namespace manifest.
 
     :param namespace: the namespace dict
@@ -1125,7 +1115,7 @@ def _get_namespace_cluster_scoped_resources(
 
 
 def early_exit_desired_state(
-    providers: list[str], resource_schema_filter: Optional[str] = None
+    providers: list[str], resource_schema_filter: str | None = None
 ) -> dict[str, Any]:
     settings = queries.get_secret_reader_settings()
     namespaces, _ = get_namespaces(
reconcile/openshift_saas_deploy.py

@@ -3,7 +3,6 @@ import logging
 import os
 import sys
 from collections.abc import Callable
-from typing import Optional
 
 import reconcile.openshift_base as ob
 from reconcile import (
@@ -68,9 +67,9 @@ def slack_notify(
     ri: ResourceInventory,
     console_url: str,
     in_progress: bool,
-    trigger_integration: Optional[str] = None,
-    trigger_reason: Optional[str] = None,
-    skip_successful_notifications: Optional[bool] = False,
+    trigger_integration: str | None = None,
+    trigger_reason: str | None = None,
+    skip_successful_notifications: bool | None = False,
 ) -> None:
     success = not ri.has_error_registered()
     # if the deployment doesn't want any notifications for successful
@@ -112,12 +111,12 @@ def run(
     thread_pool_size: int = 10,
     io_dir: str = "throughput/",
     use_jump_host: bool = True,
-    saas_file_name: Optional[str] = None,
-    env_name: Optional[str] = None,
-    trigger_integration: Optional[str] = None,
-    trigger_reason: Optional[str] = None,
-    saas_file_list: Optional[SaasFileList] = None,
-    defer: Optional[Callable] = None,
+    saas_file_name: str | None = None,
+    env_name: str | None = None,
+    trigger_integration: str | None = None,
+    trigger_reason: str | None = None,
+    saas_file_list: SaasFileList | None = None,
+    defer: Callable | None = None,
 ) -> None:
     vault_settings = get_app_interface_vault_settings()
     secret_reader = create_secret_reader(use_vault=vault_settings.vault)
reconcile/openshift_saas_deploy_change_tester.py

@@ -1,10 +1,7 @@
 import logging
 import sys
 from collections.abc import Iterable
-from typing import (
-    Any,
-    Optional,
-)
+from typing import Any
 
 from pydantic import BaseModel
 from sretoolbox.utils import threaded
@@ -37,7 +34,7 @@ class Definition(BaseModel):
 class State(BaseModel):
     saas_file_path: str
     saas_file_name: str
-    saas_file_deploy_resources: Optional[DeployResourcesV1]
+    saas_file_deploy_resources: DeployResourcesV1 | None
     resource_template_name: str
     cluster: str
     namespace: str
@@ -47,10 +44,10 @@ class State(BaseModel):
     parameters: dict[str, Any]
     secret_parameters: dict[str, VaultSecret]
     saas_file_definitions: Definition
-    upstream: Optional[SaasResourceTemplateTargetUpstreamV1]
-    disable: Optional[bool]
-    delete: Optional[bool]
-    target_path: Optional[str]
+    upstream: SaasResourceTemplateTargetUpstreamV1 | None
+    disable: bool | None
+    delete: bool | None
+    target_path: str | None
 
 
 def osd_run_wrapper(
@@ -58,7 +55,7 @@ def osd_run_wrapper(
     dry_run: bool,
     available_thread_pool_size: int,
     use_jump_host: bool,
-    saas_file_list: Optional[SaasFileList],
+    saas_file_list: SaasFileList | None,
 ) -> int:
     saas_file_name, env_name = spec
     exit_code = 0
reconcile/openshift_saas_deploy_trigger_base.py

@@ -1,10 +1,7 @@
 import logging
 from collections.abc import Callable
 from threading import Lock
-from typing import (
-    Any,
-    Optional,
-)
+from typing import Any
 
 from sretoolbox.utils import threaded
 
@@ -63,7 +60,7 @@ def run(
     internal: bool,
     use_jump_host: bool,
     include_trigger_trace: bool,
-    defer: Optional[Callable] = None,
+    defer: Callable | None = None,
 ) -> bool:
     """Run trigger integration
 
@@ -327,13 +324,13 @@ def _construct_tekton_trigger_resource(
     saas_file_name: str,
     env_name: str,
     tkn_pipeline_name: str,
-    timeout: Optional[str],
+    timeout: str | None,
     tkn_cluster_console_url: str,
     tkn_namespace_name: str,
     integration: str,
     integration_version: str,
     include_trigger_trace: bool,
-    reason: Optional[str],
+    reason: str | None,
 ) -> tuple[OR, str]:
     """Construct a resource (PipelineRun) to trigger a deployment via Tekton.
 
reconcile/openshift_saas_deploy_trigger_cleaner.py

@@ -1,14 +1,11 @@
 import logging
 from collections.abc import Callable
 from datetime import (
+    UTC,
     datetime,
     timedelta,
-    timezone,
-)
-from typing import (
-    Any,
-    Optional,
 )
+from typing import Any
 
 from dateutil import parser
 
@@ -67,11 +64,11 @@ def get_pipeline_runs_to_delete(
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
-    defer: Optional[Callable] = None,
+    defer: Callable | None = None,
 ) -> None:
-    now_date = datetime.now(timezone.utc)
+    now_date = datetime.now(UTC)
     vault_settings = get_app_interface_vault_settings()
     secret_reader = create_secret_reader(use_vault=vault_settings.vault)
     pipeline_providers = get_tekton_pipeline_providers()
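The hunks above also switch from `timezone.utc` to the `UTC` alias that was added to the `datetime` module in Python 3.11. A minimal sketch showing the two spellings produce the same timezone-aware value (variable names are illustrative):

from datetime import UTC, datetime, timezone

now_old = datetime.now(timezone.utc)  # pre-3.11 spelling
now_new = datetime.now(UTC)           # UTC is an alias for timezone.utc (Python 3.11+)
assert now_new.tzinfo is timezone.utc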
reconcile/openshift_saas_deploy_trigger_configs.py

@@ -1,5 +1,4 @@
 import sys
-from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -13,7 +12,7 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
     include_trigger_trace: bool = False,
 ) -> None:
reconcile/openshift_saas_deploy_trigger_images.py

@@ -1,5 +1,4 @@
 import sys
-from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -13,7 +12,7 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
     include_trigger_trace: bool = False,
 ) -> None:
reconcile/openshift_saas_deploy_trigger_moving_commits.py

@@ -1,5 +1,4 @@
 import sys
-from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -13,7 +12,7 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
     include_trigger_trace: bool = False,
 ) -> None:
reconcile/openshift_saas_deploy_trigger_upstream_jobs.py

@@ -1,5 +1,4 @@
 import sys
-from typing import Optional
 
 import reconcile.openshift_saas_deploy_trigger_base as osdt_base
 from reconcile.status import ExitCodes
@@ -13,7 +12,7 @@ QONTRACT_INTEGRATION_VERSION = make_semver(0, 3, 0)
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
     include_trigger_trace: bool = False,
 ) -> None:
reconcile/openshift_tekton_resources.py

@@ -2,11 +2,7 @@ import json
 import logging
 import sys
 from collections.abc import Mapping
-from typing import (
-    Any,
-    Optional,
-    Union,
-)
+from typing import Any
 
 import jinja2
 import yaml
@@ -77,7 +73,7 @@ class OpenshiftTektonResourcesBadConfigError(Exception):
     pass
 
 
-def fetch_saas_files(saas_file_name: Optional[str]) -> list[dict[str, Any]]:
+def fetch_saas_files(saas_file_name: str | None) -> list[dict[str, Any]]:
     """Fetch saas v2 files"""
     saas_files = gql.get_api().query(SAAS_FILES_QUERY)["saas_files"]
 
@@ -93,7 +89,7 @@ def fetch_saas_files(saas_file_name: Optional[str]) -> list[dict[str, Any]]:
     return saas_files
 
 
-def fetch_tkn_providers(saas_file_name: Optional[str]) -> dict[str, Any]:
+def fetch_tkn_providers(saas_file_name: str | None) -> dict[str, Any]:
     """Fetch tekton providers data for the saas files handled here"""
     saas_files = fetch_saas_files(saas_file_name)
     if not saas_files:
@@ -138,7 +134,7 @@ def fetch_tkn_providers(saas_file_name: Optional[str]) -> dict[str, Any]:
 
 def fetch_desired_resources(
     tkn_providers: dict[str, Any],
-) -> list[dict[str, Union[str, OR]]]:
+) -> list[dict[str, str | OR]]:
     """Create an array of dicts that will be used as args of ri.add_desired
     This will also add resourceNames inside tkn_providers['namespace']
     while we are migrating from the current system to this integration"""
@@ -344,7 +340,7 @@ def load_tkn_template(path: str, variables: dict[str, str]) -> dict[str, Any]:
 
 def build_desired_resource(
     tkn_object: dict[str, Any], path: str, cluster: str, namespace: str
-) -> dict[str, Union[str, OR]]:
+) -> dict[str, str | OR]:
     """Returns a dict with ResourceInventory.add_desired args"""
     openshift_resource = OR(
         tkn_object,
@@ -424,9 +420,9 @@ def build_one_per_saas_file_tkn_task_name(
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
-    saas_file_name: Optional[str] = None,
+    saas_file_name: str | None = None,
 ) -> None:
     tkn_providers = fetch_tkn_providers(saas_file_name)
 
reconcile/openshift_upgrade_watcher.py

@@ -4,7 +4,6 @@ from collections.abc import (
     Iterable,
 )
 from datetime import datetime
-from typing import Optional
 
 from reconcile import queries
 from reconcile.gql_definitions.common.clusters import ClusterV1
@@ -32,7 +31,7 @@ from reconcile.utils.state import (
 QONTRACT_INTEGRATION = "openshift-upgrade-watcher"
 
 
-def cluster_slack_handle(cluster: str, slack: Optional[SlackApi]) -> str:
+def cluster_slack_handle(cluster: str, slack: SlackApi | None) -> str:
     usergroup = f"{cluster}-cluster"
     usergroup_id = f"@{usergroup}"
     if slack:
@@ -42,10 +41,10 @@ def cluster_slack_handle(cluster: str, slack: Optional[SlackApi]) -> str:
 
 def handle_slack_notification(
     msg: str,
-    slack: Optional[SlackApi],
+    slack: SlackApi | None,
     state: State,
     state_key: str,
-    state_value: Optional[str],
+    state_value: str | None,
 ) -> None:
     """Check notification status, notify if needed and update the notification status"""
     if state.exists(state_key) and state.get(state_key) == state_value:
@@ -58,9 +57,7 @@ def handle_slack_notification(
     state.add(state_key, state_value, force=True)
 
 
-def _get_start_osd(
-    oc_map: OCMap, cluster_name: str
-) -> tuple[Optional[str], Optional[str]]:
+def _get_start_osd(oc_map: OCMap, cluster_name: str) -> tuple[str | None, str | None]:
     oc = oc_map.get(cluster_name)
     if isinstance(oc, OCLogMsg):
         logging.log(level=oc.log_level, msg=oc.message)
@@ -84,7 +81,7 @@ def _get_start_osd(
 
 def _get_start_hypershift(
     ocm_api: OCMBaseClient, cluster_id: str
-) -> tuple[Optional[str], Optional[str]]:
+) -> tuple[str | None, str | None]:
     schedules = get_control_plane_upgrade_policies(ocm_api, cluster_id)
     schedule = [s for s in schedules if s["state"] == "started"]
     if not schedule:
@@ -101,7 +98,7 @@ def notify_upgrades_start(
     oc_map: OCMap,
     ocm_map: OCMMap,
     state: State,
-    slack: Optional[SlackApi],
+    slack: SlackApi | None,
 ) -> None:
     now = datetime.utcnow()
     for cluster in clusters:
@@ -135,7 +132,7 @@ def notify_upgrades_start(
 
 
 def notify_cluster_new_version(
-    clusters: Iterable[ClusterV1], state: State, slack: Optional[SlackApi]
+    clusters: Iterable[ClusterV1], state: State, slack: SlackApi | None
 ) -> None:
     # Send a notification, if a cluster runs a version it was not running in the past
     # This does not check if an upgrade was successful or not
@@ -159,11 +156,11 @@ def notify_cluster_new_version(
 def run(
     dry_run: bool,
     thread_pool_size: int = 10,
-    internal: Optional[bool] = None,
+    internal: bool | None = None,
     use_jump_host: bool = True,
-    defer: Optional[Callable] = None,
+    defer: Callable | None = None,
 ) -> None:
-    slack: Optional[SlackApi] = None
+    slack: SlackApi | None = None
     if not dry_run:
         slack = slackapi_from_queries(QONTRACT_INTEGRATION)
 