qontract-reconcile 0.10.2.dev43__py3-none-any.whl → 0.10.2.dev44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: qontract-reconcile
- Version: 0.10.2.dev43
+ Version: 0.10.2.dev44
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Project-URL: homepage, https://github.com/app-sre/qontract-reconcile
  Project-URL: repository, https://github.com/app-sre/qontract-reconcile
@@ -633,7 +633,7 @@ reconcile/utils/state.py,sha256=az4tBmZ0EdbFcAGiBVUxs3cr2-BVWsuDQiNTvjjQq8s,1637
  reconcile/utils/structs.py,sha256=LcbLEg8WxfRqM6nW7NhcWN0YeqF7SQzxOgntmLs1SgY,352
  reconcile/utils/template.py,sha256=wTvRU4AnAV_o042tD4Mwls2dwWMuk7MKnde3MaCjaYg,331
  reconcile/utils/terraform_client.py,sha256=H8frsS370y8xfivKLNBD1dwlBLHvfuR6JSN_syBL5Qc,36033
- reconcile/utils/terrascript_aws_client.py,sha256=SNGtsG1n-IDZaI0blKLm3t3AfVNmxW-O8Y8NtX08OOc,270318
+ reconcile/utils/terrascript_aws_client.py,sha256=UdEM3JeTMiE0VRqtz7gcBWR-c0fouORtPFrniRJ3pao,283505
  reconcile/utils/three_way_diff_strategy.py,sha256=oQcHXd9LVhirJfoaOBoHUYuZVGfyL2voKr6KVI34zZE,4833
  reconcile/utils/throughput.py,sha256=iP4UWAe2LVhDo69mPPmgo9nQ7RxHD6_GS8MZe-aSiuM,344
  reconcile/utils/vault.py,sha256=aSA8l9cJlPUHpChFGl27nSY-Mpq9FMjBo7Dcgb1BVfM,15036
@@ -773,7 +773,7 @@ tools/saas_promotion_state/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJ
  tools/saas_promotion_state/saas_promotion_state.py,sha256=UfwwRLS5Ya4_Nh1w5n1dvoYtchQvYE9yj1VANt2IKqI,3925
  tools/sre_checkpoints/__init__.py,sha256=CDaDaywJnmRCLyl_NCcvxi-Zc0hTi_3OdwKiFOyS39I,145
  tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y,894
- qontract_reconcile-0.10.2.dev43.dist-info/METADATA,sha256=dR0wcgtr249AUyR4Ry3LQDzF8VgP0FQO4AEs_sCbJLM,24665
- qontract_reconcile-0.10.2.dev43.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- qontract_reconcile-0.10.2.dev43.dist-info/entry_points.txt,sha256=5i9l54La3vQrDLAdwDKQWC0iG4sV9RRfOb1BpvzOWLc,698
- qontract_reconcile-0.10.2.dev43.dist-info/RECORD,,
+ qontract_reconcile-0.10.2.dev44.dist-info/METADATA,sha256=VYLbNmLeQFocQBk-JLSHKadTfEMFX8Oq3W0GgOBUpkk,24665
+ qontract_reconcile-0.10.2.dev44.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ qontract_reconcile-0.10.2.dev44.dist-info/entry_points.txt,sha256=5i9l54La3vQrDLAdwDKQWC0iG4sV9RRfOb1BpvzOWLc,698
+ qontract_reconcile-0.10.2.dev44.dist-info/RECORD,,
@@ -77,6 +77,8 @@ from terrascript.resource import (
      aws_ec2_transit_gateway_vpc_attachment,
      aws_ec2_transit_gateway_vpc_attachment_accepter,
      aws_ecr_repository,
+     aws_elasticache_parameter_group,
+     aws_elasticache_replication_group,
      aws_elasticsearch_domain,
      aws_iam_access_key,
      aws_iam_group,
@@ -104,6 +106,8 @@ from terrascript.resource import (
      aws_lb_listener_rule,
      aws_lb_target_group,
      aws_lb_target_group_attachment,
+     aws_msk_cluster,
+     aws_msk_configuration,
      aws_ram_principal_association,
      aws_ram_resource_association,
      aws_ram_resource_share,
@@ -1641,6 +1645,8 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
              self.populate_tf_resource_rds(spec)
          elif provider == "s3":
              self.populate_tf_resource_s3(spec)
+         elif provider == "elasticache":
+             self.populate_tf_resource_elasticache(spec)
          elif provider == "aws-iam-service-account":
              self.populate_tf_resource_service_account(spec, ocm_map=ocm_map)
          elif provider == "secrets-manager-service-account":
@@ -1683,6 +1689,8 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
              self.populate_tf_resource_rosa_authenticator(spec)
          elif provider == "rosa-authenticator-vpce":
              self.populate_tf_resource_rosa_authenticator_vpce(spec)
+         elif provider == "msk":
+             self.populate_tf_resource_msk(spec)
          else:
              raise UnknownProviderError(provider)

@@ -2483,6 +2491,94 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods

          return bucket_tf_resource

+     def populate_tf_resource_elasticache(self, spec):
+         account = spec.provisioner_name
+         identifier = spec.identifier
+         values = self.init_values(spec)
+         output_prefix = spec.output_prefix
+         values.setdefault("replication_group_id", values["identifier"])
+         values.pop("identifier", None)
+
+         tf_resources = []
+         self.init_common_outputs(tf_resources, spec)
+
+         default_region = self.default_regions.get(account)
+         desired_region = values.pop("region", default_region)
+
+         provider = ""
+         if desired_region is not None and self._multiregion_account(account):
+             provider = "aws." + desired_region
+             values["provider"] = provider
+
+         if not values.get("apply_immediately"):
+             values["apply_immediately"] = False
+
+         parameter_group = values.get("parameter_group")
+         # Assume that cluster enabled is false if parameter group unset
+         pg_cluster_enabled = False
+
+         if parameter_group:
+             pg_values = self.get_values(parameter_group)
+             pg_name = pg_values["name"]
+             pg_identifier = pg_name
+
+             # If the desired region is not the same as the default region
+             # we append the region to the identifier to make it unique
+             # in the terraform config
+             if desired_region is not None and desired_region != default_region:
+                 pg_identifier = f"{pg_name}-{desired_region}"
+
+             pg_values["parameter"] = pg_values.pop("parameters")
+             for param in pg_values["parameter"]:
+                 if param["name"] == "cluster-enabled" and param["value"] == "yes":
+                     pg_cluster_enabled = True
+
+             if self._multiregion_account(account) and len(provider) > 0:
+                 pg_values["provider"] = provider
+             pg_tf_resource = aws_elasticache_parameter_group(pg_identifier, **pg_values)
+             tf_resources.append(pg_tf_resource)
+             values["depends_on"] = [
+                 f"aws_elasticache_parameter_group.{pg_identifier}",
+             ]
+             values["parameter_group_name"] = pg_name
+             values.pop("parameter_group", None)
+
+         auth_token = spec.get_secret_field("db.auth_token")
+         if not auth_token:
+             auth_token = self.generate_random_password()
+
+         if values.get("transit_encryption_enabled", False):
+             values["auth_token"] = auth_token
+
+         # elasticache replication group
+         # Ref: https://www.terraform.io/docs/providers/aws/r/
+         # elasticache_replication_group.html
+         tf_resource = aws_elasticache_replication_group(identifier, **values)
+         tf_resources.append(tf_resource)
+         # elasticache outputs
+         # we want the outputs to be formed into an OpenShift Secret
+         # with the following fields
+         # db.endpoint
+         output_name = output_prefix + "__db_endpoint"
+         # https://docs.aws.amazon.com/AmazonElastiCache/
+         # latest/red-ug/Endpoints.html
+         if pg_cluster_enabled:
+             output_value = "${" + tf_resource.configuration_endpoint_address + "}"
+         else:
+             output_value = "${" + tf_resource.primary_endpoint_address + "}"
+         tf_resources.append(Output(output_name, value=output_value))
+         # db.port
+         output_name = output_prefix + "__db_port"
+         output_value = "${" + str(tf_resource.port) + "}"
+         tf_resources.append(Output(output_name, value=output_value))
+         # db.auth_token
+         if values.get("transit_encryption_enabled", False):
+             output_name = output_prefix + "__db_auth_token"
+             output_value = values["auth_token"]
+             tf_resources.append(Output(output_name, value=output_value, sensitive=True))
+
+         self.add_resources(account, tf_resources)
+
      def populate_tf_resource_service_account(self, spec, ocm_map=None):
          account = spec.provisioner_name
          identifier = spec.identifier
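
The endpoint exported as db.endpoint by the new populate_tf_resource_elasticache method in the hunk above depends on whether the attached ElastiCache parameter group enables cluster mode. A minimal standalone sketch of that selection rule (pick_endpoint_attribute is a hypothetical helper, not part of this module):

    # Sketch only: mirrors the cluster-enabled check in populate_tf_resource_elasticache.
    # A parameter group that sets cluster-enabled=yes means the replication group runs in
    # cluster mode, so the configuration endpoint (not the primary endpoint) is exported.
    def pick_endpoint_attribute(parameters: list[dict]) -> str:
        cluster_enabled = any(
            p["name"] == "cluster-enabled" and p["value"] == "yes" for p in parameters
        )
        return "configuration_endpoint_address" if cluster_enabled else "primary_endpoint_address"

    # Example usage with hypothetical parameter-group data:
    assert pick_endpoint_attribute([{"name": "cluster-enabled", "value": "yes"}]) == "configuration_endpoint_address"
    assert pick_endpoint_attribute([]) == "primary_endpoint_address"
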
@@ -4191,6 +4287,24 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods
              values["db_name"] = db_name
              if values.get("replica_source"):
                  values.pop("db_name", None)
+         elif spec.provider == "elasticache":
+             if description := values.pop("replication_group_description", None):
+                 values["description"] = description
+             if num_cache_clusters := values.pop("number_cache_clusters", None):
+                 values["num_cache_clusters"] = num_cache_clusters
+             if cluster_mode := values.pop("cluster_mode", {}):
+                 for k, v in cluster_mode.items():
+                     values[k] = v
+             values.pop("availability_zones", None)
+         elif spec.provider == "msk":
+             if ebs_volume_size := values.get("broker_node_group_info", {}).pop(
+                 "ebs_volume_size", None
+             ):
+                 values["broker_node_group_info"].setdefault(
+                     "storage_info", {}
+                 ).setdefault("ebs_storage_info", {})[
+                     "volume_size"
+                 ] = ebs_volume_size

          return values

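The msk branch in the hunk above rewrites the legacy broker_node_group_info.ebs_volume_size field into the nested storage_info layout used by newer aws_msk_cluster schemas. A minimal sketch of the same dict transformation on hypothetical input (not taken from the package):

    # Hypothetical input resembling app-interface values for an MSK cluster.
    values = {"broker_node_group_info": {"client_subnets": ["subnet-a"], "ebs_volume_size": 100}}

    # Move ebs_volume_size under storage_info.ebs_storage_info.volume_size, as the diff does.
    if ebs_volume_size := values.get("broker_node_group_info", {}).pop("ebs_volume_size", None):
        values["broker_node_group_info"].setdefault("storage_info", {}).setdefault(
            "ebs_storage_info", {}
        )["volume_size"] = ebs_volume_size

    assert values["broker_node_group_info"] == {
        "client_subnets": ["subnet-a"],
        "storage_info": {"ebs_storage_info": {"volume_size": 100}},
    }
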
@@ -6542,6 +6656,205 @@ class TerrascriptClient: # pylint: disable=too-many-public-methods

          self.add_resources(account, tf_resources)

+     def populate_tf_resource_msk(self, spec):
+         account = spec.provisioner_name
+         values = self.init_values(spec)
+         output_prefix = spec.output_prefix
+         tf_resources = []
+         resource_id = spec.identifier
+
+         del values["identifier"]
+         values.setdefault("cluster_name", spec.identifier)
+
+         # common
+         self.init_common_outputs(tf_resources, spec)
+
+         # validations
+         if (
+             values["number_of_broker_nodes"]
+             % len(values["broker_node_group_info"]["client_subnets"])
+             != 0
+         ):
+             raise ValueError(
+                 "number_of_broker_nodes must be a multiple of the number of specified client subnets."
+             )
+
+         scram_enabled = (
+             values.get("client_authentication", {}).get("sasl", {}).get("scram", False)
+         )
+         scram_users = {}
+         if scram_enabled:
+             if not spec.resource.get("users", []):
+                 raise ValueError(
+                     "users attribute must be given when client_authentication.sasl.scram is enabled."
+                 )
+             scram_users = {
+                 user["name"]: self.secret_reader.read_all(user["secret"])
+                 for user in spec.resource["users"]
+             }
+             # validate user objects
+             for user, secret in scram_users.items():
+                 if secret.keys() != {"password", "username"}:
+                     raise ValueError(
+                         f"MSK user '{user}' secret must contain only 'username' and 'password' keys!"
+                     )
+
+         # resource - msk config
+         # unique msk config resource name enables "create_before_destroy" lifecycle
+         # which is required when changing version which requires a resource replacement
+         msk_version_str = values["kafka_version"].replace(".", "-")
+         msk_config_name = f"{resource_id}-{msk_version_str}"
+         msk_config = aws_msk_configuration(
+             msk_config_name,
+             name=msk_config_name,
+             kafka_versions=[values["kafka_version"]],
+             server_properties=values["server_properties"],
+             # lifecycle create_before_destroy is required to ensure that the config is created
+             # before it is assigned to the cluster
+             lifecycle={
+                 "create_before_destroy": True,
+             },
+         )
+         tf_resources.append(msk_config)
+         values.pop("server_properties", None)
+
+         # resource - cluster
+         values["configuration_info"] = {
+             "arn": "${" + msk_config.arn + "}",
+             "revision": "${" + msk_config.latest_revision + "}",
+         }
+         msk_cluster = aws_msk_cluster(resource_id, **values)
+         tf_resources.append(msk_cluster)
+
+         # resource - cloudwatch
+         if (
+             values.get("logging_info", {})
+             .get("broker_logs", {})
+             .get("cloudwatch_logs", {})
+             .get("enabled", False)
+         ):
+             log_group_values = {
+                 "name": f"{resource_id}-msk-broker-logs",
+                 "tags": values["tags"],
+                 "retention_in_days": values["logging_info"]["broker_logs"][
+                     "cloudwatch_logs"
+                 ]["retention_in_days"],
+             }
+             log_group_tf_resource = aws_cloudwatch_log_group(
+                 resource_id, **log_group_values
+             )
+             tf_resources.append(log_group_tf_resource)
+             del values["logging_info"]["broker_logs"]["cloudwatch_logs"][
+                 "retention_in_days"
+             ]
+             values["logging_info"]["broker_logs"]["cloudwatch_logs"]["log_group"] = (
+                 log_group_tf_resource.name
+             )
+
+         # resource - secret manager for SCRAM client credentials
+         if scram_enabled and scram_users:
+             scram_secrets: list[
+                 tuple[aws_secretsmanager_secret, aws_secretsmanager_secret_version]
+             ] = []
+
+             # kms
+             kms_values = {
+                 "description": "KMS key for MSK SCRAM credentials",
+                 "tags": values["tags"],
+             }
+             kms_key = aws_kms_key(resource_id, **kms_values)
+             tf_resources.append(kms_key)
+
+             kms_key_alias = aws_kms_alias(
+                 resource_id,
+                 name=f"alias/{resource_id}-msk-scram",
+                 target_key_id="${" + kms_key.arn + "}",
+             )
+             tf_resources.append(kms_key_alias)
+
+             for user, secret in scram_users.items():
+                 secret_identifier = f"AmazonMSK_{resource_id}-{user}"
+
+                 secret_values = {
+                     "name": secret_identifier,
+                     "tags": values["tags"],
+                     "kms_key_id": "${" + kms_key.arn + "}",
+                 }
+                 secret_resource = aws_secretsmanager_secret(
+                     secret_identifier, **secret_values
+                 )
+                 tf_resources.append(secret_resource)
+
+                 version_values = {
+                     "secret_id": "${" + secret_resource.arn + "}",
+                     "secret_string": json.dumps(secret, sort_keys=True),
+                 }
+                 version_resource = aws_secretsmanager_secret_version(
+                     secret_identifier, **version_values
+                 )
+                 tf_resources.append(version_resource)
+
+                 secret_policy_values = {
+                     "secret_arn": "${" + secret_resource.arn + "}",
+                     "policy": json.dumps({
+                         "Version": "2012-10-17",
+                         "Statement": [
+                             {
+                                 "Sid": "AWSKafkaResourcePolicy",
+                                 "Effect": "Allow",
+                                 "Principal": {"Service": "kafka.amazonaws.com"},
+                                 "Action": "secretsmanager:getSecretValue",
+                                 "Resource": "${" + secret_resource.arn + "}",
+                             }
+                         ],
+                     }),
+                 }
+                 secret_policy = aws_secretsmanager_secret_policy(
+                     secret_identifier, **secret_policy_values
+                 )
+                 tf_resources.append(secret_policy)
+                 scram_secrets.append((secret_resource, version_resource))
+
+             # create ONE scram secret association for each secret created above
+             scram_secret_association_values = {
+                 "cluster_arn": "${" + msk_cluster.arn + "}",
+                 "secret_arn_list": ["${" + s.arn + "}" for s, _ in scram_secrets],
+                 "depends_on": self.get_dependencies([v for _, v in scram_secrets]),
+             }
+             scram_secret_association = aws_msk_scram_secret_association(
+                 resource_id, **scram_secret_association_values
+             )
+             tf_resources.append(scram_secret_association)
+
+         # outputs
+         tf_resources += [
+             Output(
+                 output_prefix + "__zookeeper_connect_string",
+                 value="${" + msk_cluster.zookeeper_connect_string + "}",
+             ),
+             Output(
+                 output_prefix + "__zookeeper_connect_string_tls",
+                 value="${" + msk_cluster.zookeeper_connect_string_tls + "}",
+             ),
+             Output(
+                 output_prefix + "__bootstrap_brokers",
+                 value="${" + msk_cluster.bootstrap_brokers + "}",
+             ),
+             Output(
+                 output_prefix + "__bootstrap_brokers_tls",
+                 value="${" + msk_cluster.bootstrap_brokers_tls + "}",
+             ),
+             Output(
+                 output_prefix + "__bootstrap_brokers_sasl_iam",
+                 value="${" + msk_cluster.bootstrap_brokers_sasl_iam + "}",
+             ),
+             Output(
+                 output_prefix + "__bootstrap_brokers_sasl_scram",
+                 value="${" + msk_cluster.bootstrap_brokers_sasl_scram + "}",
+             ),
+         ]
+         self.add_resources(account, tf_resources)
+
      def populate_saml_idp(self, account_name: str, name: str, metadata: str) -> None:
          saml_idp = aws_iam_saml_provider(
              f"{account_name}-{name}", name=name, saml_metadata_document=metadata