qontract-reconcile 0.10.2.dev345__py3-none-any.whl → 0.10.2.dev408__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/METADATA +11 -10
  2. {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/RECORD +126 -120
  3. reconcile/aus/base.py +17 -14
  4. reconcile/automated_actions/config/integration.py +12 -0
  5. reconcile/aws_account_manager/integration.py +2 -2
  6. reconcile/aws_ami_cleanup/integration.py +6 -7
  7. reconcile/aws_ami_share.py +69 -62
  8. reconcile/aws_cloudwatch_log_retention/integration.py +155 -126
  9. reconcile/aws_ecr_image_pull_secrets.py +2 -2
  10. reconcile/aws_iam_keys.py +1 -0
  11. reconcile/aws_saml_idp/integration.py +7 -1
  12. reconcile/aws_saml_roles/integration.py +9 -3
  13. reconcile/change_owners/change_owners.py +1 -1
  14. reconcile/change_owners/diff.py +2 -4
  15. reconcile/checkpoint.py +11 -3
  16. reconcile/cli.py +33 -8
  17. reconcile/dashdotdb_dora.py +4 -11
  18. reconcile/database_access_manager.py +118 -111
  19. reconcile/endpoints_discovery/integration.py +4 -1
  20. reconcile/endpoints_discovery/merge_request_manager.py +9 -11
  21. reconcile/external_resources/factories.py +5 -12
  22. reconcile/external_resources/integration.py +1 -1
  23. reconcile/external_resources/manager.py +5 -3
  24. reconcile/external_resources/meta.py +0 -1
  25. reconcile/external_resources/model.py +10 -10
  26. reconcile/external_resources/reconciler.py +5 -2
  27. reconcile/external_resources/secrets_sync.py +4 -6
  28. reconcile/external_resources/state.py +5 -4
  29. reconcile/gabi_authorized_users.py +8 -5
  30. reconcile/gitlab_housekeeping.py +13 -15
  31. reconcile/gitlab_mr_sqs_consumer.py +2 -2
  32. reconcile/gitlab_owners.py +15 -11
  33. reconcile/gql_definitions/automated_actions/instance.py +41 -2
  34. reconcile/gql_definitions/aws_ami_cleanup/aws_accounts.py +10 -0
  35. reconcile/gql_definitions/aws_cloudwatch_log_retention/aws_accounts.py +22 -61
  36. reconcile/gql_definitions/aws_saml_idp/aws_accounts.py +10 -0
  37. reconcile/gql_definitions/aws_saml_roles/aws_accounts.py +10 -0
  38. reconcile/gql_definitions/common/aws_vpc_requests.py +10 -0
  39. reconcile/gql_definitions/common/clusters.py +2 -0
  40. reconcile/gql_definitions/external_resources/external_resources_namespaces.py +84 -1
  41. reconcile/gql_definitions/external_resources/external_resources_settings.py +2 -0
  42. reconcile/gql_definitions/fragments/aws_account_common.py +2 -0
  43. reconcile/gql_definitions/fragments/aws_organization.py +33 -0
  44. reconcile/gql_definitions/fragments/aws_vpc_request.py +2 -0
  45. reconcile/gql_definitions/introspection.json +3474 -1986
  46. reconcile/gql_definitions/jira_permissions_validator/jira_boards_for_permissions_validator.py +4 -0
  47. reconcile/gql_definitions/terraform_init/aws_accounts.py +14 -0
  48. reconcile/gql_definitions/terraform_resources/terraform_resources_namespaces.py +33 -1
  49. reconcile/gql_definitions/terraform_tgw_attachments/aws_accounts.py +10 -0
  50. reconcile/jenkins_worker_fleets.py +1 -0
  51. reconcile/jira_permissions_validator.py +236 -121
  52. reconcile/ocm/types.py +6 -0
  53. reconcile/openshift_base.py +47 -1
  54. reconcile/openshift_cluster_bots.py +2 -1
  55. reconcile/openshift_resources_base.py +6 -2
  56. reconcile/openshift_saas_deploy.py +2 -2
  57. reconcile/openshift_saas_deploy_trigger_cleaner.py +3 -5
  58. reconcile/openshift_upgrade_watcher.py +3 -3
  59. reconcile/queries.py +131 -0
  60. reconcile/saas_auto_promotions_manager/subscriber.py +4 -3
  61. reconcile/slack_usergroups.py +4 -3
  62. reconcile/sql_query.py +1 -0
  63. reconcile/statuspage/integrations/maintenances.py +4 -3
  64. reconcile/statuspage/status.py +5 -8
  65. reconcile/templates/rosa-classic-cluster-creation.sh.j2 +4 -0
  66. reconcile/templates/rosa-hcp-cluster-creation.sh.j2 +3 -0
  67. reconcile/templating/renderer.py +2 -1
  68. reconcile/terraform_aws_route53.py +7 -1
  69. reconcile/terraform_init/integration.py +185 -21
  70. reconcile/terraform_resources.py +11 -1
  71. reconcile/terraform_tgw_attachments.py +7 -1
  72. reconcile/terraform_users.py +7 -0
  73. reconcile/terraform_vpc_peerings.py +14 -3
  74. reconcile/terraform_vpc_resources/integration.py +7 -0
  75. reconcile/typed_queries/aws_account_tags.py +41 -0
  76. reconcile/typed_queries/saas_files.py +2 -2
  77. reconcile/utils/aggregated_list.py +4 -3
  78. reconcile/utils/aws_api.py +51 -20
  79. reconcile/utils/aws_api_typed/api.py +38 -9
  80. reconcile/utils/aws_api_typed/cloudformation.py +149 -0
  81. reconcile/utils/aws_api_typed/logs.py +73 -0
  82. reconcile/utils/datetime_util.py +67 -0
  83. reconcile/utils/differ.py +2 -3
  84. reconcile/utils/early_exit_cache.py +3 -2
  85. reconcile/utils/expiration.py +7 -3
  86. reconcile/utils/external_resource_spec.py +24 -1
  87. reconcile/utils/filtering.py +1 -1
  88. reconcile/utils/helm.py +2 -1
  89. reconcile/utils/helpers.py +1 -1
  90. reconcile/utils/jinja2/utils.py +4 -96
  91. reconcile/utils/jira_client.py +82 -63
  92. reconcile/utils/jjb_client.py +9 -12
  93. reconcile/utils/jobcontroller/controller.py +1 -1
  94. reconcile/utils/jobcontroller/models.py +17 -1
  95. reconcile/utils/json.py +32 -0
  96. reconcile/utils/merge_request_manager/merge_request_manager.py +3 -3
  97. reconcile/utils/merge_request_manager/parser.py +2 -2
  98. reconcile/utils/mr/app_interface_reporter.py +2 -2
  99. reconcile/utils/mr/base.py +2 -2
  100. reconcile/utils/mr/notificator.py +2 -2
  101. reconcile/utils/mr/update_access_report_base.py +3 -4
  102. reconcile/utils/oc.py +113 -95
  103. reconcile/utils/oc_filters.py +3 -3
  104. reconcile/utils/ocm/products.py +6 -0
  105. reconcile/utils/ocm/search_filters.py +3 -6
  106. reconcile/utils/ocm/service_log.py +3 -5
  107. reconcile/utils/openshift_resource.py +10 -5
  108. reconcile/utils/output.py +3 -2
  109. reconcile/utils/pagerduty_api.py +5 -5
  110. reconcile/utils/runtime/integration.py +1 -2
  111. reconcile/utils/runtime/runner.py +2 -2
  112. reconcile/utils/saasherder/models.py +2 -1
  113. reconcile/utils/saasherder/saasherder.py +9 -7
  114. reconcile/utils/slack_api.py +24 -2
  115. reconcile/utils/sloth.py +171 -2
  116. reconcile/utils/sqs_gateway.py +2 -1
  117. reconcile/utils/state.py +2 -1
  118. reconcile/utils/terraform_client.py +4 -3
  119. reconcile/utils/terrascript_aws_client.py +165 -111
  120. reconcile/utils/vault.py +1 -1
  121. reconcile/vault_replication.py +107 -42
  122. tools/app_interface_reporter.py +4 -4
  123. tools/cli_commands/systems_and_tools.py +5 -1
  124. tools/qontract_cli.py +25 -13
  125. {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/WHEEL +0 -0
  126. {qontract_reconcile-0.10.2.dev345.dist-info → qontract_reconcile-0.10.2.dev408.dist-info}/entry_points.txt +0 -0

reconcile/dashdotdb_dora.py
@@ -4,7 +4,6 @@ from collections import defaultdict
 from collections.abc import Iterable, Mapping
 from dataclasses import dataclass
 from datetime import (
-    UTC,
     datetime,
     timedelta,
 )
@@ -31,6 +30,7 @@ from reconcile.typed_queries.app_interface_vault_settings import (
     get_app_interface_vault_settings,
 )
 from reconcile.typed_queries.saas_files import get_saas_files
+from reconcile.utils.datetime_util import ensure_utc, utc_now
 from reconcile.utils.github_api import GithubRepositoryApi
 from reconcile.utils.gitlab_api import GitLabApi
 from reconcile.utils.secret_reader import create_secret_reader
@@ -159,15 +159,8 @@ class Commit:
     date: datetime

     def lttc(self, finish_timestamp: datetime) -> int:
-        commit_date_tzaware = self.date
-        finish_timestamp_tzaware = finish_timestamp
-
-        if commit_date_tzaware.tzinfo is None:
-            commit_date_tzaware = commit_date_tzaware.replace(tzinfo=UTC)
-
-        if finish_timestamp_tzaware.tzinfo is None:
-            finish_timestamp_tzaware = finish_timestamp_tzaware.replace(tzinfo=UTC)
-
+        commit_date_tzaware = ensure_utc(self.date)
+        finish_timestamp_tzaware = ensure_utc(finish_timestamp)
         return int((finish_timestamp_tzaware - commit_date_tzaware).total_seconds())


@@ -277,7 +270,7 @@ class DashdotdbDORA(DashdotdbBase):
         # from the DB for a unique (app_name, env_name) multiple times.
         app_envs = {s.app_env for s in saastargets}

-        since_default = datetime.now() - timedelta(days=90)
+        since_default = utc_now() - timedelta(days=90)
         app_env_since_list: list[tuple[AppEnv, datetime]] = threaded.run(
             func=functools.partial(self.get_latest_with_default, since_default),
             iterable=app_envs,
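
The `ensure_utc`/`utc_now` calls above come from the new `reconcile/utils/datetime_util.py` (+67 lines, not included in this excerpt). A minimal sketch of what those two helpers plausibly look like, inferred only from the call sites they replace:

# Hypothetical sketch of reconcile/utils/datetime_util.py, inferred from the
# call sites in this diff; the real module is not shown here.
from datetime import UTC, datetime


def utc_now() -> datetime:
    # Timezone-aware replacement for the naive datetime.now() used before.
    return datetime.now(UTC)


def ensure_utc(dt: datetime) -> datetime:
    # Mirror the old inline logic: treat naive datetimes as UTC and leave
    # timezone-aware ones untouched.
    return dt.replace(tzinfo=UTC) if dt.tzinfo is None else dt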

reconcile/database_access_manager.py
@@ -11,7 +11,7 @@ from typing import (
     TypedDict,
 )

-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from sretoolbox.container.image import Image

 from reconcile import openshift_base, queries
@@ -62,11 +62,14 @@ def get_database_access_namespaces(


 class DatabaseConnectionParameters(BaseModel):
-    host: str
-    port: str
-    user: str
-    password: str
-    database: str
+    host: str = Field(..., alias="db.host")
+    port: str = Field(..., alias="db.port")
+    user: str = Field(..., alias="db.user")
+    password: str = Field(..., alias="db.password")
+    database: str = Field(..., alias="db.name")
+
+    class Config:
+        allow_population_by_field_name = True


 class PSQLScriptGenerator(BaseModel):
@@ -225,7 +228,7 @@ def get_db_engine(resource: NamespaceTerraformResourceRDSV1) -> str:

 class JobData(BaseModel):
     engine: str
-    name_suffix: str
+    name: str
     image: str
     service_account_name: str
     rds_admin_secret_name: str
@@ -234,7 +237,7 @@ class JobData(BaseModel):


 def get_job_spec(job_data: JobData) -> OpenshiftResource:
-    job_name = f"dbam-{job_data.name_suffix}"
+    job_name = job_data.name

     image_tag = Image(job_data.image).tag
     image_pull_policy = "Always" if image_tag == "latest" else "IfNotPresent"
@@ -391,14 +394,6 @@ def get_service_account_spec(name: str) -> OpenshiftResource:
     )


-class DBAMResource(BaseModel):
-    resource: OpenshiftResource
-    clean_up: bool
-
-    class Config:
-        arbitrary_types_allowed = True
-
-
 class JobStatusCondition(BaseModel):
     type: str

@@ -424,18 +419,12 @@ def _populate_resources(
     user_connection: DatabaseConnectionParameters,
     admin_connection: DatabaseConnectionParameters,
     current_db_access: DatabaseAccessV1 | None = None,
-) -> list[DBAMResource]:
+) -> list[OpenshiftResource]:
     if user_connection.database == admin_connection.database:
         raise ValueError(f"Can not use default database {admin_connection.database}")

-    managed_resources: list[DBAMResource] = []
     # create service account
-    managed_resources.append(
-        DBAMResource(
-            resource=get_service_account_spec(resource_prefix),
-            clean_up=True,
-        )
-    )
+    service_account_resource = get_service_account_spec(resource_prefix)

     # create script secret
     generator = PSQLScriptGenerator(
@@ -446,74 +435,107 @@ def _populate_resources(
         engine=engine,
     )
     script_secret_name = f"{resource_prefix}-script"
-    managed_resources.extend([
-        DBAMResource(
-            resource=generate_script_secret_spec(
-                script_secret_name,
-                generator.generate_script(),
-            ),
-            clean_up=True,
-        ),
-        # create user secret
-        DBAMResource(
-            resource=generate_user_secret_spec(resource_prefix, user_connection),
-            clean_up=False,
-        ),
-    ])
+    secret_script_resource = generate_script_secret_spec(
+        script_secret_name,
+        generator.generate_script(),
+    )
+
+    # create user secret
+    user_secret_resource = generate_user_secret_spec(
+        resource_prefix,
+        user_connection,
+    )
+
     # create pull secret
-    labels = pull_secret["labels"] or {}
-    pull_secret_resources = orb.fetch_provider_vault_secret(
+    pull_secret_resource = orb.fetch_provider_vault_secret(
         path=pull_secret["path"],
         version=pull_secret["version"],
         name=f"{resource_prefix}-pull-secret",
-        labels=labels,
+        labels=pull_secret["labels"] or {},
         annotations=pull_secret["annotations"] or {},
         type=pull_secret["type"],
         integration=QONTRACT_INTEGRATION,
         integration_version=QONTRACT_INTEGRATION_VERSION,
         settings=settings,
     )
-    managed_resources.extend([
-        DBAMResource(resource=pull_secret_resources, clean_up=True),
-        # create job
-        DBAMResource(
-            resource=get_job_spec(
-                JobData(
-                    engine=engine,
-                    name_suffix=db_access.name,
-                    image=job_image,
-                    service_account_name=resource_prefix,
-                    rds_admin_secret_name=admin_secret_name,
-                    script_secret_name=script_secret_name,
-                    pull_secret=f"{resource_prefix}-pull-secret",
-                )
-            ),
-            clean_up=True,
-        ),
-    ])

-    return managed_resources
+    job_resource = get_job_spec(
+        JobData(
+            engine=engine,
+            name=resource_prefix,
+            image=job_image,
+            service_account_name=resource_prefix,
+            rds_admin_secret_name=admin_secret_name,
+            script_secret_name=script_secret_name,
+            pull_secret=f"{resource_prefix}-pull-secret",
+        )
+    )
+
+    return [
+        service_account_resource,
+        secret_script_resource,
+        user_secret_resource,
+        pull_secret_resource,
+        job_resource,
+    ]


 def _generate_password() -> str:
     return "".join(choices(ascii_letters + digits, k=32))


-class _DBDonnections(TypedDict):
+class DBConnections(TypedDict):
     user: DatabaseConnectionParameters
     admin: DatabaseConnectionParameters


+def _decode_secret_value(value: str) -> str:
+    return base64.b64decode(value).decode("utf-8")
+
+
+def _build_user_connection(
+    db_access: DatabaseAccessV1,
+    admin_secret: dict[str, Any],
+    user_secret: dict[str, Any],
+) -> DatabaseConnectionParameters:
+    """
+    Build user connection parameters.
+
+    If user secret exists, use values from it as that's the one used to provision new database user.
+    Otherwise, generate a new password, this info will be saved as cluster secret.
+    After job completes, the secret will be saved to vault then deleted from the cluster.
+
+    Args:
+        db_access (DatabaseAccessV1): Database access definition from app-interface.
+        admin_secret (dict[str, Any]): Admin secret fetched from the cluster.
+        user_secret (dict[str, Any]): User secret fetched from the cluster, may be empty if not exists.
+    Returns:
+        DatabaseConnectionParameters: Connection parameters for the database user.
+    """
+    if user_secret:
+        return DatabaseConnectionParameters(
+            host=_decode_secret_value(user_secret["data"]["db.host"]),
+            port=_decode_secret_value(user_secret["data"]["db.port"]),
+            user=_decode_secret_value(user_secret["data"]["db.user"]),
+            password=_decode_secret_value(user_secret["data"]["db.password"]),
+            database=_decode_secret_value(user_secret["data"]["db.name"]),
+        )
+    return DatabaseConnectionParameters(
+        host=_decode_secret_value(admin_secret["data"]["db.host"]),
+        port=_decode_secret_value(admin_secret["data"]["db.port"]),
+        user=db_access.username,
+        password=_generate_password(),
+        database=db_access.database,
+    )
+
+
 def _create_database_connection_parameter(
     db_access: DatabaseAccessV1,
     namespace_name: str,
     oc: OCClient,
     admin_secret_name: str,
     user_secret_name: str,
-) -> _DBDonnections:
-    def _decode_secret_value(value: str) -> str:
-        return base64.b64decode(value).decode("utf-8")
-
+) -> DBConnections:
     user_secret = oc.get(
         namespace_name,
         "Secret",
@@ -526,26 +548,11 @@ def _create_database_connection_parameter(
         admin_secret_name,
         allow_not_found=False,
     )
-
-    if user_secret:
-        password = _decode_secret_value(user_secret["data"]["db.password"])
-        host = _decode_secret_value(user_secret["data"]["db.host"])
-        user = _decode_secret_value(user_secret["data"]["db.user"])
-        port = _decode_secret_value(user_secret["data"]["db.port"])
-        database = _decode_secret_value(user_secret["data"]["db.name"])
-    else:
-        host = _decode_secret_value(admin_secret["data"]["db.host"])
-        port = _decode_secret_value(admin_secret["data"]["db.port"])
-        user = db_access.username
-        password = _generate_password()
-        database = db_access.database
-    return _DBDonnections(
-        user=DatabaseConnectionParameters(
-            host=host,
-            port=port,
-            user=user,
-            password=password,
-            database=database,
+    return DBConnections(
+        user=_build_user_connection(
+            db_access=db_access,
+            admin_secret=admin_secret,
+            user_secret=user_secret,
         ),
         admin=DatabaseConnectionParameters(
             host=_decode_secret_value(admin_secret["data"]["db.host"]),
@@ -557,10 +564,10 @@ def _create_database_connection_parameter(
     )


-def _db_access_acccess_is_valid(db_acces: DatabaseAccessV1) -> bool:
+def _db_access_access_is_valid(db_access: DatabaseAccessV1) -> bool:
     found_schema: set[str] = set()

-    for schema in db_acces.access or []:
+    for schema in db_access.access or []:
         if schema.target.dbschema in found_schema:
             return False
         found_schema.add(schema.target.dbschema)
@@ -575,6 +582,7 @@ class JobFailedError(Exception):
 def _process_db_access(
     dry_run: bool,
     state: State,
+    state_key: str,
     db_access: DatabaseAccessV1,
     namespace: NamespaceV1,
     admin_secret_name: str,
@@ -583,12 +591,12 @@ def _process_db_access(
     vault_output_path: str,
     vault_client: VaultClient,
 ) -> None:
-    if not _db_access_acccess_is_valid(db_access):
+    if not _db_access_access_is_valid(db_access):
         raise ValueError("Duplicate schema in access list.")

     current_db_access: DatabaseAccessV1 | None = None
-    if state.exists(db_access.name):
-        current_state = state.get(db_access.name)
+    if state.exists(state_key):
+        current_state = state.get(state_key)
         if current_state == db_access.dict(by_alias=True):
             return
         current_db_access = DatabaseAccessV1(**current_state)
@@ -600,7 +608,7 @@ def _process_db_access(
     cluster_name = namespace.cluster.name
     namespace_name = namespace.name

-    resource_prefix = f"dbam-{db_access.name}"
+    resource_prefix = f"dbam-{state_key.replace('/', '-')}"
     with OC_Map(
         clusters=[namespace.cluster.dict(by_alias=True)],
         integration=QONTRACT_INTEGRATION,
@@ -613,7 +621,7 @@ def _process_db_access(
         namespace_name,
         oc,
         admin_secret_name,
-        resource_prefix,
+        user_secret_name=resource_prefix,
     )

     sql_query_settings = settings.get("sqlQuery")
@@ -642,7 +650,7 @@ def _process_db_access(
     job = oc.get(
         namespace_name,
         "Job",
-        f"dbam-{db_access.name}",
+        resource_prefix,
         allow_not_found=True,
     )
     if not job:
@@ -652,8 +660,8 @@ def _process_db_access(
                 oc_map=oc_map,
                 cluster=cluster_name,
                 namespace=namespace_name,
-                resource_type=r.resource.kind,
-                resource=r.resource,
+                resource_type=r.kind,
+                resource=r,
                 wait_for_namespace=False,
             )
         return
@@ -665,34 +673,31 @@ def _process_db_access(
     )
     if job_status.is_complete():
        if job_status.has_errors():
-            raise JobFailedError(
-                f"Job dbam-{db_access.name} failed, please check logs"
-            )
+            raise JobFailedError(f"Job {resource_prefix} failed, please check logs")
        if not dry_run and not db_access.delete:
            secret = {
-                "path": f"{vault_output_path}/{QONTRACT_INTEGRATION}/{cluster_name}/{namespace_name}/{db_access.name}",
+                "path": f"{vault_output_path}/{QONTRACT_INTEGRATION}/{state_key}",
                "data": connections["user"].dict(by_alias=True),
            }
            vault_client.write(secret, decode_base64=False)
        logging.debug("job completed, cleaning up")
        for r in managed_resources:
-            if r.clean_up:
-                openshift_base.delete(
-                    dry_run=dry_run,
-                    oc_map=oc_map,
-                    cluster=cluster_name,
-                    namespace=namespace_name,
-                    resource_type=r.resource.kind,
-                    name=r.resource.name,
-                    enable_deletion=True,
-                )
+            openshift_base.delete(
+                dry_run=dry_run,
+                oc_map=oc_map,
+                cluster=cluster_name,
+                namespace=namespace_name,
+                resource_type=r.kind,
+                name=r.name,
+                enable_deletion=True,
+            )
        state.add(
-            db_access.name,
+            state_key,
            value=db_access.dict(by_alias=True),
            force=True,
        )
    else:
-        logging.info(f"Job dbam-{db_access.name} appears to be still running")
+        logging.info(f"Job {resource_prefix} appears to be still running")


 class DBAMIntegrationParams(PydanticRunParams):
@@ -734,9 +739,11 @@ class DatabaseAccessManagerIntegration(QontractReconcileIntegration):

         for db_access in resource.database_access or []:
             try:
+                state_key = f"{external_resource.provisioner.name}/{resource.identifier}/{db_access.name}"
                 _process_db_access(
                     dry_run,
                     state,
+                    state_key,
                     db_access,
                     namespace,
                     admin_secret_name,
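
The `db.*` aliases added to `DatabaseConnectionParameters` match the keys of the cluster secrets read by `_build_user_connection` and the payload written to Vault via `dict(by_alias=True)`. An illustrative use of the aliased model (pydantic v1 semantics; the connection values below are made up):

from reconcile.database_access_manager import DatabaseConnectionParameters

# allow_population_by_field_name lets callers keep constructing the model by
# plain field names...
params = DatabaseConnectionParameters(
    host="db.example.com",
    port="5432",
    user="app_user",
    password="not-a-real-password",
    database="app_db",
)
# ...while by_alias=True emits the "db.*" keys stored in the secret and in
# the Vault payload written by _process_db_access.
assert params.dict(by_alias=True) == {
    "db.host": "db.example.com",
    "db.port": "5432",
    "db.user": "app_user",
    "db.password": "not-a-real-password",
    "db.name": "app_db",
}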

reconcile/endpoints_discovery/integration.py
@@ -150,7 +150,10 @@ class EndpointsDiscoveryIntegration(
             return []

         routes = defaultdict(list)
-        for item in oc.get_items(kind="Route", namespace=namespace.name):
+        for item in oc.get_items(
+            kind="Route.route.openshift.io",
+            namespace=namespace.name,
+        ):
             tls = bool(item["spec"].get("tls"))
             host = item["spec"]["host"]
             # group all routes with the same hostname/tls
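
The group-qualified kind (`Route.route.openshift.io` here, `Job.batch` in external_resources/integration.py below) pins the lookup to one API group instead of relying on kind-name resolution. A hypothetical call mirroring the change, assuming `oc` is the same client object used above and the namespace name is made up:

# Hypothetical usage; only the kind string is taken from the diff.
routes = oc.get_items(
    kind="Route.route.openshift.io",  # kind plus API group, unambiguous
    namespace="example-namespace",
)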

reconcile/endpoints_discovery/merge_request_manager.py
@@ -1,8 +1,7 @@
 import hashlib
-import json
 import logging
 from collections.abc import Sequence
-from typing import Any, TypeAlias
+from typing import Any

 from gitlab.exceptions import GitlabGetError
 from pydantic import BaseModel
@@ -16,6 +15,7 @@ from reconcile.endpoints_discovery.merge_request import (
     Renderer,
 )
 from reconcile.utils.gitlab_api import GitLabApi
+from reconcile.utils.json import json_dumps
 from reconcile.utils.merge_request_manager.merge_request_manager import (
     MergeRequestManagerBase,
 )
@@ -64,22 +64,20 @@ class Endpoint(BaseModel):

     @property
     def hash(self) -> str:
-        return hashlib.sha256(
-            json.dumps(self.dict(), sort_keys=True).encode()
-        ).hexdigest()
+        return hashlib.sha256(json_dumps(self.dict()).encode()).hexdigest()


-EndpointsToAdd: TypeAlias = list[Endpoint]
-EndpointsToChange: TypeAlias = list[Endpoint]
-EndpointsToDelete: TypeAlias = list[Endpoint]
+EndpointsToAdd = list[Endpoint]
+EndpointsToChange = list[Endpoint]
+EndpointsToDelete = list[Endpoint]


 class App(BaseModel):
     name: str
     path: str
-    endpoints_to_add: EndpointsToAdd = EndpointsToAdd()
-    endpoints_to_change: EndpointsToChange = EndpointsToChange()
-    endpoints_to_delete: EndpointsToDelete = EndpointsToDelete()
+    endpoints_to_add: EndpointsToAdd = []
+    endpoints_to_change: EndpointsToChange = []
+    endpoints_to_delete: EndpointsToDelete = []

     @property
     def hash(self) -> str:
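
`json_dumps` comes from the new `reconcile/utils/json.py` (+32 lines, not included in this excerpt). Judging by the `json.dumps(..., sort_keys=True)` calls it replaces here and in external_resources/model.py, a minimal sketch could look like this:

# Hypothetical sketch of json_dumps, inferred from the call sites it
# replaces; not taken from the actual reconcile/utils/json.py module.
import json
from typing import Any


def json_dumps(data: Any, **kwargs: Any) -> str:
    # Deterministic serialization keeps the derived SHA-256/MD5 hashes stable.
    return json.dumps(data, sort_keys=True, **kwargs)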

reconcile/external_resources/factories.py
@@ -2,7 +2,7 @@ from abc import (
     ABC,
     abstractmethod,
 )
-from typing import Generic, TypeVar
+from typing import TypeVar

 from reconcile.external_resources.aws import (
     AWSDefaultResourceFactory,
@@ -32,16 +32,8 @@ from reconcile.utils.secret_reader import SecretReaderBase

 T = TypeVar("T")

-AWS_DEFAULT_TAGS = [
-    {
-        "tags": {
-            "app": "app-sre-infra",
-        }
-    }
-]

-
-class ObjectFactory(Generic[T]):
+class ObjectFactory[T]:
     def __init__(
         self, factories: dict[str, T], default_factory: T | None = None
     ) -> None:
@@ -123,12 +115,14 @@ class AWSExternalResourceFactory(ExternalResourceFactory):
         secret_reader: SecretReaderBase,
         provision_factories: ObjectFactory[ModuleProvisionDataFactory],
         resource_factories: ObjectFactory[AWSResourceFactory],
+        default_tags: dict[str, str],
     ):
         self.provision_factories = provision_factories
         self.resource_factories = resource_factories
         self.module_inventory = module_inventory
         self.er_inventory = er_inventory
         self.secret_reader = secret_reader
+        self.default_tags = default_tags

     def create_external_resource(
         self,
@@ -137,8 +131,7 @@ class AWSExternalResourceFactory(ExternalResourceFactory):
     ) -> ExternalResource:
         f = self.resource_factories.get_factory(spec.provider)
         data = f.resolve(spec, module_conf)
-        data["tags"] = spec.tags(integration=QONTRACT_INTEGRATION)
-        data["default_tags"] = AWS_DEFAULT_TAGS
+        data["tags"] = self.default_tags | spec.tags(integration=QONTRACT_INTEGRATION)

         region = data.get("region")
         if region:
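
`class ObjectFactory[T]:` uses the PEP 695 type-parameter syntax (Python 3.12+), equivalent to the earlier `Generic[T]` spelling, and the removed `AWS_DEFAULT_TAGS` constant is replaced by a `default_tags` mapping merged into the spec tags with the dict union operator. A small illustration of that merge; every tag value except `app: app-sre-infra` is made up:

# Dict union: keys from the right-hand operand win, so spec-level tags
# override the injected defaults on collision.
default_tags = {"app": "app-sre-infra"}
spec_tags = {"app": "my-service", "environment": "stage"}  # hypothetical
merged = default_tags | spec_tags
assert merged == {"app": "my-service", "environment": "stage"}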

reconcile/external_resources/integration.py
@@ -45,7 +45,7 @@ from reconcile.utils.secret_reader import SecretReaderBase, create_secret_reader
 def fetch_current_state(
     ri: ResourceInventory, oc: OCCli, cluster: str, namespace: str
 ) -> None:
-    for item in oc.get_items("Job", namespace=namespace):
+    for item in oc.get_items("Job.batch", namespace=namespace):
         r = OpenshiftResource(item, QONTRACT_INTEGRATION, QONTRACT_INTEGRATION_VERSION)
         ri.add_current(cluster, namespace, "Job", r.name, r)

reconcile/external_resources/manager.py
@@ -1,7 +1,7 @@
 import logging
 from collections import Counter
 from collections.abc import Iterable
-from datetime import UTC, datetime
+from typing import cast

 from sretoolbox.utils import threaded

@@ -41,6 +41,7 @@ from reconcile.external_resources.state import (
 from reconcile.gql_definitions.external_resources.external_resources_settings import (
     ExternalResourcesSettingsV1,
 )
+from reconcile.utils.datetime_util import utc_now
 from reconcile.utils.external_resource_spec import (
     ExternalResourceSpec,
 )
@@ -67,6 +68,7 @@ def setup_factories(
                 resource_factories=setup_aws_resource_factories(
                     er_inventory, secret_reader
                 ),
+                default_tags=cast("dict[str, str]", settings.default_tags),
             )
         }
     )
@@ -141,7 +143,7 @@ class ExternalResourcesManager:
     def _resource_drift_detection_ttl_expired(
         self, reconciliation: Reconciliation, state: ExternalResourceState
     ) -> bool:
-        return (datetime.now(state.ts.tzinfo) - state.ts).total_seconds() > (
+        return (utc_now() - state.ts).total_seconds() > (
             reconciliation.module_configuration.reconcile_drift_interval_minutes * 60
         )

@@ -355,7 +357,7 @@ class ExternalResourcesManager:
     def _set_resource_reconciliation_in_progress(
         self, r: Reconciliation, state: ExternalResourceState
     ) -> None:
-        state.ts = datetime.now(UTC)
+        state.ts = utc_now()
         if r.action == Action.APPLY:
             state.resource_status = ResourceStatus.IN_PROGRESS
         elif r.action == Action.DESTROY:

reconcile/external_resources/meta.py
@@ -10,7 +10,6 @@ SECRET_ANN_PROVISIONER = SECRET_ANN_PREFIX + "/provisioner_name"
 SECRET_ANN_PROVIDER = SECRET_ANN_PREFIX + "/provider"
 SECRET_ANN_IDENTIFIER = SECRET_ANN_PREFIX + "/identifier"
 SECRET_UPDATED_AT = SECRET_ANN_PREFIX + "/updated_at"
-SECRET_UPDATED_AT_TIMEFORMAT = "%Y-%m-%dT%H:%M:%SZ"

 FLAG_RESOURCE_MANAGED_BY_ERV2 = "managed_by_erv2"
 FLAG_DELETE_RESOURCE = "delete"

reconcile/external_resources/model.py
@@ -1,5 +1,4 @@
 import hashlib
-import json
 from abc import (
     ABC,
 )
@@ -23,6 +22,7 @@ from reconcile.gql_definitions.external_resources.external_resources_namespaces
     NamespaceTerraformResourceElastiCacheV1,
     NamespaceTerraformResourceKMSV1,
     NamespaceTerraformResourceMskV1,
+    NamespaceTerraformResourceRDSProxyV1,
     NamespaceTerraformResourceRDSV1,
     NamespaceV1,
 )
@@ -37,6 +37,7 @@ from reconcile.utils.exceptions import FetchResourceError
 from reconcile.utils.external_resource_spec import (
     ExternalResourceSpec,
 )
+from reconcile.utils.json import json_dumps


 class ExternalResourceOrphanedResourcesError(Exception):
@@ -88,9 +89,7 @@ class ExternalResourceKey(BaseModel, frozen=True):
     )

     def hash(self) -> str:
-        return hashlib.md5(
-            json.dumps(self.dict(), sort_keys=True).encode("utf-8")
-        ).hexdigest()
+        return hashlib.md5(json_dumps(self.dict()).encode("utf-8")).hexdigest()

     @property
     def state_path(self) -> str:
@@ -104,6 +103,7 @@ SUPPORTED_RESOURCE_TYPES = (
     | NamespaceTerraformResourceElastiCacheV1
     | NamespaceTerraformResourceKMSV1
     | NamespaceTerraformResourceCloudWatchV1
+    | NamespaceTerraformResourceRDSProxyV1
 )


@@ -115,7 +115,9 @@ class ExternalResourcesInventory(MutableMapping):
            (rp, ns)
            for ns in namespaces
            for rp in ns.external_resources or []
-            if isinstance(rp, SUPPORTED_RESOURCE_PROVIDERS) and rp.resources
+            if isinstance(rp, SUPPORTED_RESOURCE_PROVIDERS)
+            and rp.resources
+            and ns.managed_external_resources
        ]

        desired_specs = [
@@ -144,9 +146,9 @@
                    MODULE_OVERRIDES,
                }
            ),
-            namespace=namespace.dict(),
+            namespace=namespace.dict(by_alias=True),
        )
-        spec.metadata[FLAG_DELETE_RESOURCE] = resource.delete
+        spec.metadata[FLAG_DELETE_RESOURCE] = resource.delete or namespace.delete
        spec.metadata[MODULE_OVERRIDES] = resource.module_overrides
        return spec

@@ -438,6 +440,4 @@ class ExternalResource(BaseModel):
     provision: ExternalResourceProvision

     def hash(self) -> str:
-        return hashlib.md5(
-            json.dumps(self.data, sort_keys=True).encode("utf-8")
-        ).hexdigest()
+        return hashlib.md5(json_dumps(self.data).encode("utf-8")).hexdigest()