qontract-reconcile 0.10.1rc805__py3-none-any.whl → 0.10.1rc807__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: qontract-reconcile
- Version: 0.10.1rc805
+ Version: 0.10.1rc807
  Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
  Home-page: https://github.com/app-sre/qontract-reconcile
  Author: Red Hat App-SRE Team
@@ -10,7 +10,7 @@ reconcile/aws_iam_password_reset.py,sha256=NwErtrqgBiXr7eGCAHdtGGOx0S7-4JnSc29Ie
  reconcile/aws_support_cases_sos.py,sha256=Jk6_XjDeJSYxgRGqcEAOcynt9qJF2r5HPIPcSKmoBv8,2974
  reconcile/blackbox_exporter_endpoint_monitoring.py,sha256=W_VJagnsJR1v5oqjlI3RJJE0_nhtJ0m81RS8zWA5u5c,3538
  reconcile/checkpoint.py,sha256=R2WFXUXLTB4sWMi4GeA4eegsuf_1-Q4vH8M0Toh3Ij4,5036
- reconcile/cli.py,sha256=pBX9_8GU6g4m8Y3-KeX8wOCv9datyVqxvbkXTmgdvNQ,100833
+ reconcile/cli.py,sha256=n64KqkdnqF_H2WHoqlSxQ2PP0uYuY9THPGeyiLP9zJA,101154
  reconcile/closedbox_endpoint_monitoring_base.py,sha256=SMhkcQqprWvThrIJa3U_3uh5w1h-alleW1QnCJFY4Qw,4909
  reconcile/cluster_deployment_mapper.py,sha256=2Ah-nu-Mdig0pjuiZl_XLrmVAjYzFjORR3dMlCgkmw0,2352
  reconcile/dashdotdb_base.py,sha256=a5aPLVxyqPSbjdB0Ty-uliOtxwvEbbEljHJKxdK3-Zk,4813
@@ -104,7 +104,7 @@ reconcile/sendgrid_teammates.py,sha256=oO8QbLb4s1o8A6CGiCagN9CmS05BSS_WLztuY0Ym9
  reconcile/service_dependencies.py,sha256=PMKP9vc6oL-78rzyF_RE8DzLSQMSqN8vCqt9sWpBLAM,4470
  reconcile/signalfx_endpoint_monitoring.py,sha256=D1m8iq0EAKie0OD59FOcVCtpWWZ7xlo6lwBS9urwMIk,2894
  reconcile/slack_base.py,sha256=K3fSYx46G1djoPb07_C9j6ChhMCt5LgV5l6v2TFkNZk,3479
- reconcile/slack_usergroups.py,sha256=sp1QSPRUgTj3-hXpAWx1qOeR6okmVmlHcTUvUMTOLDI,27855
+ reconcile/slack_usergroups.py,sha256=vYecHCeJMZhxy1l0bIahTRN6ja0vxTSTXN2UfOkYXOo,29727
  reconcile/sql_query.py,sha256=FAQI9EIHsokZBbGwvGU4vnjg1fHemxpYQE20UtCB1qo,25941
  reconcile/status.py,sha256=cY4IJFXemhxptRJqR4qaaOWqei9e4jgLXuVSGajMsjg,544
  reconcile/status_board.py,sha256=nA74_133jukxVShjPKJpkXOA3vggDTTEhYTegoXbN1M,8632
@@ -790,15 +790,22 @@ tools/cli_commands/cost_report/openshift.py,sha256=XNEJpgtIQAi3Eoej1Btnl74IWg3g-
  tools/cli_commands/cost_report/response.py,sha256=_kbpBSjMjbPXGkjDgidTOLvFpVqfBo3VMkSOheUdmMA,1308
  tools/cli_commands/cost_report/util.py,sha256=r4K8nC1S0YZNSUNro141cYG1nuG8HwkkYEqWV9GCvu8,1861
  tools/cli_commands/cost_report/view.py,sha256=pKM6LDeWZJcTw2a7sWBwKSuR9p3SAk3lEb37uwMhlLw,10183
+ tools/saas_metrics_exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tools/saas_metrics_exporter/main.py,sha256=piocx6meMdJxoxeNz52gQGUjt5n7Fma4kgqYamszPrM,3180
+ tools/saas_metrics_exporter/commit_distance/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tools/saas_metrics_exporter/commit_distance/channel.py,sha256=XEAh3eL8TmgMe7V2BsyxuXYWgvBBVdSJETd6Ec7cI04,2171
+ tools/saas_metrics_exporter/commit_distance/commit_distance.py,sha256=snkcHKS7zxSIomS7psEQ13efN-j9MxKZHe0nLw55dAk,4042
+ tools/saas_metrics_exporter/commit_distance/metrics.py,sha256=rQTcinrv3uGLnHFumS37NN3QyVv1z6HGqy8MtfOwcxM,544
  tools/sre_checkpoints/__init__.py,sha256=CDaDaywJnmRCLyl_NCcvxi-Zc0hTi_3OdwKiFOyS39I,145
  tools/sre_checkpoints/util.py,sha256=zEDbGr18ZeHNQwW8pUsr2JRjuXIPz--WAGJxZo9sv_Y,894
  tools/test/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ tools/test/conftest.py,sha256=BYS85m3oTUxf7gxku14oH0ctlPMS1x1nk69kEJ6G5cc,1022
  tools/test/test_app_interface_metrics_exporter.py,sha256=SX7qL3D1SIRKFo95FoQztvftCWEEf-g1mfXOtgCog-g,1271
  tools/test/test_qontract_cli.py,sha256=_D61RFGAN5x44CY1tYbouhlGXXABwYfxKSWSQx3Jrss,4941
  tools/test/test_sd_app_sre_alert_report.py,sha256=v363r9zM7__0kR5K6mvJoGFcM9BvE33fWAayrqkpojA,2116
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.10.1rc805.dist-info/METADATA,sha256=hQqEKCmIWG-75Lp4rO5VuV1Utx4ciyWOVz3hcotymnM,2314
- qontract_reconcile-0.10.1rc805.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- qontract_reconcile-0.10.1rc805.dist-info/entry_points.txt,sha256=rIxI5zWtHNlfpDeq1a7pZXAPoqf7HG32KMTN3MeWK_8,429
- qontract_reconcile-0.10.1rc805.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
- qontract_reconcile-0.10.1rc805.dist-info/RECORD,,
+ qontract_reconcile-0.10.1rc807.dist-info/METADATA,sha256=2hjumgGnpdb2xNCQjgY_-ihI36Osy4S-xLrUaas-wDk,2314
+ qontract_reconcile-0.10.1rc807.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ qontract_reconcile-0.10.1rc807.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
+ qontract_reconcile-0.10.1rc807.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+ qontract_reconcile-0.10.1rc807.dist-info/RECORD,,
@@ -5,4 +5,5 @@ glitchtip-access-reporter = tools.glitchtip_access_reporter:main
  glitchtip-access-revalidation = tools.glitchtip_access_revalidation:main
  qontract-cli = tools.qontract_cli:root
  qontract-reconcile = reconcile.cli:integration
+ saas-metrics-exporter = tools.saas_metrics_exporter.main:main
  template-validation = tools.template_validation:main
reconcile/cli.py CHANGED
@@ -1200,8 +1200,18 @@ def openshift_upgrade_watcher(ctx, thread_pool_size, internal, use_jump_host):
  @integration.command(short_help="Manage Slack User Groups (channels and users).")
  @workspace_name
  @usergroup_name
+ @enable_extended_early_exit
+ @extended_early_exit_cache_ttl_seconds
+ @log_cached_log_output
  @click.pass_context
- def slack_usergroups(ctx, workspace_name, usergroup_name):
+ def slack_usergroups(
+     ctx,
+     workspace_name,
+     usergroup_name,
+     enable_extended_early_exit,
+     extended_early_exit_cache_ttl_seconds,
+     log_cached_log_output,
+ ):
      import reconcile.slack_usergroups

      run_integration(
@@ -1209,6 +1219,9 @@ def slack_usergroups(ctx, workspace_name, usergroup_name):
          ctx.obj,
          workspace_name,
          usergroup_name,
+         enable_extended_early_exit,
+         extended_early_exit_cache_ttl_seconds,
+         log_cached_log_output,
      )


reconcile/slack_usergroups.py CHANGED
@@ -9,6 +9,7 @@ from typing import (
      Any,
      Optional,
      Sequence,
+     TypedDict,
      Union,
  )
  from urllib.parse import urlparse
@@ -47,6 +48,10 @@ from reconcile.utils.exceptions import (
      AppInterfaceSettingsError,
      UnknownError,
  )
+ from reconcile.utils.extended_early_exit import (
+     ExtendedEarlyExitRunnerResult,
+     extended_early_exit_run,
+ )
  from reconcile.utils.github_api import GithubRepositoryApi
  from reconcile.utils.gitlab_api import GitLabApi
  from reconcile.utils.pagerduty_api import (
@@ -67,6 +72,8 @@ from reconcile.utils.slack_api import (

  DATE_FORMAT = "%Y-%m-%d %H:%M"
  QONTRACT_INTEGRATION = "slack-usergroups"
+ INTEGRATION_VERSION = "0.1.0"
+
  error_occurred = False


@@ -511,14 +518,14 @@ def _create_usergroups(
      desired_ug_state: State,
      slack_client: SlackApi,
      dry_run: bool = True,
- ) -> None:
+ ) -> int:
      """Create Slack usergroups."""
      global error_occurred  # noqa: PLW0603
      if current_ug_state:
          logging.debug(
              f"[{desired_ug_state.workspace}] Usergroup exists and will not be created {desired_ug_state.usergroup}"
          )
-         return
+         return 0

      logging.info([
          "create_usergroup",
@@ -531,6 +538,7 @@
          except SlackApiError as error:
              logging.error(error)
              error_occurred = True
+             return 1


  def _update_usergroup_users_from_state(
@@ -538,14 +546,14 @@
      desired_ug_state: State,
      slack_client: SlackApi,
      dry_run: bool = True,
- ) -> None:
+ ) -> int:
      """Update the users in a Slack usergroup."""
      global error_occurred  # noqa: PLW0603
      if current_ug_state.user_names == desired_ug_state.user_names:
          logging.debug(
              f"No usergroup user changes detected for {desired_ug_state.usergroup}"
          )
-         return
+         return 0

      slack_user_objects = [
          SlackObject(pk=pk, name=name)
@@ -559,7 +567,7 @@
              f"Following usernames are incorrect for usergroup {desired_ug_state.usergroup} and could not be matched with slack users {desired_ug_state.user_names - set(s.name for s in slack_user_objects)}"
          )
          error_occurred = True
-         return
+         return 0

      for user in desired_ug_state.user_names - current_ug_state.user_names:
          logging.info([
@@ -584,7 +592,7 @@
                  logging.info(
                      f"Usergroup {desired_ug_state.usergroup} does not exist yet. Skipping for now."
                  )
-                 return
+                 return 0
              slack_client.update_usergroup_users(
                  id=ugid,
                  users_list=sorted([s.pk for s in slack_user_objects]),
@@ -596,6 +604,7 @@
              # sensitive updates.
              logging.error(error)
              error_occurred = True
+             return 1


  def _update_usergroup_from_state(
@@ -603,9 +612,10 @@
      desired_ug_state: State,
      slack_client: SlackApi,
      dry_run: bool = True,
- ) -> None:
+ ) -> int:
      """Update a Slack usergroup."""
      global error_occurred  # noqa: PLW0603
+     change_detected = False
      if (
          current_ug_state.channel_names == desired_ug_state.channel_names
          and current_ug_state.description == desired_ug_state.description
@@ -613,7 +623,7 @@
          logging.debug(
              f"No usergroup channel/description changes detected for {desired_ug_state.usergroup}",
          )
-         return
+         return 0

      slack_channel_objects = [
          SlackObject(pk=pk, name=name)
@@ -635,6 +645,7 @@
      # return

      for channel in desired_channel_names - current_ug_state.channel_names:
+         change_detected = True
          logging.info([
              "add_channel_to_usergroup",
              desired_ug_state.workspace,
@@ -643,6 +654,7 @@
          ])

      for channel in current_ug_state.channel_names - desired_channel_names:
+         change_detected = True
          logging.info([
              "del_channel_from_usergroup",
              desired_ug_state.workspace,
@@ -651,6 +663,7 @@
          ])

      if current_ug_state.description != desired_ug_state.description:
+         change_detected = True
          logging.info([
              "update_usergroup_description",
              desired_ug_state.workspace,
@@ -658,14 +671,14 @@
              desired_ug_state.description,
          ])

-     if not dry_run:
+     if not dry_run and change_detected:
          try:
              ugid = slack_client.get_usergroup_id(desired_ug_state.usergroup)
              if not ugid:
                  logging.info(
                      f"Usergroup {desired_ug_state.usergroup} does not exist yet. Skipping for now."
                  )
-                 return
+                 return 0
              slack_client.update_usergroup(
                  id=ugid,
                  channels_list=sorted(s.pk for s in slack_channel_objects),
@@ -674,6 +687,8 @@
          except SlackApiError as error:
              logging.error(error)
              error_occurred = True
+             return 1
+     return 0


  def act(
@@ -681,35 +696,37 @@
      desired_state: SlackState,
      slack_map: SlackMap,
      dry_run: bool = True,
- ) -> None:
+ ) -> int:
      """Reconcile the differences between the desired and current state for
      Slack usergroups."""
+     apply_count = 0
      for workspace, desired_ws_state in desired_state.items():
          for usergroup, desired_ug_state in desired_ws_state.items():
              current_ug_state: State = current_state.get(workspace, {}).get(
                  usergroup, State()
              )

-             _create_usergroups(
+             apply_count += _create_usergroups(
                  current_ug_state,
                  desired_ug_state,
                  slack_client=slack_map[workspace].slack,
                  dry_run=dry_run,
              )

-             _update_usergroup_users_from_state(
+             apply_count += _update_usergroup_users_from_state(
                  current_ug_state,
                  desired_ug_state,
                  slack_client=slack_map[workspace].slack,
                  dry_run=dry_run,
              )

-             _update_usergroup_from_state(
+             apply_count += _update_usergroup_from_state(
                  current_ug_state,
                  desired_ug_state,
                  slack_client=slack_map[workspace].slack,
                  dry_run=dry_run,
              )
+     return apply_count


  def get_permissions(query_func: Callable) -> list[PermissionSlackUsergroupV1]:
@@ -731,10 +748,22 @@ def get_clusters(query_func: Callable) -> list[ClusterV1]:
      return clusters_query(query_func=query_func).clusters or []


+ class RunnerParams(TypedDict):
+     dry_run: bool
+     slack_map: SlackMap
+     desired_state: SlackState
+     clusters: list[ClusterV1]
+     workspace_name: Optional[str]
+     usergroup_name: Optional[str]
+
+
  def run(
      dry_run: bool,
      workspace_name: Optional[str] = None,
      usergroup_name: Optional[str] = None,
+     enable_extended_early_exit: bool = False,
+     extended_early_exit_cache_ttl_seconds: int = 3600,
+     log_cached_log_output: bool = False,
  ) -> None:
      global error_occurred  # noqa: PLW0603
      error_occurred = False
@@ -777,6 +806,45 @@
      # merge the two desired states recursively
      desired_state = deep_update(desired_state, desired_state_cluster_usergroups)

+     runner_params: RunnerParams = dict(
+         dry_run=dry_run,
+         slack_map=slack_map,
+         desired_state=desired_state,
+         clusters=clusters,
+         workspace_name=workspace_name,
+         usergroup_name=usergroup_name,
+     )
+
+     if enable_extended_early_exit:
+         extended_early_exit_run(
+             QONTRACT_INTEGRATION,
+             INTEGRATION_VERSION,
+             dry_run,
+             desired_state,
+             "",
+             extended_early_exit_cache_ttl_seconds,
+             logging.getLogger(),
+             runner,
+             runner_params=runner_params,
+             log_cached_log_output=log_cached_log_output,
+             secret_reader=secret_reader,
+         )
+     else:
+         runner(**runner_params)
+
+     if error_occurred:
+         logging.error("Error(s) occurred.")
+         sys.exit(1)
+
+
+ def runner(
+     dry_run: bool,
+     slack_map: SlackMap,
+     desired_state: SlackState,
+     clusters: list[ClusterV1],
+     workspace_name: Optional[str] = None,
+     usergroup_name: Optional[str] = None,
+ ) -> ExtendedEarlyExitRunnerResult:
      current_state = get_current_state(
          slack_map=slack_map,
          desired_workspace_name=workspace_name,
@@ -787,15 +855,13 @@
              if integration_is_enabled(QONTRACT_INTEGRATION, cluster)
          ],
      )
-     act(
+     apply_count = act(
          current_state=current_state,
          desired_state=desired_state,
          slack_map=slack_map,
          dry_run=dry_run,
      )
-     if error_occurred:
-         logging.error("Error(s) occurred.")
-         sys.exit(1)
+     return ExtendedEarlyExitRunnerResult(payload={}, applied_count=apply_count)


  def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
tools/saas_metrics_exporter/commit_distance/channel.py ADDED
@@ -0,0 +1,63 @@
+ from __future__ import annotations
+
+ from collections.abc import Iterable
+ from dataclasses import dataclass
+
+ from reconcile.typed_queries.saas_files import SaasFile
+ from reconcile.utils.secret_reader import HasSecret
+
+
+ @dataclass
+ class SaasTarget:
+     app_name: str
+     repo_url: str
+     namespace_name: str
+     target_name: str
+     ref: str
+     auth_code: HasSecret | None
+
+
+ @dataclass
+ class Channel:
+     name: str
+     subscribers: list[SaasTarget]
+     publishers: list[SaasTarget]
+
+
+ def build_channels(saas_files: Iterable[SaasFile]) -> list[Channel]:
+     channels: dict[str, Channel] = {}
+     for saas_file in saas_files:
+         for resource_template in saas_file.resource_templates:
+             for target in resource_template.targets:
+                 if not target.promotion:
+                     continue
+                 if not (target.promotion.publish or target.promotion.subscribe):
+                     continue
+                 auth_code = (
+                     saas_file.authentication.code if saas_file.authentication else None
+                 )
+                 target_name = target.name if target.name else "NoName"
+                 saas_target = SaasTarget(
+                     app_name=saas_file.app.name,
+                     repo_url=resource_template.url,
+                     ref=target.ref,
+                     auth_code=auth_code,
+                     namespace_name=target.namespace.name,
+                     target_name=target_name,
+                 )
+
+                 for channel in target.promotion.publish or []:
+                     if channel not in channels:
+                         channels[channel] = Channel(
+                             name=channel, subscribers=[], publishers=[]
+                         )
+                     channels[channel].publishers.append(saas_target)
+
+                 for channel in target.promotion.subscribe or []:
+                     if channel not in channels:
+                         channels[channel] = Channel(
+                             name=channel, subscribers=[], publishers=[]
+                         )
+                     channels[channel].subscribers.append(saas_target)
+
+     return list(channels.values())
tools/saas_metrics_exporter/commit_distance/commit_distance.py ADDED
@@ -0,0 +1,130 @@
+ from collections.abc import Iterable
+ from dataclasses import dataclass
+
+ from sretoolbox.utils import threaded
+
+ from reconcile.typed_queries.saas_files import SaasFile
+ from reconcile.utils.secret_reader import HasSecret
+ from reconcile.utils.vcs import VCS
+ from tools.saas_metrics_exporter.commit_distance.channel import (
+     Channel,
+     SaasTarget,
+     build_channels,
+ )
+ from tools.saas_metrics_exporter.commit_distance.metrics import SaasCommitDistanceGauge
+
+
+ @dataclass
+ class Distance:
+     publisher: SaasTarget
+     subscriber: SaasTarget
+     channel: Channel
+     distance: int = 0
+
+
+ @dataclass
+ class CommitDistanceMetric:
+     value: float
+     metric: SaasCommitDistanceGauge
+
+
+ @dataclass
+ class ThreadData:
+     repo_url: str
+     auth_code: HasSecret | None
+     ref_from: str
+     ref_to: str
+     distance: int = 0
+
+     def __hash__(self) -> int:
+         return hash((self.repo_url, self.ref_from, self.ref_to))
+
+
+ class CommitDistanceFetcher:
+     def __init__(self, vcs: VCS):
+         self._vcs = vcs
+
+     def _data_key(self, repo_url: str, ref_from: str, ref_to: str) -> str:
+         return f"{repo_url}/{ref_from}/{ref_to}"
+
+     def _calculate_commit_distance(self, data: ThreadData) -> None:
+         if data.ref_from == data.ref_to:
+             data.distance = 0
+             return
+
+         commits = self._vcs.get_commits_between(
+             repo_url=data.repo_url,
+             auth_code=data.auth_code,
+             commit_from=data.ref_from,
+             commit_to=data.ref_to,
+         )
+         data.distance = len(commits)
+
+     def _populate_distances(
+         self, distances: Iterable[Distance], thread_data: Iterable[ThreadData]
+     ) -> None:
+         m = {
+             self._data_key(
+                 repo_url=d.repo_url, ref_from=d.ref_from, ref_to=d.ref_to
+             ): d.distance
+             for d in thread_data
+         }
+         for distance in distances:
+             distance.distance = m[
+                 self._data_key(
+                     repo_url=distance.publisher.repo_url,
+                     ref_from=distance.subscriber.ref,
+                     ref_to=distance.publisher.ref,
+                 )
+             ]
+
+     def fetch(
+         self, saas_files: Iterable[SaasFile], thread_pool_size: int
+     ) -> list[CommitDistanceMetric]:
+         channels = build_channels(saas_files=saas_files)
+         distances: list[Distance] = []
+         thread_data: set[ThreadData] = set()
+
+         for channel in channels:
+             for subscriber in channel.subscribers:
+                 for publisher in channel.publishers:
+                     thread_data.add(
+                         ThreadData(
+                             repo_url=publisher.repo_url,
+                             auth_code=publisher.auth_code,
+                             ref_from=subscriber.ref,
+                             ref_to=publisher.ref,
+                         )
+                     )
+                     distances.append(
+                         Distance(
+                             publisher=publisher,
+                             subscriber=subscriber,
+                             channel=channel,
+                         )
+                     )
+
+         threaded.run(
+             self._calculate_commit_distance,
+             thread_data,
+             thread_pool_size=thread_pool_size,
+         )
+
+         self._populate_distances(distances=distances, thread_data=thread_data)
+
+         commit_distance_metrics = [
+             CommitDistanceMetric(
+                 value=float(distance.distance),
+                 metric=SaasCommitDistanceGauge(
+                     channel=distance.channel.name,
+                     app=distance.publisher.app_name,
+                     publisher=distance.publisher.target_name,
+                     publisher_namespace=distance.publisher.namespace_name,
+                     subscriber=distance.subscriber.target_name,
+                     subscriber_namespace=distance.subscriber.namespace_name,
+                 ),
+             )
+             for distance in distances
+         ]
+
+         return commit_distance_metrics
tools/saas_metrics_exporter/commit_distance/metrics.py ADDED
@@ -0,0 +1,26 @@
+ from pydantic import BaseModel
+
+ from reconcile.utils.metrics import (
+     GaugeMetric,
+ )
+
+
+ class SaasBaseMetric(BaseModel):
+     "Base class for Saas metrics"
+
+     integration: str = "saas_metrics_exporter"
+
+
+ class SaasCommitDistanceGauge(SaasBaseMetric, GaugeMetric):
+     "Gauge for the commit distance between saas targets in a channel"
+
+     channel: str
+     publisher: str
+     publisher_namespace: str
+     subscriber: str
+     subscriber_namespace: str
+     app: str
+
+     @classmethod
+     def name(cls) -> str:
+         return "commit_distance"
tools/saas_metrics_exporter/main.py ADDED
@@ -0,0 +1,99 @@
+ from __future__ import annotations
+
+ from collections.abc import Callable
+
+ import click
+
+ from reconcile.cli import (
+     config_file,
+     dry_run,
+     log_level,
+ )
+ from reconcile.typed_queries.app_interface_repo_url import get_app_interface_repo_url
+ from reconcile.typed_queries.app_interface_vault_settings import (
+     get_app_interface_vault_settings,
+ )
+ from reconcile.typed_queries.github_orgs import get_github_orgs
+ from reconcile.typed_queries.gitlab_instances import get_gitlab_instances
+ from reconcile.typed_queries.saas_files import get_saas_files
+ from reconcile.utils import metrics
+ from reconcile.utils.defer import defer
+ from reconcile.utils.runtime.environment import init_env
+ from reconcile.utils.secret_reader import create_secret_reader
+ from reconcile.utils.vcs import VCS
+ from tools.saas_metrics_exporter.commit_distance.commit_distance import (
+     CommitDistanceFetcher,
+ )
+
+
+ class SaasMetricsExporter:
+     """
+     This tool is responsible for exposing/exporting saas metrics and data.
+
+     Note, that by design we store metrics exporters as a tool in the tools directory.
+     """
+
+     def __init__(self, vcs: VCS, dry_run: bool) -> None:
+         self._vcs = vcs
+         self._dry_run = dry_run
+
+     @staticmethod
+     def create(dry_run: bool) -> SaasMetricsExporter:
+         vault_settings = get_app_interface_vault_settings()
+         secret_reader = create_secret_reader(use_vault=vault_settings.vault)
+         vcs = VCS(
+             secret_reader=secret_reader,
+             github_orgs=get_github_orgs(),
+             gitlab_instances=get_gitlab_instances(),
+             app_interface_repo_url=get_app_interface_repo_url(),
+             dry_run=dry_run,
+         )
+         return SaasMetricsExporter(vcs=vcs, dry_run=dry_run)
+
+     @defer
+     def run(
+         self,
+         env_name: str | None,
+         app_name: str | None,
+         thread_pool_size: int,
+         defer: Callable | None = None,
+     ) -> None:
+         saas_files = get_saas_files(env_name=env_name, app_name=app_name)
+         if defer:
+             defer(self._vcs.cleanup)
+
+         commit_distance_fetcher = CommitDistanceFetcher(vcs=self._vcs)
+         commit_distance_metrics = commit_distance_fetcher.fetch(
+             saas_files=saas_files, thread_pool_size=thread_pool_size
+         )
+         for m in commit_distance_metrics:
+             metrics.set_gauge(
+                 metric=m.metric,
+                 value=m.value,
+             )
+
+
+ @click.command()
+ @click.option("--env-name", default=None, help="environment to filter saas files by")
+ @click.option("--app-name", default=None, help="app to filter saas files by")
+ @click.option("--thread-pool-size", default=1, help="threadpool size")
+ @dry_run
+ @config_file
+ @log_level
+ def main(
+     env_name: str | None,
+     app_name: str | None,
+     dry_run: bool,
+     thread_pool_size: int,
+     configfile: str,
+     log_level: str | None,
+ ) -> None:
+     init_env(log_level=log_level, config_file=configfile)
+     exporter = SaasMetricsExporter.create(dry_run=dry_run)
+     exporter.run(
+         env_name=env_name, app_name=app_name, thread_pool_size=thread_pool_size
+     )
+
+
+ if __name__ == "__main__":
+     main()  # pylint: disable=no-value-for-parameter
tools/test/conftest.py ADDED
@@ -0,0 +1,40 @@
+ from collections.abc import (
+     Callable,
+     MutableMapping,
+ )
+ from typing import (
+     Any,
+     Optional,
+ )
+
+ import pytest
+ from pydantic import BaseModel
+ from pydantic.error_wrappers import ValidationError
+
+ from reconcile.utils.models import data_default_none
+
+
+ class GQLClassFactoryError(Exception):
+     pass
+
+
+ @pytest.fixture
+ def gql_class_factory() -> (
+     Callable[
+         [type[BaseModel], Optional[MutableMapping[str, Any]]],
+         BaseModel,
+     ]
+ ):
+     """Create a GQL class from a fixture and set default values to None."""
+
+     def _gql_class_factory(
+         klass: type[BaseModel], data: Optional[MutableMapping[str, Any]] = None
+     ) -> BaseModel:
+         try:
+             return klass(**data_default_none(klass, data or {}))
+         except ValidationError as e:
+             msg = "[gql_class_factory] Your given data does not match the class ...\n"
+             msg += "\n".join([str(m) for m in list(e.raw_errors)])
+             raise GQLClassFactoryError(msg) from e
+
+     return _gql_class_factory