tgwrap 0.8.12__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tgwrap/analyze.py +62 -18
- tgwrap/cli.py +117 -25
- tgwrap/deploy.py +10 -3
- tgwrap/inspector-resources-template.yml +63 -0
- tgwrap/inspector.py +438 -0
- tgwrap/main.py +583 -126
- tgwrap/printer.py +3 -0
- {tgwrap-0.8.12.dist-info → tgwrap-0.11.2.dist-info}/METADATA +163 -6
- tgwrap-0.11.2.dist-info/RECORD +13 -0
- {tgwrap-0.8.12.dist-info → tgwrap-0.11.2.dist-info}/WHEEL +1 -1
- tgwrap-0.8.12.dist-info/RECORD +0 -11
- {tgwrap-0.8.12.dist-info → tgwrap-0.11.2.dist-info}/LICENSE +0 -0
- {tgwrap-0.8.12.dist-info → tgwrap-0.11.2.dist-info}/entry_points.txt +0 -0
tgwrap/main.py
CHANGED
@@ -17,6 +17,7 @@ import sys
 import subprocess
 import shlex
 import shutil
+import requests
 import re
 import tempfile
 import json
@@ -24,6 +25,7 @@ import yaml
 import threading
 import queue
 import multiprocessing
+import traceback
 import click
 import networkx as nx
 import hcl2
@@ -34,13 +36,14 @@ from datetime import datetime, timezone
 from .printer import Printer
 from .analyze import run_analyze
 from .deploy import prepare_deploy_config, run_sync
+from .inspector import AzureInspector
+
 class DateTimeEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime):
             return obj.isoformat()
         return super().default(obj)
 
-
 class TgWrap():
     """
     A wrapper around terragrunt with the sole purpose to make it a bit
@@ -50,7 +53,6 @@ class TgWrap():
     TERRAGRUNT_FILE='terragrunt.hcl'
     VERSION_FILE="version.hcl"
     LATEST_VERSION='latest'
-    LOCATE_VERSION_FILE_MAX_LEVELS=3
     PLANFILE_NAME="planfile"
     TG_SOURCE_VAR="TERRAGRUNT_SOURCE"
     TG_SOURCE_MAP_VAR="TERRAGRUNT_SOURCE_MAP"
@@ -83,9 +85,13 @@ class TgWrap():
         )
         self.tg_source_indicator = None
 
+        # terragrunt do now prefer opentofu but we want this to be a conscious decision
+        if not os.environ.get('TERRAGRUNT_TFPATH'):
+            os.environ['TERRAGRUNT_TFPATH'] = 'terraform'
+
     def load_yaml_file(self, filepath):
         try:
-            with open(filepath, 'r') as file:
+            with open(filepath.strip(), 'r') as file:
                 return yaml.safe_load(file)
         except yaml.parser.ParserError as e:
             self.printer.error(f'Cannot parse YAML file {filepath}, check syntax please!')
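The constructor now pins Terragrunt to the `terraform` binary unless the caller has already made a choice via `TERRAGRUNT_TFPATH`. A minimal sketch of deliberately opting in to OpenTofu instead; the `tofu` binary name is an assumption about the caller's environment, not something this diff configures:

    import os

    # hypothetical caller-side override: must be set before TgWrap is constructed,
    # otherwise the new default above falls back to 'terraform'
    os.environ['TERRAGRUNT_TFPATH'] = 'tofu'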
@@ -102,8 +108,8 @@ class TgWrap():
             'info': '{base_command} terragrunt-info --terragrunt-non-interactive {update} {upgrade} {common}',
             'plan': '{base_command} {command} --terragrunt-non-interactive -out={planfile_name} {lock_level} {update} {parallelism} {common}',
             'apply': '{base_command} {command} {non_interactive} {no_auto_approve} {update} {parallelism} {common} {planfile}',
-            'show': '{base_command} {command} --terragrunt-non-interactive {
-            'destroy': '{base_command} {command} {non_interactive} {no_auto_approve} {parallelism} {common} {planfile}',
+            'show': '{base_command} {command} --terragrunt-non-interactive {common} {planfile_name}',
+            'destroy': '{base_command} {command} --terragrunt-no-destroy-dependencies-check {non_interactive} {no_auto_approve} {parallelism} {common} {planfile}',
         }
 
         lock_stmt = ''
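Each entry in this dict is a plain `str.format` template that the rest of `_construct_command` fills in. A sketch of how the updated 'destroy' template could expand; the placeholder values below are illustrative assumptions, not the ones assembled elsewhere in the method:

    template = ('{base_command} {command} --terragrunt-no-destroy-dependencies-check '
                '{non_interactive} {no_auto_approve} {parallelism} {common} {planfile}')

    # empty slots simply leave extra spaces in the final command line
    print(template.format(
        base_command='terragrunt', command='destroy',
        non_interactive='--terragrunt-non-interactive',
        no_auto_approve='', parallelism='', common='', planfile='planfile',
    ))
    # 'terragrunt destroy --terragrunt-no-destroy-dependencies-check --terragrunt-non-interactive    planfile'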
@@ -215,9 +221,12 @@ class TgWrap():
         include_dirs = [dir.lstrip(f'.{os.path.sep}') for dir in include_dirs]
         exclude_dirs = [dir.lstrip(f'.{os.path.sep}') for dir in exclude_dirs]
 
+        # Below doesn't seem to work, at least when using `analyze`
+        # Not sure it has been added here in the first place
+
         # if the dir is not ending on '/*', add it
-        include_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in include_dirs]
-        exclude_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in exclude_dirs]
+        # include_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in include_dirs]
+        # exclude_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in exclude_dirs]
 
         common_path = os.path.commonpath([os.path.abspath(working_dir), os.path.abspath(directory)])
         self.printer.verbose(f'Common path for dir {directory}: {common_path}')
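The normalization being commented out here (and the `lstrip` that survives above it) uses `str.lstrip`/`str.rstrip`, which strip a character set rather than a literal prefix or suffix; that is a plausible reason it "doesn't seem to work". A quick illustration:

    # str.lstrip / str.rstrip take a set of characters, not a substring
    print('./networking'.lstrip('./'))          # 'networking'   (intended)
    print('.hidden/app'.lstrip('./'))           # 'hidden/app'   (the dot of '.hidden' is eaten too)
    print('networking'.rstrip('./*') + '/*')    # 'networking/*' (intended)
    print('networking/*'.rstrip('./*') + '/*')  # 'networking/*' (already-suffixed input stays stable)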
@@ -365,7 +374,7 @@ class TgWrap():
 
         return graph
 
-    def _clone_repo(self,
+    def _clone_repo(self, repo, target_dir, version_tag=None):
         """Clones the repo, possibly a specific version, into a temp directory"""
 
         def get_tags(target_dir):
@@ -445,9 +454,7 @@ class TgWrap():
             return is_latest, is_branch, is_tag
 
         # clone the repo
-        repo = manifest['git_repository']
         self.printer.verbose(f'Clone repo {repo}')
-
         cmd = f"git clone {repo} {target_dir}"
         rc = subprocess.run(
             shlex.split(cmd),
@@ -476,7 +483,7 @@ class TgWrap():
             working_dir=target_dir,
         )
 
-        self.printer.header(f'
+        self.printer.header(f'Fetch repo using reference {version_tag}')
 
         if is_latest:
             pass # nothing to do, we already have latest
@@ -656,7 +663,7 @@ class TgWrap():
         self, command, exclude_external_dependencies, start_at_step, dry_run,
         parallel_execution=False, ask_for_confirmation=False, collect_output_file=None,
         backwards=False, working_dir=None, include_dirs=[], exclude_dirs=[],
-        use_native_terraform=False, add_to_workdir=None,
+        use_native_terraform=False, add_to_workdir=None, continue_on_error=False,
         ):
         "Runs the desired command in the directories as defined in the directed graph"
 
@@ -753,7 +760,7 @@ class TgWrap():
                         progress=progress,
                     )
 
-                    if stop_processing:
+                    if stop_processing and not continue_on_error:
                         self.printer.warning(f"Processing needs to be stopped at step {step_nbr}.")
                         self.printer.normal(
                             f"After you've fixed the problem, you can continue where you left off by adding '--start-at-step {step_nbr}'."
@@ -775,10 +782,204 @@ class TgWrap():
         total_items = sum(len(group) for group in groups)
         self.printer.verbose(f'Executed {group_nbr} groups and {total_items} steps')
 
+    def _get_access_token(self):
+        """Retrieve an access token"""
+
+        #
+        # Everything we do here, can be done using native python. And probably this is preferable as well.
+        # But I have decided to follow (at least for now) the overall approach of the app and that is
+        # executing systems commands.
+        # This does require the az cli to be installed, but that is a fair assumption if you are working
+        # with terragrunt/terraform and want to post the analyze results to an Azure Data Collection Endpoint.
+        # However, not ruling out this will change, but then the change should be transparant.
+        #
+
+        # Get the Azure information
+        rc = subprocess.run(
+            shlex.split('az account show'),
+            check=True,
+            stdout=subprocess.PIPE,
+            stderr=sys.stderr,
+        )
+        self.printer.verbose(rc)
+
+        # Do a few checks
+        if rc.returncode != 0:
+            raise Exception(f'Could not get Azure account info')
+
+        # Get the ouptut
+        output = json.loads(rc.stdout.decode())
+        if output.get('environmentName') != 'AzureCloud':
+            raise Exception(f'Environment is not an Azure cloud:\n{json.dumps(output, indent=2)}')
+
+        tenant_id = output.get('tenantId')
+        if not tenant_id:
+            raise Exception(f'Could not determine Azure tenant id:\n{json.dumps(output, indent=2)}')
+
+        principal = output.get('user').get('name')
+        if not principal:
+            raise Exception(f'Could not determine principal:\n{json.dumps(output, indent=2)}')
+
+        # TOKEN=$(az account get-access-token --scope "https://monitor.azure.com//.default" | jq -r '.accessToken')
+        # Get the Azure OAUTH token
+        rc = subprocess.run(
+            shlex.split('az account get-access-token --scope "https://monitor.azure.com//.default"'),
+            check=True,
+            stdout=subprocess.PIPE,
+            stderr=sys.stderr,
+        )
+        self.printer.verbose(rc.returncode) # do not print the token to output
+
+        # Do a few checks
+        if rc.returncode != 0:
+            raise Exception(f'Could not get Azure OAUTH token')
+
+        # Get the ouptut
+        output = json.loads(rc.stdout.decode())
+        token = output.get('accessToken')
+        if not token:
+            raise Exception(f'Could not retrieve an access token:\n{json.dumps(output, indent=2)}')
+
+        return principal, token
+
+    def _post_to_dce(self, data_collection_endpoint, payload, token=None):
+
+        if not token:
+            _, token = self._get_access_token()
+
+        # DCE payload must be submitted as an arry
+        if not isinstance(payload, list):
+            dce_payload = [payload]
+        else:
+            dce_payload = payload
+
+        self.printer.verbose('About to log:')
+        self.printer.verbose(f'- to: {data_collection_endpoint}')
+        self.printer.verbose(f'- payload:\n{json.dumps(dce_payload, indent=2)}')
+
+        # now do the actual post
+        try:
+            headers = {
+                'Authorization': f"Bearer {token}",
+                'Content-Type': 'application/json',
+            }
+            resp = requests.post(
+                url=data_collection_endpoint,
+                headers=headers,
+                json=dce_payload,
+            )
+
+            resp.raise_for_status()
+            self.printer.success('Analyze results logged to DCE', print_line_before=True)
+
+        except requests.exceptions.RequestException as e:
+            # we warn but continue
+            self.printer.warning(f'Error while posting the analyze results ({type(e)}): {e}', print_line_before=True)
+        except Exception as e:
+            self.printer.error(f'Unexpected error: {e}')
+            if self.printer.print_verbose:
+                raise(e)
+            sys.exit(1)
+
+    def _post_analyze_results_to_dce(self, data_collection_endpoint:str, payload:object):
+        """
+        Posts the payload to the given (Azure) data collection endpoint
+        """
+
+        def mask_basic_auth(url):
+            # Regular expression to match basic authentication credentials in URL
+            auth_pattern = re.compile(r"(https?://)([^:@]+):([^:@]+)@(.+)")
+            # Return the url without the basic auth part
+            return auth_pattern.sub(r"\1\4", url)
+
+        principal, token = self._get_access_token()
+
+        # Get the repo info
+        rc = subprocess.run(
+            shlex.split('git config --get remote.origin.url'),
+            check=True,
+            stdout=subprocess.PIPE,
+            stderr=sys.stderr,
+        )
+        self.printer.verbose(rc)
+
+        # Do a few checks
+        if rc.returncode != 0:
+            raise Exception(f'Could not get git repo info')
+
+        # Get the ouptut
+        repo = rc.stdout.decode().rstrip('\n')
+        if not repo:
+            raise Exception(f'Could not get git repo info: {repo}')
+
+        # Remove the basic auth info if it is part of the url
+        repo = mask_basic_auth(repo)
+
+        # Get the current path in the repo
+        rc = subprocess.run(
+            shlex.split('git rev-parse --show-prefix'),
+            check=True,
+            stdout=subprocess.PIPE,
+            stderr=sys.stderr,
+        )
+        self.printer.verbose(rc)
+
+        # Do a few checks
+        if rc.returncode != 0:
+            raise Exception(f'Could not get current scope')
+
+        # Get the ouptut
+        scope = rc.stdout.decode().rstrip('\n')
+        if not scope:
+            raise Exception(f'Could not get scope: {scope}')
+
+        # So now we have everything, we can construct the final payload
+        payload = {
+            "scope": scope,
+            "principal": principal,
+            "repo": repo,
+            "creations": payload.get("summary").get("creations"),
+            "updates": payload.get("summary").get("updates"),
+            "deletions": payload.get("summary").get("deletions"),
+            "minor": payload.get("summary").get("minor"),
+            "medium": payload.get("summary").get("medium"),
+            "major": payload.get("summary").get("major"),
+            "unknown": payload.get("summary").get("unknown"),
+            "total": payload.get("summary").get("total"),
+            "score": payload.get("summary").get("score"),
+            "details": payload.get('details'),
+        }
+        self._post_to_dce(
+            payload=payload,
+            data_collection_endpoint=data_collection_endpoint,
+            token=token,
+        )
+
+        self.printer.verbose('Done')
+
     def run(self, command, debug, dry_run, no_lock, update, upgrade,
         planfile, auto_approve, clean, working_dir, terragrunt_args):
         """ Executes a terragrunt command on a single module """
 
+        def extract_source_value(terragrunt_file_content):
+            # Regular expression to capture the terraform block
+            terraform_block_pattern = re.compile(r'terraform\s*\{(.*?)\n\}', re.DOTALL)
+
+            # Regular expression to capture the 'source' key and its value
+            source_pattern = re.compile(r'source\s*=\s*"(.*?)(?<!\\)"', re.DOTALL)
+
+            # Find the terraform block
+            terraform_block_match = terraform_block_pattern.search(terragrunt_file_content)
+            if terraform_block_match:
+                terraform_block = terraform_block_match.group(1)
+
+                # Search for the 'source' key within the block
+                source_match = source_pattern.search(terraform_block)
+                if source_match:
+                    return source_match.group(1) # Return the value of 'source'
+            else:
+                raise ValueError('Could not locate the terragrunt source value')
+
         self.printer.verbose(f"Attempting to execute 'run {command}'")
         if terragrunt_args:
             self.printer.verbose(f"- with additional parameters: {' '.join(terragrunt_args)}")
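The comment at the top of `_get_access_token` notes that shelling out to the az CLI could be replaced with native Python. A minimal sketch of that alternative using the azure-identity package, which is an assumption here (it is not a dependency this diff adds); the scope matches the one used above:

    from azure.identity import AzureCliCredential

    # reuses the existing az CLI login, but without parsing subprocess output
    credential = AzureCliCredential()
    access_token = credential.get_token('https://monitor.azure.com//.default')
    # access_token.token is the bearer token that _post_to_dce would send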
@@ -786,6 +987,7 @@ class TgWrap():
         check_for_file=self.TERRAGRUNT_FILE
         if working_dir:
             check_for_file = os.path.join(working_dir, check_for_file)
+
         if not os.path.isfile(check_for_file):
             self.printer.error(
                 f"{check_for_file} not found, this seems not to be a terragrunt module directory!"
@@ -796,13 +998,15 @@ class TgWrap():
         source_module = None
         with open(check_for_file, 'r') as file:
             try:
-                content =
-                source = content
+                content = file.read()
+                source = extract_source_value(content)
+
                 # get the source part, typically the last part after the double /.
                 # also remove a potential version element from it.
                 source_module = re.sub(r'\${[^}]*}', '', source.split('//')[::-1][0])
             except Exception as e:
-                self.printer.
+                self.printer.warning(f'Could not parse terragrunt.hcl, but we fall back to default behaviour.')
+                self.printer.verbose(f'error (of type {type(e)}) raised')
                 pass
 
         cmd = self._construct_command(
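A worked example of what the new `extract_source_value` helper plus the follow-up parsing above produce; the terragrunt.hcl content is illustrative, not shipped with the package:

    import re

    content = 'terraform {\n  source = "git::https://example.com/modules.git//networking/vnet?ref=${local.version_tag}"\n}\n'

    # same two regexes as extract_source_value in this diff
    block = re.compile(r'terraform\s*\{(.*?)\n\}', re.DOTALL).search(content).group(1)
    source = re.compile(r'source\s*=\s*"(.*?)(?<!\\)"', re.DOTALL).search(block).group(1)

    part = source.split('//')[::-1][0]     # 'networking/vnet?ref=${local.version_tag}'
    print(re.sub(r'\${[^}]*}', '', part))  # 'networking/vnet?ref=' - interpolation stripped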
@@ -884,14 +1088,14 @@ class TgWrap():
         # tgwrap state mv 'azuread_group.this["viewers"]' 'azuread_group.this["readers"]'
         rc = subprocess.run(
             shlex.split(cmd, posix=False),
-            cwd=cwd,
+            cwd=cwd if cwd else None,
         )
         self.printer.verbose(rc)
 
         sys.exit(rc.returncode)
 
     def run_all(self, command, debug, dry_run, no_lock, update, upgrade,
-        exclude_external_dependencies, step_by_step, planfile, auto_approve, clean,
+        exclude_external_dependencies, step_by_step, continue_on_error, planfile, auto_approve, clean,
         working_dir, start_at_step, limit_parallelism, include_dirs, exclude_dirs, terragrunt_args):
         """ Executes a terragrunt command across multiple modules """
 
@@ -903,10 +1107,6 @@ class TgWrap():
         modifying_command = (command.lower() in ['apply', 'destroy'])
         auto_approve = auto_approve if modifying_command else True
 
-        # if the dir is not ending on '/*', add it
-        include_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in include_dirs]
-        exclude_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in exclude_dirs]
-
         cmd = self._construct_command(
             command=command,
             allow_no_run_all=False,
@@ -933,11 +1133,16 @@ class TgWrap():
             f'This command will be executed for each individual module:\n$ {cmd}'
         )
 
+        # if the dir is not ending on '/*', add it
+        include_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in include_dirs]
+        exclude_dirs = [dir.rstrip(f'.{os.path.sep}*') + f'{os.path.sep}*' for dir in exclude_dirs]
+
         self._run_di_graph(
             command=cmd,
             exclude_external_dependencies=exclude_external_dependencies,
             working_dir=working_dir,
             ask_for_confirmation=(not auto_approve),
+            continue_on_error=continue_on_error,
             dry_run=dry_run,
             start_at_step=start_at_step,
             backwards=True if command.lower() in ['destroy'] else False,
@@ -1000,8 +1205,8 @@ class TgWrap():
         self.printer.verbose(rc)
 
     def analyze(self, exclude_external_dependencies, working_dir, start_at_step,
-        out, analyze_config, parallel_execution,
-
+        out, analyze_config, parallel_execution, ignore_attributes, include_dirs, exclude_dirs,
+        planfile_dir, data_collection_endpoint, terragrunt_args):
         """ Analyzes the plan files """
 
         def calculate_score(major: int, medium: int, minor: int) -> float :
@@ -1039,19 +1244,14 @@ class TgWrap():
         cmd = f"terraform show -json {self.PLANFILE_NAME}"
 
         config = None
-        if
-            self.printer.warning(
-                f"Analyze config file is not set, this is required for checking for unauthorized deletions and drift detection scores!"
-            )
-        else:
+        if analyze_config:
             self.printer.verbose(
                 f"\nAnalyze using config {analyze_config}"
             )
             config = self.load_yaml_file(analyze_config)
 
         ts_validation_successful = True
-
-        drifts = {}
+        details = {}
         try:
             # then run it and capture the output
             with tempfile.NamedTemporaryFile(mode='w+', prefix='tgwrap-', delete=False) as f:
@@ -1082,7 +1282,6 @@ class TgWrap():
                     except IndexError:
                         self.printer.warning(f'Could not determine planfile: {line[:100]}')
 
-
                     try:
                         # plan file could be empty (except for new line) if module is skipped
                         if len(plan_file) > 1:
@@ -1092,11 +1291,13 @@ class TgWrap():
                             if 'exception' in data:
                                 raise Exception(data['exception'])
 
-
+                            details[module], ts_success = run_analyze(
                                 config=config,
                                 data=data,
                                 verbose=self.printer.print_verbose,
+                                ignore_attributes=ignore_attributes,
                             )
+
                             if not ts_success:
                                 ts_validation_successful = False
                         else:
@@ -1113,6 +1314,7 @@ class TgWrap():
             "creations": 0,
             "updates": 0,
             "deletions": 0,
+            "outputs": 0,
             "minor": 0,
             "medium": 0,
             "major": 0,
@@ -1122,10 +1324,15 @@ class TgWrap():
         }
 
         self.printer.header("Analysis results:", print_line_before=True)
-        for key, value in
+        for key, value in details.items():
+            # if we want to ignore a few attributes
+            if ignore_attributes:
+                value['updates'] = [item for item in value['updates'] if item not in value['ignorable_updates']]
+
             self.printer.header(f'Module: {key}')
-            if not value["all"]:
+            if not value["all"] and not value["outputs"]:
                 self.printer.success('No changes detected')
+
             if value["unauthorized"]:
                 self.printer.error('Unauthorized deletions:')
                 for m in value["unauthorized"]:
@@ -1145,45 +1352,70 @@ class TgWrap():
                 for m in value["updates"]:
                     total_drifts["updates"] = total_drifts["updates"] + 1
                     self.printer.normal(f'-> {m}')
+            if value["ignorable_updates"]:
+                if self.printer.print_verbose:
+                    self.printer.normal('Updates (ignored):')
+                    for m in value["ignorable_updates"]:
+                        self.printer.normal(f'-> {m}')
+                else:
+                    self.printer.normal(f'Updates (ignored): {len(value["ignorable_updates"])} resources (add --verbose to see them)')
+            if value["outputs"]:
+                self.printer.normal('Output changes:')
+                for m in value["outputs"]:
+                    total_drifts["outputs"] = total_drifts["outputs"] + 1
+                    self.printer.normal(f'-> {m}')
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        if total_drifts["unknown"] > 0:
-            self.printer.warning(f"For {total_drifts['unknown']} resources, drift score is not configured, please update configuration!")
-            self.printer.warning('- Unknowns:')
-            for key, value in changes.items():
-                for m in value["unknowns"]:
-                    self.printer.warning(f' -> {m}')
+        if not analyze_config:
+            self.printer.error(
+                f"Analyze config file is not set, this is required for checking for unauthorized deletions and drift detection scores!",
+                print_line_before=True,
+            )
+        else:
+            for key, value in details.items():
+                for type in ["minor", "medium", "major", "unknown", "total"]:
+                    total_drifts[type] += value["drifts"][type]
+
+                # the formula below is just a way to achieve a numeric results that is coming from the various drift categories
+                value['drifts']['score'] = calculate_score(
+                    major = value['drifts']['major'],
+                    medium = value['drifts']['medium'],
+                    minor = value['drifts']['minor'],
+                )
+                value['drifts']['score'] = value['drifts']['major'] * 10 + value['drifts']['medium'] + value['drifts']['minor'] / 10
 
-
+        # the formula below is just a way to achieve a numeric results that is coming from the various drift categories
+        total_drift_score = total_drifts['major'] * 10 + total_drifts['medium'] + total_drifts['minor'] / 10
+        total_drifts['score'] = total_drift_score
+
+        self.printer.header(f"Drift score: {total_drift_score} ({total_drifts['major']}.{total_drifts['medium']}.{total_drifts['minor']})")
+        if total_drifts["unknown"] > 0:
+            self.printer.warning(f"For {total_drifts['unknown']} resources, drift score is not configured, please update configuration!")
+            self.printer.warning('- Unknowns:')
+            for key, value in details.items():
+                for m in value["unknowns"]:
+                    self.printer.warning(f' -> {m}')
+
+        if out or data_collection_endpoint:
             # in the output we convert the dict of dicts to a list of dicts as it makes processing
             # (e.g. by telegraph) easier.
             output = {
-                "
+                "details": [],
                 "summary": {},
             }
-            for key, value in
+            for key, value in details.items():
                 value['module'] = key
-                output["
+                output["details"].append(value)
 
             output["summary"] = total_drifts
 
-
+            if out:
+                print(json.dumps(output, indent=4))
+
+            if data_collection_endpoint:
+                self._post_analyze_results_to_dce(
+                    data_collection_endpoint=data_collection_endpoint,
+                    payload=output,
+                )
 
         if not ts_validation_successful:
             self.printer.error("Analysis detected unauthorised deletions, please check your configuration!!!")
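The drift score introduced here reduces the categorized findings to one weighted number: a major finding counts 10, a medium 1, a minor 0.1. A worked example of the exact expression used for both the per-module and the total score:

    major, medium, minor = 2, 3, 5

    score = major * 10 + medium + minor / 10
    print(score)  # 23.5 - reported above as "Drift score: 23.5 (2.3.5)"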
@@ -1299,7 +1531,7 @@ class TgWrap():
             source_config_dir = None
 
             version_tag, _, _ = self._clone_repo(
-
+                repo=manifest['git_repository'],
                 target_dir=temp_dir,
                 version_tag=version_tag,
             )
@@ -1327,6 +1559,8 @@ class TgWrap():
                 pass
 
             deploy_actions = {}
+            deploy_global_configs = include_global_config_files
+            target_stage_found = False
             # now go through the deploy configurations and apply the one that is relevant
             for key, value in manifest['deploy'].items():
                 if target_stage not in value['applies_to_stages']:
@@ -1345,9 +1579,11 @@ class TgWrap():
                         tg_file_name=self.TERRAGRUNT_FILE,
                         verbose=self.printer.print_verbose,
                     )
-                )
+                )
+                deploy_global_configs = value.get('include_global_config_files', deploy_global_configs)
+                target_stage_found = True
 
-            if
+            if target_stage_found and deploy_global_configs:
                 for gc, global_config in manifest.get('global_config_files', {}).items():
                     self.printer.verbose(f'Found global config : {gc}')
@@ -1370,42 +1606,46 @@ class TgWrap():
             else:
                 self.printer.verbose(f'Skipping global configs')
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if deploy_actions:
+                self.printer.header('Modules to deploy:')
+                self.printer.normal(f'-> git repository: {manifest["git_repository"]}')
+                self.printer.normal(f'-> version tag: {version_tag}')
+                self.printer.normal('Modules:')
+                for key, value in deploy_actions.items():
+                    self.printer.normal(f'--> {key}')
+
+                if not auto_approve:
+                    response = input("\nDo you want to continue? (y/N) ")
+                    # if response.lower() != "y":
+                    #     sys.exit(1)
+
+                if auto_approve or response.lower() == "y":
+                    for key, value in deploy_actions.items():
+                        run_sync(
+                            source_path=value['source'],
+                            target_path=value['target'],
+                            excludes=value.get('excludes', []),
+                            include_lock_file=True,
+                            auto_approve=True,
+                            dry_run=dry_run,
+                            clean=False,
+                            terragrunt_file=self.TERRAGRUNT_FILE,
+                            verbose=self.printer.print_verbose,
+                        )
 
-
-
-
-
-
-
-
-
+                    if not dry_run:
+                        # write the version file
+                        with open(os.path.join(target_dir, self.VERSION_FILE), 'w') as f:
+                            f.write(f"""
+locals {{
+    version_tag="{version_tag}"
+}}
+""")
+            else:
+                self.printer.normal('Nothing to do')
 
-
-
+            # clean up the cache in the deployed directory to avoid strange issues when planning
+            self.clean(working_dir=target_dir)
 
         except KeyError as e:
             self.printer.error(f'Error interpreting the manifest file. Please ensure it uses the proper format. Could not find element: {e}')
@@ -1421,30 +1661,72 @@ class TgWrap():
         except Exception:
             pass
 
-    def check_deployments(self,
+    def check_deployments(self, repo_url, levels_deep, working_dir, out):
        """ Check the freshness of deployed configuration versions against the platform repository """
 
-        def locate_version_files(current_directory, found_files=[], root_directory=None, level=1):
+        def locate_version_files(current_directory, found_files=[], root_directory=None, level=1, git_status=''):
            " This tries to find a version file in the current directory, or a given number of directories beneath it"
 
+            # do not include hidden directories
+            if os.path.basename(current_directory).startswith('.'):
+                return found_files
+
            if not root_directory:
                root_directory = current_directory
 
+            if not git_status:
+                self.printer.verbose(f'Check for git status in directory {current_directory}')
+                # Execute 'git status' to get an overview of the current status
+                cmd = "git status"
+                rc = subprocess.run(
+                    shlex.split(cmd),
+                    cwd=current_directory,
+                    universal_newlines=True,
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.PIPE,
+                )
+                output = ('stdout: ' + rc.stdout + 'stderr: ' + rc.stderr).lower()
+                if 'not a git repository' in output:
+                    pass
+                elif 'branch is up to date' in output:
+                    git_status = 'up to date; '
+                elif 'head detached' in output:
+                    git_status = 'head detached; '
+                elif 'untracked files' in output:
+                    git_status = git_status + 'untracked files; '
+                elif 'changes to be committed' in output:
+                    git_status = git_status + 'staged changes; '
+                elif 'changes not staged for commit' in output:
+                    git_status = git_status + 'unstaged changes; '
+                elif 'branch is ahead of' in output:
+                    git_status = git_status + 'ahead of remote; '
+                elif 'branch is behind of' in output:
+                    git_status = git_status + 'behind remote; '
+                elif 'unmerged paths' in output:
+                    git_status = git_status + 'merge conflicts; '
+
            for entry in os.listdir(current_directory):
                full_entry = os.path.join(current_directory, entry)
-
+
+                if os.path.isdir(full_entry) and level <= levels_deep:
                    found_files = locate_version_files(
                        current_directory=full_entry,
                        found_files=found_files,
                        root_directory=root_directory,
                        level=level+1,
+                        git_status=git_status,
                    )
                elif entry == self.VERSION_FILE:
-                    found_files.append(
+                    found_files.append(
+                        {
+                            'path': os.path.relpath(current_directory, root_directory),
+                            'git_status': git_status,
+                        }
+                    )
 
            return found_files
 
-    def
+        def get_all_versions(repo_dir, min_version=None):
            "Get all the version tags from the repo including their data"
 
            # Execute 'git tag' command to get a list of all tags
@@ -1475,25 +1757,27 @@ class TgWrap():
        try:
            # do we have a working dir?
            working_dir = working_dir if working_dir else os.getcwd()
-            self.printer.header(f'Check released versions ({
+            self.printer.header(f'Check released versions (max {levels_deep} levels deep) in directory: {working_dir}')
 
-
+            found_files = locate_version_files(working_dir)
 
            versions = []
-            for
+            for result in found_files:
                # Determine the deployed version as defined in the version file
-                with open(os.path.join(working_dir,
+                with open(os.path.join(working_dir, result['path'], self.VERSION_FILE), 'r') as file:
+                    # todo: replace this with regex as it is (now) the only reason we use this lib
                    content = hcl2.load(file)
                    try:
                        version_tag = content['locals'][0]['version_tag']
                        versions.append(
                            {
-                                'path':
+                                'path': result['path'],
+                                'git_status': result['git_status'],
                                'tag': version_tag
                            }
                        )
                    except KeyError as e:
-                        versions.append({
+                        versions.append({result: 'unknown'})
 
            self.printer.verbose(f'Detected versions: {versions}')
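The deployed version is read back from the version.hcl file that `deploy` writes (see the `f.write` block earlier in this diff). A small sketch of that round trip, assuming a version.hcl in the current directory; the literal tag value is illustrative:

    import hcl2

    # version.hcl content as written by deploy():
    #   locals {
    #       version_tag="v1.4.0"
    #   }
    with open('version.hcl', 'r') as file:
        content = hcl2.load(file)

    # python-hcl2 parses block bodies into a list of dicts, hence the [0]
    print(content['locals'][0]['version_tag'])  # v1.4.0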
@@ -1510,11 +1794,14 @@ class TgWrap():
            self.printer.verbose(f'Detected minimum version {min_version} and maximum version {max_version}')
 
            temp_dir = os.path.join(tempfile.mkdtemp(prefix='tgwrap-'), "tg-source")
-
-
+            self._clone_repo(
+                repo=repo_url,
+                target_dir=temp_dir,
+                version_tag='latest',
+            )
 
            # determine the version tag from the repo, including their date
-            all_versions =
+            all_versions = get_all_versions(repo_dir=temp_dir, min_version=min_version['tag'])
 
            # so now we can determine how old the deployed versions are
            now = datetime.now(timezone.utc)
@@ -1528,31 +1815,45 @@ class TgWrap():
                version['days_since_release'] = (now - release_date).days
 
            self.printer.header(
-                'Deployed versions:' if len(versions) > 0 else 'No deployed versions detected'
+                'Deployed versions:' if len(versions) > 0 else 'No deployed versions detected',
+                print_line_before=True,
            )
-
+
+            # sort the list based on its path
+            versions = sorted(versions, key=lambda x: x['path'])
+
            for version in versions:
                days_since_release = version.get("days_since_release", 0)
                message = f'-> {version["path"]}: {version["tag"]} (released {days_since_release} days ago)'
                if version['release_date'] == 'unknown':
                    self.printer.normal(message)
-                elif days_since_release >
+                elif days_since_release > 120:
                    self.printer.error(message)
-                elif days_since_release >
-                    self.printer.
-                elif days_since_release <
+                elif days_since_release > 80:
+                    self.printer.warning(message)
+                elif days_since_release < 40:
                    self.printer.success(message)
                else:
                    self.printer.normal(message)
 
+                if version.get('git_status'):
+                    message = f'WARNING: git status: {version["git_status"].strip()}'
+                    if not 'up to date' in message:
+                        self.printer.warning(message)
+
            self.printer.normal("\n") # just to get an empty line :-/
            self.printer.warning("""
Note:
    This result only says something about the freshness of the deployed configurations,
    but not whether the actual resources are in sync with these.
+
    Check the drift of these configurations with the actual deployments by
    planning and analyzing the results.
-
+
+    Also, it uses the locally checked out repositories, make sure these are pulled so that
+    this reflect the most up to date situation!
+            """,
+            print_line_before=True, print_line_after=True)
 
            if out:
                # use the regular printer, to avoid it being sent to stderr
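The freshness report above color-codes each deployed version by age: older than 120 days is an error, older than 80 a warning, younger than 40 a success, and anything in between neutral (versions with an unknown release date are printed neutrally before this ladder is reached). The same ladder as a standalone sketch:

    def freshness_level(days_since_release: int) -> str:
        # thresholds as used in check_deployments()
        if days_since_release > 120:
            return 'error'
        if days_since_release > 80:
            return 'warning'
        if days_since_release < 40:
            return 'success'
        return 'normal'

    print([freshness_level(d) for d in (150, 90, 10, 60)])
    # ['error', 'warning', 'success', 'normal']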
@@ -1574,15 +1875,88 @@ Note:
        except Exception:
            pass
 
-    def show_graph(self, backwards, exclude_external_dependencies, working_dir, include_dirs, exclude_dirs, terragrunt_args):
+    def show_graph(self, backwards, exclude_external_dependencies, analyze, working_dir, include_dirs, exclude_dirs, terragrunt_args):
        """ Shows the dependencies of a project """
 
+        def set_json_dumps_default(obj):
+            if isinstance(obj, set):
+                return list(obj)
+            raise TypeError
+
+        def calculate_dependencies(graph):
+            dependencies = {}
+            for node in graph.nodes:
+                out_degree = graph.out_degree(node)
+                in_degree = graph.in_degree(node)
+                total_degree = out_degree + in_degree
+                dependencies[node] = {
+                    'dependencies': out_degree,
+                    'dependent_on_it': in_degree,
+                    'total': total_degree,
+                }
+
+            return dependencies
+
+        def calculate_graph_metrics(graph):
+
+            metrics = {}
+
+            # Degree centrality
+            metric = {
+                'values': dict(sorted(nx.degree_centrality(graph).items(), key=lambda item: item[1], reverse=True)),
+                'description': 'Shows the degree of each node relative to the number of nodes in the graph',
+            }
+            sorted_dependencies = sorted(dependencies.items(), key=lambda x: x[1]['total'], reverse=True)
+            metrics['degree_centrality'] = metric
+
+            # Betweenness centrality
+            metric = {
+                'values': dict(sorted(nx.betweenness_centrality(graph).items(), key=lambda item: item[1], reverse=True)),
+                'description': 'Indicates nodes that frequently lie on shortest paths between other nodes',
+            }
+            metrics['betweenness_centrality'] = metric
+
+            # Closeness centrality
+            metric = {
+                'values': dict(sorted(nx.closeness_centrality(graph).items(), key=lambda item: item[1], reverse=True)),
+                'description': 'Reflects how quickly a node can reach other nodes in the graph',
+            }
+            metrics['closeness_centrality'] = metric
+
+            # Strongly Connected Components (SCC)
+            metric = {
+                'values': list(nx.strongly_connected_components(graph)),
+                'description': 'Lists sets of nodes that are mutually reachable',
+            }
+            metrics['strongly_connected_components'] = metric
+
+            # Weakly Connected Components (WCC)
+            metric = {
+                'values': list(nx.weakly_connected_components(graph)),
+                'description': 'Lists sets of nodes that are connected disregarding edge directions',
+            }
+            metrics['weakly_connected_components'] = metric
+
+            # Average Path Length (only if the graph is connected)
+            if nx.is_strongly_connected(graph):
+                metric = {
+                    'values': nx.average_shortest_path_length(graph),
+                    'description': 'Shows the average shortest path length, indicating the graph\'s efficiency',
+                }
+                metrics['average_path_length'] = metric
+
+            return metrics
+
        self.printer.verbose(f"Attempting to show dependencies")
        if terragrunt_args:
            self.printer.verbose(f"- with additional parameters: {' '.join(terragrunt_args)}")
 
        "Runs the desired command in the directories as defined in the directed graph"
        graph = self._get_di_graph(backwards=backwards, working_dir=working_dir)
+        try:
+            graph.remove_node(r'\n')
+        except nx.exception.NetworkXError:
+            pass
 
        # first go through the groups and clean up where needed
        groups = self._prepare_groups(
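The new `analyze` option of `show_graph` leans on standard networkx centrality calls. A toy example of what they return for a three-module dependency graph; the module names are made up:

    import networkx as nx

    g = nx.DiGraph()
    g.add_edges_from([('app', 'dns'), ('app', 'network'), ('dns', 'network')])

    print(nx.degree_centrality(g))       # {'app': 1.0, 'dns': 1.0, 'network': 1.0}
    print(nx.betweenness_centrality(g))  # all 0.0 - every pair is directly connected
    print(list(nx.weakly_connected_components(g)))  # one set containing all three nodes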
@@ -1602,12 +1976,33 @@ Note:
            for directory in group:
                self.printer.normal(f"- {directory}")
 
+        if analyze:
+            self.printer.header("Graph analysis", print_line_before=True)
+
+            self.printer.bold("Dependencies counts:", print_line_before=True)
+            dependencies = calculate_dependencies(graph)
+            sorted_dependencies = sorted(dependencies.items(), key=lambda x: x[1]['total'], reverse=True)
+            for node, counts in sorted_dependencies:
+                msg = f"""
+{node} ->
+\ttotal: {counts['total']}
+\tdependent on: {counts['dependent_on_it']}
+\tdependencies: {counts['dependencies']}
+"""
+                self.printer.normal(msg)
+
+            metrics = calculate_graph_metrics(graph)
+            for metric, item in metrics.items():
+                self.printer.bold(f'Metric: {metric}')
+                self.printer.normal(f'Description: {item["description"]}')
+                self.printer.normal(json.dumps(item['values'], indent=2, default=set_json_dumps_default))
+
    def clean(self, working_dir):
        """ Clean the temporary files of a terragrunt/terraform project """
 
-        cmd = 'find . -name ".terragrunt-cache" -type d -exec rm -rf {} \; ; ' + \
-            'find . -name ".terraform" -type d -exec rm -rf {} \; ; ' + \
-            'find . -name "terragrunt-debug*" -type f -exec rm -rf {} \;'
+        cmd = r'find . -name ".terragrunt-cache" -type d -exec rm -rf {} \; ; ' + \
+            r'find . -name ".terraform" -type d -exec rm -rf {} \; ; ' + \
+            r'find . -name "terragrunt-debug*" -type f -exec rm -rf {} \;'
 
        # we see the behaviour that with cleaning up large directories, it returns errorcode=1 upon first try
        # never to shy away from a questionable solution to make your life easier, we just run it again :-)
@@ -1656,7 +2051,7 @@ Note:
            current_release = match.group(1)
            if current_release not in release_commits:
                # remove the part between ()
-                pattern = re.compile('\(.*?\) ')
+                pattern = re.compile(r'\(.*?\) ')
                updated_entry = pattern.sub('', entry)
                release_commits[current_release] = [updated_entry]
            elif current_release:
@@ -1712,3 +2107,65 @@ Note:
        # use the regular printer, to avoid it being sent to stderr
        print(changelog)
 
+    def inspect(self, domain:str,substack:str, stage:str, azure_subscription_id:str, config_file:str,
+        out:bool, data_collection_endpoint:str):
+        """ Inspects the status of an Azure deployment """
+
+        inspector = AzureInspector(
+            subscription_id=azure_subscription_id,
+            domain=domain,
+            substack=substack,
+            stage=stage,
+            config_file=config_file,
+            verbose=self.printer.print_verbose,
+        )
+
+        try:
+            results = inspector.inspect()
+
+            # Report the status
+            exit_code = 0
+            self.printer.header('Inspection status:', print_line_before=True)
+            for k,v in results.items():
+                msg = f"""{v['type']}: {k}
+ -> Resource: {v.get('inspect_status_code', 'NC')} ({v.get('inspect_message', 'not found')})""" # only since python 3.12 you can use things like \t and \n in an f-string
+                if 'rbac_assignment_status_code' in v:
+                    msg = msg + f"""
+ -> RBAC: {v['rbac_assignment_status_code']} ({v.get('rbac_assignment_message')})"
+""" # only since python 3.12 you can use things like \t and \n in an f-string
+                if v['inspect_status_code'] != 'OK' or v.get('rbac_assignment_status_code', 'OK') == 'NOK':
+                    self.printer.error(msg=msg)
+                    exit_code += 1
+                else:
+                    self.printer.success(msg=msg)
+
+            if out or data_collection_endpoint:
+                # convert results to something DCE understands, and add the inputs
+                payload = []
+                for key, value in results.items():
+                    value_with_key = value.copy()
+                    value_with_key["resource_type"] = value_with_key.pop("type")
+                    value_with_key["resource"] = key
+                    value_with_key["domain"] = domain
+                    value_with_key["substack"] = substack
+                    value_with_key["stage"] = stage
+                    value_with_key["subscription_id"] = azure_subscription_id
+                    payload.append(value_with_key)
+
+                if out:
+                    print(json.dumps(payload, indent=2))
+
+                if data_collection_endpoint:
+                    self._post_to_dce(
+                        data_collection_endpoint=data_collection_endpoint,
+                        payload=payload,
+                    )
+
+            return exit_code
+        except Exception as e:
+            self.printer.normal(f'Exception occurred: {e}')
+
+            if self.printer.print_verbose:
+                traceback.print_exc()
+
+            return -1