qontract-reconcile 0.10.2.dev160__py3-none-any.whl → 0.10.2.dev173__py3-none-any.whl
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/METADATA +2 -2
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/RECORD +30 -23
- reconcile/acs_rbac.py +1 -0
- reconcile/cli.py +4 -6
- reconcile/dashdotdb_slo.py +45 -156
- reconcile/gcp_image_mirror.py +252 -0
- reconcile/gitlab_housekeeping.py +1 -1
- reconcile/gql_definitions/common/saas_files.py +49 -0
- reconcile/gql_definitions/dashdotdb_slo/slo_documents_query.py +15 -67
- reconcile/gql_definitions/fragments/container_image_mirror.py +33 -0
- reconcile/gql_definitions/fragments/saas_slo_document.py +82 -0
- reconcile/gql_definitions/gcp/__init__.py +0 -0
- reconcile/gql_definitions/gcp/gcp_docker_repos.py +128 -0
- reconcile/gql_definitions/gcp/gcp_projects.py +77 -0
- reconcile/gql_definitions/introspection.json +380 -230
- reconcile/quay_mirror.py +3 -42
- reconcile/quay_mirror_org.py +3 -2
- reconcile/slack_base.py +2 -2
- reconcile/utils/dynatrace/client.py +0 -31
- reconcile/utils/quay_mirror.py +42 -0
- reconcile/utils/saasherder/interfaces.py +2 -0
- reconcile/utils/saasherder/saasherder.py +5 -0
- reconcile/utils/slack_api.py +3 -1
- reconcile/utils/slo_document_manager.py +278 -0
- reconcile/utils/terrascript_aws_client.py +57 -0
- tools/{sd_app_sre_alert_report.py → alert_report.py} +1 -1
- tools/cli_commands/erv2.py +61 -0
- tools/qontract_cli.py +15 -5
- reconcile/gcr_mirror.py +0 -278
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/entry_points.txt +0 -0
tools/cli_commands/erv2.py
CHANGED
@@ -123,6 +123,7 @@ class Erv2Cli:
                 m_inventory.get_from_spec(spec), spec, self._er_settings
             )
         )
+        self.module_type = m_inventory.get_from_spec(spec).module_type
         self._resource = f.create_external_resource(spec, self._module_configuration)
         f.validate_external_resource(self._resource, self._module_configuration)

@@ -221,6 +222,66 @@ class Erv2Cli:
                 print(e.stdout.decode("utf-8"))
             raise

+    def build_terraform(self, credentials: Path) -> None:
+        input_file = self.temp / "input.json"
+        input_file.write_text(self.input_data)
+        tf_module = "tfmodule"
+
+        # delete previous ERv2 container
+        run(["docker", "rm", "-f", "erv2"], capture_output=True, check=True)
+
+        try:
+            with task(self.progress_spinner, "-- Running terraform"):
+                run(["docker", "pull", self.image], check=True)
+                run(
+                    [
+                        "docker",
+                        "run",
+                        "--name",
+                        "erv2",
+                        "-v",
+                        f"{input_file!s}:/inputs/input.json:Z",
+                        "-v",
+                        f"{credentials!s}:/credentials:Z",
+                        "-v",
+                        f"{self.temp}:/work:Z",
+                        "-e",
+                        "AWS_SHARED_CREDENTIALS_FILE=/credentials",
+                        "-e",
+                        f"TERRAFORM_MODULE_WORK_DIR=/tmp/{tf_module}",
+                        "-e",
+                        "LOCAL_STATE=False",
+                        self.image,
+                    ],
+                    check=True,
+                    capture_output=True,
+                )
+
+            with task(self.progress_spinner, "-- Copying the terraform module"):
+                run(
+                    [
+                        "docker",
+                        "cp",
+                        f"erv2:/tmp/{tf_module}",
+                        str(self.temp),
+                    ],
+                    check=True,
+                    capture_output=True,
+                )
+
+                # move all assets to local workdir
+                src_dir = self.temp / tf_module
+                for item in src_dir.iterdir():
+                    if item.name != ".terraform":
+                        item.rename(self.temp / item.name)
+
+        except CalledProcessError as e:
+            if e.stderr:
+                print(e.stderr.decode("utf-8"))
+            if e.stdout:
+                print(e.stdout.decode("utf-8"))
+            raise
+
     def enter_shell(self, credentials: Path) -> None:
         """Run the ERv2 container and enter the shell."""
         input_file = self.temp / "input.json"
tools/qontract_cli.py
CHANGED
@@ -798,6 +798,11 @@ def ocm_addon_upgrade_policies(ctx: click.core.Context) -> None:


 @get.command()
+@click.option(
+    "--channel",
+    help="Specifies the channel that alerts stores",
+    type=str,
+)
 @click.option(
     "--days",
     help="Days to consider for the report. Cannot be used with timestamp options.",
@@ -816,13 +821,14 @@ def ocm_addon_upgrade_policies(ctx: click.core.Context) -> None:
     type=int,
 )
 @click.pass_context
-def sd_app_sre_alert_report(
+def alert_report(
     ctx: click.core.Context,
+    channel: str | None,
     days: int | None,
     from_timestamp: int | None,
     to_timestamp: int | None,
 ) -> None:
-    import tools.sd_app_sre_alert_report as report
+    import tools.alert_report as report

     if days:
         if from_timestamp or to_timestamp:
@@ -845,7 +851,9 @@ def sd_app_sre_alert_report(
         sys.exit(1)

     slack = slackapi_from_queries(
-        integration_name=report.QONTRACT_INTEGRATION,
+        integration_name=report.QONTRACT_INTEGRATION,
+        init_usergroups=False,
+        channel=channel,
     )
     alerts = report.group_alerts(
         slack.get_flat_conversation_history(
@@ -4381,8 +4389,10 @@ def migrate(ctx: click.Context, dry_run: bool, skip_build: bool) -> None:

         with task(progress, "(erv2) Building the terraform configuration"):
             if not skip_build:
-
-
+                if erv2cli.module_type == "cdktf":
+                    erv2cli.build_cdktf(credentials_file)
+                else:
+                    erv2cli.build_terraform(credentials_file)
             erv2_tf_cli = TerraformCli(
                 temp_erv2, dry_run=dry_run, progress_spinner=progress
             )
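Together with the tools/sd_app_sre_alert_report.py → tools/alert_report.py rename in the file list above, this renames the report command itself. Assuming @get.command() is still called without an explicit name (click then derives the subcommand name from the function, replacing underscores with dashes), the report would now be invoked via the qontract-cli entry point roughly as follows; the option values are placeholders:

qontract-cli get alert-report --channel <slack-channel> --days 7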
reconcile/gcr_mirror.py
DELETED
@@ -1,278 +0,0 @@
-import base64
-import logging
-import os
-import re
-import tempfile
-import time
-from collections import defaultdict
-from typing import Any, Self
-
-import requests
-from sretoolbox.container import (
-    Image,
-    Skopeo,
-)
-from sretoolbox.container.image import ImageComparisonError
-from sretoolbox.container.skopeo import SkopeoCmdError
-
-from reconcile import queries
-from reconcile.utils import gql
-from reconcile.utils.secret_reader import SecretReader
-
-_LOG = logging.getLogger(__name__)
-
-QONTRACT_INTEGRATION = "gcr-mirror"
-REQUEST_TIMEOUT = 60
-
-
-class QuayMirror:
-    GCR_PROJECT_CATALOG_QUERY = """
-    {
-      projects: gcp_projects_v1 {
-        name
-        pushCredentials {
-          path
-          field
-          version
-          format
-        }
-      }
-    }
-    """
-
-    GCR_REPOS_QUERY = """
-    {
-      apps: apps_v1 {
-        gcrRepos {
-          project {
-            name
-          }
-          items {
-            name
-            mirror {
-              url
-              pullCredentials {
-                path
-                field
-                version
-                format
-              }
-              tags
-              tagsExclude
-            }
-          }
-        }
-      }
-    }
-    """
-
-    def __init__(self, dry_run: bool = False) -> None:
-        self.dry_run = dry_run
-        self.gqlapi = gql.get_api()
-        settings = queries.get_app_interface_settings()
-        self.secret_reader = SecretReader(settings=settings)
-        self.skopeo_cli = Skopeo(dry_run)
-        self.push_creds = self._get_push_creds()
-        self.session = requests.Session()
-
-    def __enter__(self) -> Self:
-        return self
-
-    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
-        self.session.close()
-
-    def run(self) -> None:
-        sync_tasks = self.process_sync_tasks()
-        for org, data in sync_tasks.items():
-            for item in data:
-                try:
-                    self.skopeo_cli.copy(
-                        src_image=item["mirror_url"],
-                        src_creds=item["mirror_creds"],
-                        dst_image=item["image_url"],
-                        dest_creds=self.push_creds[org],
-                    )
-                except SkopeoCmdError as details:
-                    _LOG.error("[%s]", details)
-
-    def process_repos_query(self) -> dict[str, list[dict[str, Any]]]:
-        result = self.gqlapi.query(self.GCR_REPOS_QUERY)
-
-        summary = defaultdict(list)
-
-        for app in result["apps"]:
-            gcr_repos = app.get("gcrRepos")
-
-            if gcr_repos is None:
-                continue
-
-            for gcr_repo in gcr_repos:
-                project = gcr_repo["project"]["name"]
-                server_url = gcr_repo["project"].get("serverUrl") or "gcr.io"
-                for item in gcr_repo["items"]:
-                    if item["mirror"] is None:
-                        continue
-
-                    summary[project].append({
-                        "name": item["name"],
-                        "mirror": item["mirror"],
-                        "server_url": server_url,
-                    })
-
-        return summary
-
-    @staticmethod
-    def sync_tag(
-        tags: list[str] | None, tags_exclude: list[str] | None, candidate: str
-    ) -> bool:
-        if tags is not None:
-            # When tags is defined, we don't look at tags_exclude
-            return any(re.match(tag, candidate) for tag in tags)
-
-        if tags_exclude is not None:
-            for tag_exclude in tags_exclude:
-                if re.match(tag_exclude, candidate):
-                    return False
-            return True
-
-        # Both tags and tags_exclude are None, so
-        # tag must be synced
-        return True
-
-    def process_sync_tasks(self) -> dict[str, list[dict[str, Any]]]:
-        eight_hours = 28800  # 60 * 60 * 8
-        is_deep_sync = self._is_deep_sync(interval=eight_hours)
-
-        summary = self.process_repos_query()
-
-        sync_tasks = defaultdict(list)
-        for org, data in summary.items():
-            for item in data:
-                image = Image(
-                    f"{item['server_url']}/{org}/{item['name']}",
-                    session=self.session,
-                    timeout=REQUEST_TIMEOUT,
-                )
-
-                mirror_url = item["mirror"]["url"]
-
-                username = None
-                password = None
-                mirror_creds = None
-                if item["mirror"]["pullCredentials"] is not None:
-                    pull_credentials = item["mirror"]["pullCredentials"]
-                    raw_data = self.secret_reader.read_all(pull_credentials)
-                    username = raw_data["user"]
-                    password = raw_data["token"]
-                    mirror_creds = f"{username}:{password}"
-
-                image_mirror = Image(
-                    mirror_url,
-                    username=username,
-                    password=password,
-                    session=self.session,
-                    timeout=REQUEST_TIMEOUT,
-                )
-
-                tags = item["mirror"].get("tags")
-                tags_exclude = item["mirror"].get("tagsExclude")
-
-                for tag in image_mirror:
-                    if not self.sync_tag(
-                        tags=tags, tags_exclude=tags_exclude, candidate=tag
-                    ):
-                        continue
-
-                    upstream = image_mirror[tag]
-                    downstream = image[tag]
-                    if tag not in image:
-                        _LOG.debug(
-                            "Image %s and mirror %s are out off sync",
-                            downstream,
-                            upstream,
-                        )
-                        sync_tasks[org].append({
-                            "mirror_url": str(upstream),
-                            "mirror_creds": mirror_creds,
-                            "image_url": str(downstream),
-                        })
-                        continue
-
-                    # Deep (slow) check only in non dry-run mode
-                    if self.dry_run:
-                        _LOG.debug(
-                            "Image %s and mirror %s are in sync", downstream, upstream
-                        )
-                        continue
-
-                    # Deep (slow) check only from time to time
-                    if not is_deep_sync:
-                        _LOG.debug(
-                            "Image %s and mirror %s are in sync", downstream, upstream
-                        )
-                        continue
-
-                    try:
-                        if downstream == upstream:
-                            _LOG.debug(
-                                "Image %s and mirror %s are in sync",
-                                downstream,
-                                upstream,
-                            )
-                            continue
-                    except ImageComparisonError as details:
-                        _LOG.error("[%s]", details)
-                        continue
-
-                    _LOG.debug(
-                        "Image %s and mirror %s are out of sync", downstream, upstream
-                    )
-                    sync_tasks[org].append({
-                        "mirror_url": str(upstream),
-                        "mirror_creds": mirror_creds,
-                        "image_url": str(downstream),
-                    })
-
-        return sync_tasks
-
-    def _is_deep_sync(self, interval: int) -> bool:
-        control_file_name = "qontract-reconcile-gcr-mirror.timestamp"
-        control_file_path = os.path.join(tempfile.gettempdir(), control_file_name)
-        try:
-            with open(control_file_path, encoding="locale") as file_obj:
-                last_deep_sync = float(file_obj.read())
-        except FileNotFoundError:
-            self._record_timestamp(control_file_path)
-            return True
-
-        next_deep_sync = last_deep_sync + interval
-        if time.time() >= next_deep_sync:
-            self._record_timestamp(control_file_path)
-            return True
-
-        return False
-
-    @staticmethod
-    def _record_timestamp(path: str) -> None:
-        with open(path, "w", encoding="locale") as file_object:
-            file_object.write(str(time.time()))
-
-    def _get_push_creds(self) -> dict[str, str]:
-        result = self.gqlapi.query(self.GCR_PROJECT_CATALOG_QUERY)
-
-        creds = {}
-        for project_data in result["projects"]:
-            push_secret = project_data["pushCredentials"]
-            if push_secret is None:
-                continue
-
-            raw_data = self.secret_reader.read_all(push_secret)
-            project = project_data["name"]
-            token = base64.b64decode(raw_data["token"]).decode()
-            creds[project] = f"{raw_data['user']}:{token}"
-        return creds
-
-
-def run(dry_run: bool) -> None:
-    with QuayMirror(dry_run) as gcr_mirror:
-        gcr_mirror.run()
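For reference, the tag-selection rules of the deleted QuayMirror.sync_tag above can be restated as a small self-contained sketch. The regexes and tag names below are made-up examples, and the new reconcile/gcp_image_mirror.py listed in the file summary is not shown in this diff:

import re


def sync_tag(tags, tags_exclude, candidate):
    # Behaviorally equivalent restatement of the deleted staticmethod:
    # `tags` wins over `tags_exclude`, and with neither set every tag is synced.
    if tags is not None:
        return any(re.match(tag, candidate) for tag in tags)
    if tags_exclude is not None:
        return not any(re.match(t, candidate) for t in tags_exclude)
    return True


assert sync_tag(["^v1\\."], None, "v1.2.3") is True      # matched by tags
assert sync_tag(["^v1\\."], ["^v"], "v2.0.0") is False   # tags set but no match; tags_exclude ignored
assert sync_tag(None, ["^latest$"], "latest") is False   # excluded
assert sync_tag(None, None, "anything") is True          # nothing configured: sync everything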
{qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/WHEEL
RENAMED
File without changes