qontract-reconcile 0.10.2.dev160__py3-none-any.whl → 0.10.2.dev173__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/METADATA +2 -2
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/RECORD +30 -23
- reconcile/acs_rbac.py +1 -0
- reconcile/cli.py +4 -6
- reconcile/dashdotdb_slo.py +45 -156
- reconcile/gcp_image_mirror.py +252 -0
- reconcile/gitlab_housekeeping.py +1 -1
- reconcile/gql_definitions/common/saas_files.py +49 -0
- reconcile/gql_definitions/dashdotdb_slo/slo_documents_query.py +15 -67
- reconcile/gql_definitions/fragments/container_image_mirror.py +33 -0
- reconcile/gql_definitions/fragments/saas_slo_document.py +82 -0
- reconcile/gql_definitions/gcp/__init__.py +0 -0
- reconcile/gql_definitions/gcp/gcp_docker_repos.py +128 -0
- reconcile/gql_definitions/gcp/gcp_projects.py +77 -0
- reconcile/gql_definitions/introspection.json +380 -230
- reconcile/quay_mirror.py +3 -42
- reconcile/quay_mirror_org.py +3 -2
- reconcile/slack_base.py +2 -2
- reconcile/utils/dynatrace/client.py +0 -31
- reconcile/utils/quay_mirror.py +42 -0
- reconcile/utils/saasherder/interfaces.py +2 -0
- reconcile/utils/saasherder/saasherder.py +5 -0
- reconcile/utils/slack_api.py +3 -1
- reconcile/utils/slo_document_manager.py +278 -0
- reconcile/utils/terrascript_aws_client.py +57 -0
- tools/{sd_app_sre_alert_report.py → alert_report.py} +1 -1
- tools/cli_commands/erv2.py +61 -0
- tools/qontract_cli.py +15 -5
- reconcile/gcr_mirror.py +0 -278
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/WHEEL +0 -0
- {qontract_reconcile-0.10.2.dev160.dist-info → qontract_reconcile-0.10.2.dev173.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,252 @@
|
|
1
|
+
import base64
|
2
|
+
import logging
|
3
|
+
import os
|
4
|
+
import tempfile
|
5
|
+
import time
|
6
|
+
from typing import Any, Self
|
7
|
+
|
8
|
+
import requests
|
9
|
+
from pydantic import BaseModel
|
10
|
+
from sretoolbox.container import (
|
11
|
+
Image,
|
12
|
+
Skopeo,
|
13
|
+
)
|
14
|
+
from sretoolbox.container.image import ImageComparisonError
|
15
|
+
from sretoolbox.container.skopeo import SkopeoCmdError
|
16
|
+
|
17
|
+
import reconcile.gql_definitions.gcp.gcp_docker_repos as gql_gcp_repos
|
18
|
+
import reconcile.gql_definitions.gcp.gcp_projects as gql_gcp_projects
|
19
|
+
from reconcile import queries
|
20
|
+
from reconcile.gql_definitions.fragments.container_image_mirror import (
|
21
|
+
ContainerImageMirror,
|
22
|
+
)
|
23
|
+
from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
|
24
|
+
from reconcile.utils import gql
|
25
|
+
from reconcile.utils.quay_mirror import record_timestamp, sync_tag
|
26
|
+
from reconcile.utils.secret_reader import SecretReader
|
27
|
+
|
28
|
+
QONTRACT_INTEGRATION = "gcp-image-mirror"
|
29
|
+
REQUEST_TIMEOUT = 60
|
30
|
+
GCR_SECRET_PREFIX = "gcr_"
|
31
|
+
AR_SECRET_PREFIX = "ar_"
|
32
|
+
|
33
|
+
|
34
|
+
class ImageSyncItem(BaseModel):
    # One repository that has an upstream mirror configured in app-interface:
    # `mirror` describes the upstream source (url, optional pull credentials,
    # tag include/exclude filters), `destination_url` is the GCR / Artifact
    # Registry target image, and `org_name` is the GCP project name used to
    # look up push credentials (see QuayMirror._get_push_creds).
    mirror: ContainerImageMirror
    destination_url: str
    org_name: str
|
38
|
+
|
39
|
+
|
40
|
+
class SyncTask(BaseModel):
    # One concrete skopeo copy operation: pull `source_url` — optionally
    # authenticated with `mirror_creds` ("user:password", or None for
    # anonymous pulls) — and push it to `dest_url`. `org_name` selects the
    # push credentials at execution time.
    mirror_creds: str | None = None
    source_url: str
    dest_url: str
    org_name: str
|
45
|
+
|
46
|
+
|
47
|
+
class QuayMirror:
    """Mirror container images into GCP registries (GCR / Artifact Registry).

    NOTE(review): the class name predates this GCP integration (the code was
    derived from the quay mirror integration); it mirrors into GCP, not Quay.

    Workflow: query app-interface for repos that declare a ``mirror`` section,
    compare upstream tags against what is already in the destination registry,
    and copy out-of-sync tags with skopeo.
    """

    def __init__(self, dry_run: bool = False) -> None:
        """Set up GQL access, secret reading, skopeo and push credentials.

        Args:
            dry_run: passed through to skopeo; no images are pushed when True.
        """
        self.dry_run = dry_run
        self.gqlapi = gql.get_api()
        settings = queries.get_app_interface_settings()
        self.secret_reader = SecretReader(settings=settings)
        self.skopeo_cli = Skopeo(dry_run)
        self.push_creds = self._get_push_creds()
        self.session = requests.Session()

    def __enter__(self) -> Self:
        return self

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # Only the shared requests session needs explicit cleanup.
        self.session.close()

    def run(self) -> None:
        """Query the repos to mirror and execute all pending sync tasks."""
        gql_result = gql_gcp_repos.query(query_func=self.gqlapi.query)
        processed_repos = self.process_repos_to_sync(gql_result)
        sync_tasks = self.process_sync_tasks(processed_repos)

        for task in sync_tasks:
            # Artifact Registry destinations (pkg.dev) use the AR push
            # credentials; everything else uses the legacy GCR ones.
            # Select the key by destination *first*: the previous code read
            # the gcr_ key unconditionally, which raised an uncaught KeyError
            # (and aborted the whole loop) for projects that only define
            # artifact registry push credentials.
            prefix = (
                AR_SECRET_PREFIX if "pkg.dev" in task.dest_url else GCR_SECRET_PREFIX
            )
            dest_creds = self.push_creds.get(f"{prefix}{task.org_name}")
            if dest_creds is None:
                logging.error(
                    "no push credentials found for project %s (destination %s)",
                    task.org_name,
                    task.dest_url,
                )
                continue

            try:
                self.skopeo_cli.copy(
                    src_image=task.source_url,
                    src_creds=task.mirror_creds,
                    dst_image=task.dest_url,
                    dest_creds=dest_creds,
                )
            except SkopeoCmdError as details:
                # Best effort: log and keep syncing the remaining tasks.
                logging.error("[%s]", details)

    # processes the GQL repos to come up with a list of items that need to be synced
    def process_repos_to_sync(
        self, repos: gql_gcp_repos.GcpDockerReposQueryData
    ) -> list[ImageSyncItem]:
        """Flatten the GQL query result into a list of ImageSyncItem.

        Covers both the legacy ``gcrRepos`` sections (destination URL is
        derived as gcr.io/<project>/<repo>) and the newer
        ``artifactRegistryMirrors`` sections (destination URL is explicit).
        """
        summary = list[ImageSyncItem]()
        if repos.apps:
            for app in repos.apps:
                if app.gcr_repos:
                    for gcr_project in app.gcr_repos:
                        for gcr_repo in gcr_project.items:
                            # gcrRepos items may omit `mirror`; only mirrored
                            # repos are synced.
                            if gcr_repo.mirror:
                                project_name = gcr_project.project.name
                                summary.append(
                                    ImageSyncItem(
                                        mirror=gcr_repo.mirror,
                                        destination_url=f"gcr.io/{project_name}/{gcr_repo.name}",
                                        org_name=project_name,
                                    )
                                )
                if app.artifact_registry_mirrors:
                    for ar_project in app.artifact_registry_mirrors:
                        for ar_repo in ar_project.items:
                            summary.append(
                                ImageSyncItem(
                                    mirror=ar_repo.mirror,
                                    destination_url=ar_repo.image_url,
                                    org_name=ar_project.project.name,
                                )
                            )

        return summary

    # second layer of processing that matches up pull/push creds with each repo and determines what tags need to be synced
    def process_sync_tasks(self, repos_to_sync: list[ImageSyncItem]) -> list[SyncTask]:
        """Compare upstream and destination tags and build the SyncTask list.

        A tag is scheduled for sync when it is missing from the destination,
        or — during a periodic "deep" sync — when the image content differs.
        """
        eight_hours = 28800  # 60 * 60 * 8
        is_deep_sync = self._is_deep_sync(interval=eight_hours)

        sync_tasks = list[SyncTask]()
        for item in repos_to_sync:
            image = Image(
                item.destination_url,
                session=self.session,
                timeout=REQUEST_TIMEOUT,
            )

            mirror_url = item.mirror.url

            username = None
            password = None
            mirror_creds = None
            pull_credentials = item.mirror.pull_credentials
            if pull_credentials:
                raw_data = self.secret_reader.read_all(pull_credentials.dict())
                username = raw_data["user"]
                password = raw_data["token"]
                mirror_creds = f"{username}:{password}"

            image_mirror = Image(
                mirror_url,
                username=username,
                password=password,
                session=self.session,
                timeout=REQUEST_TIMEOUT,
            )

            for tag in image_mirror:
                # Honor the tag include/exclude filters from app-interface.
                if not sync_tag(
                    tags=item.mirror.tags,
                    tags_exclude=item.mirror.tags_exclude,
                    candidate=tag,
                ):
                    continue

                # the Image class allows you to fetch Image information at a specific tag with a get operator
                upstream = image_mirror[tag]
                downstream = image[tag]
                if tag not in image:
                    logging.debug(
                        f"Image {image.image}: {downstream} and mirror {upstream} are out of sync"
                    )
                    sync_tasks.append(
                        SyncTask(
                            source_url=str(upstream),
                            mirror_creds=mirror_creds,
                            dest_url=str(downstream),
                            org_name=item.org_name,
                        )
                    )
                    continue

                # Deep (slow) check only in non dry-run mode
                if self.dry_run:
                    logging.debug(
                        f"Image {image.image}: {downstream} and mirror {upstream} are in sync"
                    )
                    continue

                # Deep (slow) check only from time to time
                if not is_deep_sync:
                    logging.debug(
                        f"Image {image.image}: {downstream} and mirror {upstream} are in sync"
                    )
                    continue

                try:
                    # Content comparison (manifest-level) — slow, hence gated
                    # behind the deep-sync interval above.
                    if downstream == upstream:
                        logging.debug(
                            f"Image {image.image}: {downstream} and mirror {upstream} are in sync",
                        )
                        continue
                except ImageComparisonError as details:
                    logging.error("[%s]", details)
                    continue

                logging.debug(
                    f"Image {image.image}: {downstream} and mirror {upstream} are out of sync"
                )
                sync_tasks.append(
                    SyncTask(
                        source_url=str(upstream),
                        mirror_creds=mirror_creds,
                        dest_url=str(downstream),
                        org_name=item.org_name,
                    )
                )

        return sync_tasks

    def _is_deep_sync(self, interval: int) -> bool:
        """Return True at most once per ``interval`` seconds.

        Uses a timestamp file in the system temp dir as a persistent marker;
        a missing file counts as "due" and (re)creates the marker.
        """
        control_file_name = "qontract-reconcile-gcp-image-mirror.timestamp"
        control_file_path = os.path.join(tempfile.gettempdir(), control_file_name)
        try:
            with open(control_file_path, encoding="locale") as file_obj:
                last_deep_sync = float(file_obj.read())
        except FileNotFoundError:
            record_timestamp(control_file_path)
            return True

        next_deep_sync = last_deep_sync + interval
        if time.time() >= next_deep_sync:
            record_timestamp(control_file_path)
            return True

        return False

    def _decode_push_secret(self, secret: VaultSecret) -> str:
        """Read a push secret and return "user:token".

        The token is stored base64-encoded in vault and is decoded here.
        """
        raw_data = self.secret_reader.read_all(secret.dict())
        token = base64.b64decode(raw_data["token"]).decode()
        return f"{raw_data['user']}:{token}"

    def _get_push_creds(self) -> dict[str, str]:
        """Build the push-credentials map, keyed by "<prefix><project name>".

        The ``ar_`` entry is always present per project; the ``gcr_`` entry
        only when the project still defines the legacy GCR push secret.
        """
        result = gql_gcp_projects.query(query_func=self.gqlapi.query)

        creds = dict[str, str]()
        if result.gcp_projects:
            for project_data in result.gcp_projects:
                # support old pull secret for backwards compatibility (although they are both using artifact registry on the backend)
                if project_data.gcr_push_credentials:
                    creds[f"{GCR_SECRET_PREFIX}{project_data.name}"] = (
                        self._decode_push_secret(project_data.gcr_push_credentials)
                    )
                creds[f"{AR_SECRET_PREFIX}{project_data.name}"] = (
                    self._decode_push_secret(project_data.artifact_push_credentials)
                )
        return creds
|
248
|
+
|
249
|
+
|
250
|
+
def run(dry_run: bool) -> None:
    """Integration entry point: mirror configured images into GCP registries.

    Args:
        dry_run: when True, skopeo runs in dry-run mode and nothing is pushed.
    """
    mirror = QuayMirror(dry_run=dry_run)
    with mirror:
        mirror.run()
|
reconcile/gitlab_housekeeping.py
CHANGED
@@ -229,7 +229,7 @@ def verify_on_demand_tests(
|
|
229
229
|
commit.id,
|
230
230
|
])
|
231
231
|
if not dry_run and state_change:
|
232
|
-
markdown_report = f"On-demand Tests: \n\n All necessary tests have
|
232
|
+
markdown_report = f"On-demand Tests: \n\n All necessary tests have passed for latest [commit]({commit.web_url})\n"
|
233
233
|
gl.delete_merge_request_comments(mr, startswith="On-demand Tests:")
|
234
234
|
gl.add_comment_to_merge_request(mr, markdown_report)
|
235
235
|
state.add(state_key, remaining_tests, force=True)
|
@@ -18,6 +18,7 @@ from pydantic import ( # noqa: F401 # pylint: disable=W0611
|
|
18
18
|
)
|
19
19
|
|
20
20
|
from reconcile.gql_definitions.fragments.oc_connection_cluster import OcConnectionCluster
|
21
|
+
from reconcile.gql_definitions.fragments.saas_slo_document import SLODocument
|
21
22
|
from reconcile.gql_definitions.fragments.saas_target_namespace import SaasTargetNamespace
|
22
23
|
from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
|
23
24
|
|
@@ -53,6 +54,50 @@ fragment OcConnectionCluster on Cluster_v1 {
|
|
53
54
|
}
|
54
55
|
}
|
55
56
|
|
57
|
+
fragment SLODocument on SLODocument_v1 {
|
58
|
+
name
|
59
|
+
namespaces {
|
60
|
+
prometheusAccess {
|
61
|
+
url
|
62
|
+
username {
|
63
|
+
... VaultSecret
|
64
|
+
}
|
65
|
+
password {
|
66
|
+
... VaultSecret
|
67
|
+
}
|
68
|
+
}
|
69
|
+
namespace {
|
70
|
+
name
|
71
|
+
app {
|
72
|
+
name
|
73
|
+
}
|
74
|
+
cluster {
|
75
|
+
name
|
76
|
+
automationToken {
|
77
|
+
... VaultSecret
|
78
|
+
}
|
79
|
+
prometheusUrl
|
80
|
+
spec {
|
81
|
+
private
|
82
|
+
}
|
83
|
+
}
|
84
|
+
}
|
85
|
+
SLONamespace {
|
86
|
+
name
|
87
|
+
}
|
88
|
+
}
|
89
|
+
slos {
|
90
|
+
name
|
91
|
+
expr
|
92
|
+
SLIType
|
93
|
+
SLOParameters {
|
94
|
+
window
|
95
|
+
}
|
96
|
+
SLOTarget
|
97
|
+
SLOTargetUnit
|
98
|
+
}
|
99
|
+
}
|
100
|
+
|
56
101
|
fragment SaasTargetNamespace on Namespace_v1 {
|
57
102
|
name
|
58
103
|
labels
|
@@ -253,6 +298,9 @@ query SaasFiles {
|
|
253
298
|
namespace {
|
254
299
|
...SaasTargetNamespace
|
255
300
|
}
|
301
|
+
slos {
|
302
|
+
...SLODocument
|
303
|
+
}
|
256
304
|
namespaceSelector {
|
257
305
|
jsonPathSelectors {
|
258
306
|
include
|
@@ -512,6 +560,7 @@ class SaasResourceTemplateTargetV2(ConfiguredBaseModel):
|
|
512
560
|
path: Optional[str] = Field(..., alias="path")
|
513
561
|
name: Optional[str] = Field(..., alias="name")
|
514
562
|
namespace: Optional[SaasTargetNamespace] = Field(..., alias="namespace")
|
563
|
+
slos: Optional[list[SLODocument]] = Field(..., alias="slos")
|
515
564
|
namespace_selector: Optional[SaasResourceTemplateTargetNamespaceSelectorV1] = Field(..., alias="namespaceSelector")
|
516
565
|
provider: Optional[str] = Field(..., alias="provider")
|
517
566
|
ref: str = Field(..., alias="ref")
|
@@ -17,19 +17,11 @@ from pydantic import ( # noqa: F401 # pylint: disable=W0611
|
|
17
17
|
Json,
|
18
18
|
)
|
19
19
|
|
20
|
-
from reconcile.gql_definitions.fragments.
|
20
|
+
from reconcile.gql_definitions.fragments.saas_slo_document import SLODocument
|
21
21
|
|
22
22
|
|
23
23
|
DEFINITION = """
|
24
|
-
fragment
|
25
|
-
path
|
26
|
-
field
|
27
|
-
version
|
28
|
-
format
|
29
|
-
}
|
30
|
-
|
31
|
-
query SLODocuments {
|
32
|
-
slo_documents: slo_document_v1 {
|
24
|
+
fragment SLODocument on SLODocument_v1 {
|
33
25
|
name
|
34
26
|
namespaces {
|
35
27
|
prometheusAccess {
|
@@ -71,6 +63,18 @@ query SLODocuments {
|
|
71
63
|
SLOTarget
|
72
64
|
SLOTargetUnit
|
73
65
|
}
|
66
|
+
}
|
67
|
+
|
68
|
+
fragment VaultSecret on VaultSecret_v1 {
|
69
|
+
path
|
70
|
+
field
|
71
|
+
version
|
72
|
+
format
|
73
|
+
}
|
74
|
+
|
75
|
+
query SLODocuments {
|
76
|
+
slo_documents: slo_document_v1 {
|
77
|
+
... SLODocument
|
74
78
|
}
|
75
79
|
}
|
76
80
|
"""
|
@@ -82,64 +86,8 @@ class ConfiguredBaseModel(BaseModel):
|
|
82
86
|
extra=Extra.forbid
|
83
87
|
|
84
88
|
|
85
|
-
class SLOExternalPrometheusAccessV1(ConfiguredBaseModel):
|
86
|
-
url: str = Field(..., alias="url")
|
87
|
-
username: Optional[VaultSecret] = Field(..., alias="username")
|
88
|
-
password: Optional[VaultSecret] = Field(..., alias="password")
|
89
|
-
|
90
|
-
|
91
|
-
class AppV1(ConfiguredBaseModel):
|
92
|
-
name: str = Field(..., alias="name")
|
93
|
-
|
94
|
-
|
95
|
-
class ClusterSpecV1(ConfiguredBaseModel):
|
96
|
-
private: bool = Field(..., alias="private")
|
97
|
-
|
98
|
-
|
99
|
-
class ClusterV1(ConfiguredBaseModel):
|
100
|
-
name: str = Field(..., alias="name")
|
101
|
-
automation_token: Optional[VaultSecret] = Field(..., alias="automationToken")
|
102
|
-
prometheus_url: str = Field(..., alias="prometheusUrl")
|
103
|
-
spec: Optional[ClusterSpecV1] = Field(..., alias="spec")
|
104
|
-
|
105
|
-
|
106
|
-
class NamespaceV1(ConfiguredBaseModel):
|
107
|
-
name: str = Field(..., alias="name")
|
108
|
-
app: AppV1 = Field(..., alias="app")
|
109
|
-
cluster: ClusterV1 = Field(..., alias="cluster")
|
110
|
-
|
111
|
-
|
112
|
-
class SLONamespacesV1_NamespaceV1(ConfiguredBaseModel):
|
113
|
-
name: str = Field(..., alias="name")
|
114
|
-
|
115
|
-
|
116
|
-
class SLONamespacesV1(ConfiguredBaseModel):
|
117
|
-
prometheus_access: Optional[SLOExternalPrometheusAccessV1] = Field(..., alias="prometheusAccess")
|
118
|
-
namespace: NamespaceV1 = Field(..., alias="namespace")
|
119
|
-
slo_namespace: Optional[SLONamespacesV1_NamespaceV1] = Field(..., alias="SLONamespace")
|
120
|
-
|
121
|
-
|
122
|
-
class SLODocumentSLOSLOParametersV1(ConfiguredBaseModel):
|
123
|
-
window: str = Field(..., alias="window")
|
124
|
-
|
125
|
-
|
126
|
-
class SLODocumentSLOV1(ConfiguredBaseModel):
|
127
|
-
name: str = Field(..., alias="name")
|
128
|
-
expr: str = Field(..., alias="expr")
|
129
|
-
sli_type: str = Field(..., alias="SLIType")
|
130
|
-
slo_parameters: SLODocumentSLOSLOParametersV1 = Field(..., alias="SLOParameters")
|
131
|
-
slo_target: float = Field(..., alias="SLOTarget")
|
132
|
-
slo_target_unit: str = Field(..., alias="SLOTargetUnit")
|
133
|
-
|
134
|
-
|
135
|
-
class SLODocumentV1(ConfiguredBaseModel):
|
136
|
-
name: str = Field(..., alias="name")
|
137
|
-
namespaces: list[SLONamespacesV1] = Field(..., alias="namespaces")
|
138
|
-
slos: Optional[list[SLODocumentSLOV1]] = Field(..., alias="slos")
|
139
|
-
|
140
|
-
|
141
89
|
class SLODocumentsQueryData(ConfiguredBaseModel):
|
142
|
-
slo_documents: Optional[list[
|
90
|
+
slo_documents: Optional[list[SLODocument]] = Field(..., alias="slo_documents")
|
143
91
|
|
144
92
|
|
145
93
|
def query(query_func: Callable, **kwargs: Any) -> SLODocumentsQueryData:
|
@@ -0,0 +1,33 @@
|
|
1
|
+
"""
|
2
|
+
Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
|
3
|
+
"""
|
4
|
+
from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
|
5
|
+
from datetime import datetime # noqa: F401 # pylint: disable=W0611
|
6
|
+
from enum import Enum # noqa: F401 # pylint: disable=W0611
|
7
|
+
from typing import ( # noqa: F401 # pylint: disable=W0611
|
8
|
+
Any,
|
9
|
+
Optional,
|
10
|
+
Union,
|
11
|
+
)
|
12
|
+
|
13
|
+
from pydantic import ( # noqa: F401 # pylint: disable=W0611
|
14
|
+
BaseModel,
|
15
|
+
Extra,
|
16
|
+
Field,
|
17
|
+
Json,
|
18
|
+
)
|
19
|
+
|
20
|
+
from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
|
21
|
+
|
22
|
+
|
23
|
+
class ConfiguredBaseModel(BaseModel):
|
24
|
+
class Config:
|
25
|
+
smart_union=True
|
26
|
+
extra=Extra.forbid
|
27
|
+
|
28
|
+
|
29
|
+
class ContainerImageMirror(ConfiguredBaseModel):
    # NOTE(review): qenerate output (see file header "DO NOT MODIFY
    # MANUALLY") — comments here will be lost on regeneration.
    # Upstream mirror spec: source url, optional vault pull credentials,
    # and optional tag include/exclude lists (None means unrestricted).
    url: str = Field(..., alias="url")
    pull_credentials: Optional[VaultSecret] = Field(..., alias="pullCredentials")
    tags: Optional[list[str]] = Field(..., alias="tags")
    tags_exclude: Optional[list[str]] = Field(..., alias="tagsExclude")
|
@@ -0,0 +1,82 @@
|
|
1
|
+
"""
|
2
|
+
Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
|
3
|
+
"""
|
4
|
+
from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
|
5
|
+
from datetime import datetime # noqa: F401 # pylint: disable=W0611
|
6
|
+
from enum import Enum # noqa: F401 # pylint: disable=W0611
|
7
|
+
from typing import ( # noqa: F401 # pylint: disable=W0611
|
8
|
+
Any,
|
9
|
+
Optional,
|
10
|
+
Union,
|
11
|
+
)
|
12
|
+
|
13
|
+
from pydantic import ( # noqa: F401 # pylint: disable=W0611
|
14
|
+
BaseModel,
|
15
|
+
Extra,
|
16
|
+
Field,
|
17
|
+
Json,
|
18
|
+
)
|
19
|
+
|
20
|
+
from reconcile.gql_definitions.fragments.vault_secret import VaultSecret
|
21
|
+
|
22
|
+
|
23
|
+
class ConfiguredBaseModel(BaseModel):
|
24
|
+
class Config:
|
25
|
+
smart_union=True
|
26
|
+
extra=Extra.forbid
|
27
|
+
|
28
|
+
|
29
|
+
class SLOExternalPrometheusAccessV1(ConfiguredBaseModel):
|
30
|
+
url: str = Field(..., alias="url")
|
31
|
+
username: Optional[VaultSecret] = Field(..., alias="username")
|
32
|
+
password: Optional[VaultSecret] = Field(..., alias="password")
|
33
|
+
|
34
|
+
|
35
|
+
class AppV1(ConfiguredBaseModel):
|
36
|
+
name: str = Field(..., alias="name")
|
37
|
+
|
38
|
+
|
39
|
+
class ClusterSpecV1(ConfiguredBaseModel):
|
40
|
+
private: bool = Field(..., alias="private")
|
41
|
+
|
42
|
+
|
43
|
+
class ClusterV1(ConfiguredBaseModel):
|
44
|
+
name: str = Field(..., alias="name")
|
45
|
+
automation_token: Optional[VaultSecret] = Field(..., alias="automationToken")
|
46
|
+
prometheus_url: str = Field(..., alias="prometheusUrl")
|
47
|
+
spec: Optional[ClusterSpecV1] = Field(..., alias="spec")
|
48
|
+
|
49
|
+
|
50
|
+
class NamespaceV1(ConfiguredBaseModel):
|
51
|
+
name: str = Field(..., alias="name")
|
52
|
+
app: AppV1 = Field(..., alias="app")
|
53
|
+
cluster: ClusterV1 = Field(..., alias="cluster")
|
54
|
+
|
55
|
+
|
56
|
+
class SLONamespacesV1_NamespaceV1(ConfiguredBaseModel):
|
57
|
+
name: str = Field(..., alias="name")
|
58
|
+
|
59
|
+
|
60
|
+
class SLONamespacesV1(ConfiguredBaseModel):
|
61
|
+
prometheus_access: Optional[SLOExternalPrometheusAccessV1] = Field(..., alias="prometheusAccess")
|
62
|
+
namespace: NamespaceV1 = Field(..., alias="namespace")
|
63
|
+
slo_namespace: Optional[SLONamespacesV1_NamespaceV1] = Field(..., alias="SLONamespace")
|
64
|
+
|
65
|
+
|
66
|
+
class SLODocumentSLOSLOParametersV1(ConfiguredBaseModel):
|
67
|
+
window: str = Field(..., alias="window")
|
68
|
+
|
69
|
+
|
70
|
+
class SLODocumentSLOV1(ConfiguredBaseModel):
|
71
|
+
name: str = Field(..., alias="name")
|
72
|
+
expr: str = Field(..., alias="expr")
|
73
|
+
sli_type: str = Field(..., alias="SLIType")
|
74
|
+
slo_parameters: SLODocumentSLOSLOParametersV1 = Field(..., alias="SLOParameters")
|
75
|
+
slo_target: float = Field(..., alias="SLOTarget")
|
76
|
+
slo_target_unit: str = Field(..., alias="SLOTargetUnit")
|
77
|
+
|
78
|
+
|
79
|
+
class SLODocument(ConfiguredBaseModel):
|
80
|
+
name: str = Field(..., alias="name")
|
81
|
+
namespaces: list[SLONamespacesV1] = Field(..., alias="namespaces")
|
82
|
+
slos: Optional[list[SLODocumentSLOV1]] = Field(..., alias="slos")
|
File without changes
|
@@ -0,0 +1,128 @@
|
|
1
|
+
"""
|
2
|
+
Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
|
3
|
+
"""
|
4
|
+
from collections.abc import Callable # noqa: F401 # pylint: disable=W0611
|
5
|
+
from datetime import datetime # noqa: F401 # pylint: disable=W0611
|
6
|
+
from enum import Enum # noqa: F401 # pylint: disable=W0611
|
7
|
+
from typing import ( # noqa: F401 # pylint: disable=W0611
|
8
|
+
Any,
|
9
|
+
Optional,
|
10
|
+
Union,
|
11
|
+
)
|
12
|
+
|
13
|
+
from pydantic import ( # noqa: F401 # pylint: disable=W0611
|
14
|
+
BaseModel,
|
15
|
+
Extra,
|
16
|
+
Field,
|
17
|
+
Json,
|
18
|
+
)
|
19
|
+
|
20
|
+
from reconcile.gql_definitions.fragments.container_image_mirror import ContainerImageMirror
|
21
|
+
|
22
|
+
|
23
|
+
DEFINITION = """
|
24
|
+
fragment ContainerImageMirror on ContainerImageMirror_v1 {
|
25
|
+
url
|
26
|
+
pullCredentials {
|
27
|
+
...VaultSecret
|
28
|
+
}
|
29
|
+
tags
|
30
|
+
tagsExclude
|
31
|
+
}
|
32
|
+
|
33
|
+
fragment VaultSecret on VaultSecret_v1 {
|
34
|
+
path
|
35
|
+
field
|
36
|
+
version
|
37
|
+
format
|
38
|
+
}
|
39
|
+
|
40
|
+
query GcpDockerRepos {
|
41
|
+
apps: apps_v1 {
|
42
|
+
gcrRepos {
|
43
|
+
project {
|
44
|
+
name
|
45
|
+
}
|
46
|
+
items {
|
47
|
+
name
|
48
|
+
mirror {
|
49
|
+
...ContainerImageMirror
|
50
|
+
}
|
51
|
+
}
|
52
|
+
}
|
53
|
+
artifactRegistryMirrors {
|
54
|
+
project {
|
55
|
+
name
|
56
|
+
}
|
57
|
+
items {
|
58
|
+
imageURL
|
59
|
+
mirror {
|
60
|
+
...ContainerImageMirror
|
61
|
+
}
|
62
|
+
}
|
63
|
+
}
|
64
|
+
}
|
65
|
+
}
|
66
|
+
"""
|
67
|
+
|
68
|
+
|
69
|
+
class ConfiguredBaseModel(BaseModel):
|
70
|
+
class Config:
|
71
|
+
smart_union=True
|
72
|
+
extra=Extra.forbid
|
73
|
+
|
74
|
+
|
75
|
+
class GcpProjectV1(ConfiguredBaseModel):
|
76
|
+
name: str = Field(..., alias="name")
|
77
|
+
|
78
|
+
|
79
|
+
class AppGcrReposItemsV1(ConfiguredBaseModel):
|
80
|
+
name: str = Field(..., alias="name")
|
81
|
+
mirror: Optional[ContainerImageMirror] = Field(..., alias="mirror")
|
82
|
+
|
83
|
+
|
84
|
+
class AppGcrReposV1(ConfiguredBaseModel):
|
85
|
+
project: GcpProjectV1 = Field(..., alias="project")
|
86
|
+
items: list[AppGcrReposItemsV1] = Field(..., alias="items")
|
87
|
+
|
88
|
+
|
89
|
+
class AppArtifactRegistryMirrorsV1_GcpProjectV1(ConfiguredBaseModel):
|
90
|
+
name: str = Field(..., alias="name")
|
91
|
+
|
92
|
+
|
93
|
+
class AppArtifactRegistryMirrorsItemsV1(ConfiguredBaseModel):
|
94
|
+
image_url: str = Field(..., alias="imageURL")
|
95
|
+
mirror: ContainerImageMirror = Field(..., alias="mirror")
|
96
|
+
|
97
|
+
|
98
|
+
class AppArtifactRegistryMirrorsV1(ConfiguredBaseModel):
|
99
|
+
project: AppArtifactRegistryMirrorsV1_GcpProjectV1 = Field(..., alias="project")
|
100
|
+
items: list[AppArtifactRegistryMirrorsItemsV1] = Field(..., alias="items")
|
101
|
+
|
102
|
+
|
103
|
+
class AppV1(ConfiguredBaseModel):
|
104
|
+
gcr_repos: Optional[list[AppGcrReposV1]] = Field(..., alias="gcrRepos")
|
105
|
+
artifact_registry_mirrors: Optional[list[AppArtifactRegistryMirrorsV1]] = Field(..., alias="artifactRegistryMirrors")
|
106
|
+
|
107
|
+
|
108
|
+
class GcpDockerReposQueryData(ConfiguredBaseModel):
|
109
|
+
apps: Optional[list[AppV1]] = Field(..., alias="apps")
|
110
|
+
|
111
|
+
|
112
|
+
def query(query_func: Callable, **kwargs: Any) -> GcpDockerReposQueryData:
|
113
|
+
"""
|
114
|
+
This is a convenience function which queries and parses the data into
|
115
|
+
concrete types. It should be compatible with most GQL clients.
|
116
|
+
You do not have to use it to consume the generated data classes.
|
117
|
+
Alternatively, you can also mime and alternate the behavior
|
118
|
+
of this function in the caller.
|
119
|
+
|
120
|
+
Parameters:
|
121
|
+
query_func (Callable): Function which queries your GQL Server
|
122
|
+
kwargs: optional arguments that will be passed to the query function
|
123
|
+
|
124
|
+
Returns:
|
125
|
+
GcpDockerReposQueryData: queried data parsed into generated classes
|
126
|
+
"""
|
127
|
+
raw_data: dict[Any, Any] = query_func(DEFINITION, **kwargs)
|
128
|
+
return GcpDockerReposQueryData(**raw_data)
|