qontract-reconcile 0.10.1rc1020__py3-none-any.whl → 0.10.1rc1021__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to their public registry. It is provided for informational purposes only.
@@ -0,0 +1,64 @@
+ Metadata-Version: 2.1
+ Name: qontract-reconcile
+ Version: 0.10.1rc1021
+ Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
+ Home-page: https://github.com/app-sre/qontract-reconcile
+ Author: Red Hat App-SRE Team
+ Author-email: sd-app-sre@redhat.com
+ License: Apache License 2.0
+ Classifier: Development Status :: 2 - Pre-Alpha
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.11
+ Requires-Python: >=3.11
+ Requires-Dist: sretoolbox~=2.5
+ Requires-Dist: Click<9.0,>=7.0
+ Requires-Dist: gql==3.1.0
+ Requires-Dist: toml<0.11.0,>=0.10.0
+ Requires-Dist: jsonpath-rw<1.5.0,>=1.4.0
+ Requires-Dist: PyGithub<1.59,>=1.58
+ Requires-Dist: hvac<0.8.0,>=0.7.0
+ Requires-Dist: ldap3<2.10.0,>=2.9.1
+ Requires-Dist: anymarkup<0.9.0,>=0.7.0
+ Requires-Dist: python-gitlab~=4.6
+ Requires-Dist: semver~=3.0
+ Requires-Dist: boto3==1.34.94
+ Requires-Dist: botocore==1.34.94
+ Requires-Dist: urllib3~=2.2
+ Requires-Dist: slack-sdk<4.0,>=3.10
+ Requires-Dist: pypd<1.2.0,>=1.1.0
+ Requires-Dist: Jinja2<3.2.0,>=2.10.1
+ Requires-Dist: jira~=3.1
+ Requires-Dist: pyOpenSSL~=23.0
+ Requires-Dist: ruamel.yaml<0.18.0,>=0.17.22
+ Requires-Dist: terrascript==0.9.0
+ Requires-Dist: tabulate<0.9.0,>=0.8.6
+ Requires-Dist: UnleashClient~=5.11
+ Requires-Dist: prometheus-client~=0.8
+ Requires-Dist: sentry-sdk~=2.0
+ Requires-Dist: jenkins-job-builder~=4.3.0
+ Requires-Dist: parse==1.18.0
+ Requires-Dist: sendgrid<6.5.0,>=6.4.8
+ Requires-Dist: dnspython~=2.1
+ Requires-Dist: requests~=2.32
+ Requires-Dist: kubernetes~=24.0
+ Requires-Dist: websocket-client<0.55.0,>=0.35
+ Requires-Dist: sshtunnel>=0.4.0
+ Requires-Dist: croniter<1.1.0,>=1.0.15
+ Requires-Dist: pydantic~=1.10.6
+ Requires-Dist: MarkupSafe==2.1.1
+ Requires-Dist: filetype~=1.2.0
+ Requires-Dist: psycopg2~=2.9
+ Requires-Dist: packaging~=23.1
+ Requires-Dist: deepdiff==6.7.1
+ Requires-Dist: jsonpath-ng==1.5.3
+ Requires-Dist: networkx~=2.8
+ Requires-Dist: rich<14.0.0,>=13.3.0
+ Requires-Dist: dateparser~=1.1.7
+ Requires-Dist: pyjwt~=2.7
+ Requires-Dist: requests-oauthlib~=1.3
+ Requires-Dist: dt==1.1.61
+ Requires-Dist: jsonpatch~=1.33
+ Requires-Dist: jsonpointer~=2.4
+ Requires-Dist: yamllint==1.34.0
+
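For illustration (not part of the package diff): the pins above are plain PEP 508 requirement strings, so once the wheel is installed they can be read back through the standard library. A minimal sketch, assuming qontract-reconcile 0.10.1rc1021 is installed in the current environment:

    from importlib.metadata import metadata, requires

    # Core metadata fields from the METADATA file shown above.
    meta = metadata("qontract-reconcile")
    print(meta["Version"])  # 0.10.1rc1021

    # Requires-Dist entries, e.g. "boto3==1.34.94" or "pydantic~=1.10.6".
    for requirement in requires("qontract-reconcile") or []:
        print(requirement)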
@@ -10,7 +10,7 @@ reconcile/aws_iam_password_reset.py,sha256=q96mwr2KeEQ5bpNniGlgIMZTxiuLSodcYfX-t
  reconcile/aws_support_cases_sos.py,sha256=Jk6_XjDeJSYxgRGqcEAOcynt9qJF2r5HPIPcSKmoBv8,2974
  reconcile/blackbox_exporter_endpoint_monitoring.py,sha256=O1wFp52EyF538c6txaWBs8eMtUIy19gyHZ6VzJ6QXS8,3512
  reconcile/checkpoint.py,sha256=_JhMxrye5BgkRMxWYuf7Upli6XayPINKSsuo3ynHTRc,5010
- reconcile/cli.py,sha256=lLVw-FxEUR8zU6UAKZzJk7XwRbjsXPaGUAQqXuBYSaU,105825
+ reconcile/cli.py,sha256=rfT0jrWjDjvbUE0JWVvSladV2vTKqBBdNrthlH2Ekyc,106979
  reconcile/closedbox_endpoint_monitoring_base.py,sha256=rLh16BOlBOxTmJ8Si3wWyyEpmMlhh4Znx1Gc36qsmOc,4865
  reconcile/cluster_deployment_mapper.py,sha256=5gumAaRCcFXsabUJ1dnuUy9WrP_FEEM5JnOnE8ch9sE,2326
  reconcile/dashdotdb_base.py,sha256=l34QDu1G96_Ctnh7ZXdxXgSeCE93GQMdLAkWxmN6vDA,4775
@@ -185,6 +185,10 @@ reconcile/dynatrace_token_provider/integration.py,sha256=QY-k5vsbBOm80yW_RB6G2JZ
  reconcile/dynatrace_token_provider/metrics.py,sha256=xiKkl8fTEBQaXJelGCPNTZhHAWdO1M3pCXNr_Tei63c,1285
  reconcile/dynatrace_token_provider/model.py,sha256=gkpqo5rRRueBXnIMjp4EEHqBUBuU65TRI8zpdb8GJ0A,241
  reconcile/dynatrace_token_provider/ocm.py,sha256=iHMsgbsLs-dlrB9UXmWNDF7E4UDe49JOsLa9rnowKfo,4282
+ reconcile/endpoints_discovery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reconcile/endpoints_discovery/integration.py,sha256=OFdSno8C5VVOoXALI5BFxHA3hfIhTZ55lJaMa0jLt0s,12210
+ reconcile/endpoints_discovery/merge_request.py,sha256=NktpwMUoQ9TvBzOeHSPC09OqxWmgZBZVy-Eqmsm_vrA,2909
+ reconcile/endpoints_discovery/merge_request_manager.py,sha256=oY3lovy5DcUgHNEDzWrbOPabzZtaGMns6YLl5ipZJ0c,6346
  reconcile/external_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/external_resources/aws.py,sha256=JvjKaABy2Pg8u8Lq82Acv4zMvpE3_qGKes7OG-zlHOM,2956
  reconcile/external_resources/factories.py,sha256=DXgaLxoO87zZ76VOpRpu2GeYGhsbfOnOx5mrzgo4Gf4,4767
@@ -280,6 +284,8 @@ reconcile/gql_definitions/dashdotdb_slo/slo_documents_query.py,sha256=zUa-CmpOwi
  reconcile/gql_definitions/dynatrace_token_provider/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/dynatrace_token_provider/dynatrace_bootstrap_tokens.py,sha256=5gTuAnR2rnx2k6Rn7FMEAzw6GCZ6F5HZbqkmJ9-3NI4,2244
  reconcile/gql_definitions/dynatrace_token_provider/token_specs.py,sha256=XGsMuB8gowRpqJjkD_KRomx-1OswzyWbF4qjVdhionk,2555
+ reconcile/gql_definitions/endpoints_discovery/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ reconcile/gql_definitions/endpoints_discovery/namespaces.py,sha256=FqJ0H7NdsIm5BgVnuJV9wLcj7i667VhCN559tWJ-WsA,3054
  reconcile/gql_definitions/external_resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  reconcile/gql_definitions/external_resources/aws_accounts.py,sha256=XR69j9dpTQ0gv8y-AZN7AJ0dPvO-wbHscyCDgrax6Bk,2046
  reconcile/gql_definitions/external_resources/external_resources_modules.py,sha256=g2KB2wRnb8zF7xCmDJJFmiRdE4z4aYa9HtY3vCBVwMA,2441
@@ -859,8 +865,8 @@ tools/test/test_qontract_cli.py,sha256=_D61RFGAN5x44CY1tYbouhlGXXABwYfxKSWSQx3Jr
  tools/test/test_saas_promotion_state.py,sha256=dy4kkSSAQ7bC0Xp2CociETGN-2aABEfL6FU5D9Jl00Y,6056
  tools/test/test_sd_app_sre_alert_report.py,sha256=v363r9zM7__0kR5K6mvJoGFcM9BvE33fWAayrqkpojA,2116
  tools/test/test_sre_checkpoints.py,sha256=SKqPPTl9ua0RFdSSofnoQX-JZE6dFLO3LRhfQzqtfh8,2607
- qontract_reconcile-0.10.1rc1020.dist-info/METADATA,sha256=nlUFKwjsUyVZVDAbzvIF_4FhV-g1XlrJk91Z7lLRsjI,2263
- qontract_reconcile-0.10.1rc1020.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- qontract_reconcile-0.10.1rc1020.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
- qontract_reconcile-0.10.1rc1020.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
- qontract_reconcile-0.10.1rc1020.dist-info/RECORD,,
+ qontract_reconcile-0.10.1rc1021.dist-info/METADATA,sha256=Ob87vLcMc2mnkX4nPY_xWLWfzwuDB_Hgp2zMZVknZds,2213
+ qontract_reconcile-0.10.1rc1021.dist-info/WHEEL,sha256=eOLhNAGa2EW3wWl_TU484h7q1UNgy0JXjjoqKoxAAQc,92
+ qontract_reconcile-0.10.1rc1021.dist-info/entry_points.txt,sha256=GKQqCl2j2X1BJQ69een6rHcR26PmnxnONLNOQB-nRjY,491
+ qontract_reconcile-0.10.1rc1021.dist-info/top_level.txt,sha256=l5ISPoXzt0SdR4jVdkfa7RPSKNc8zAHYWAnR-Dw8Ey8,24
+ qontract_reconcile-0.10.1rc1021.dist-info/RECORD,,
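For illustration (not part of the package diff): each RECORD entry is "path,sha256=<digest>,<size>", where the digest is the urlsafe-base64-encoded SHA-256 of the file with the trailing "=" padding stripped. A minimal sketch of how such an entry could be recomputed, assuming the file is available locally:

    import base64
    import hashlib
    from pathlib import Path

    def record_digest(path: Path) -> str:
        # Same encoding as the RECORD entries above (PEP 376 / wheel spec).
        raw = hashlib.sha256(path.read_bytes()).digest()
        return "sha256=" + base64.urlsafe_b64encode(raw).rstrip(b"=").decode()

    # Hypothetical check against the new entry for reconcile/cli.py:
    # record_digest(Path("reconcile/cli.py"))
    # -> "sha256=rfT0jrWjDjvbUE0JWVvSladV2vTKqBBdNrthlH2Ekyc"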
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: bdist_wheel (0.44.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
reconcile/cli.py CHANGED
@@ -1802,6 +1802,49 @@ def openshift_prometheus_rules(
  )


+ @integration.command(short_help="Discover routes and update endpoints")
+ @threaded()
+ @binary(["oc"])
+ @binary_version("oc", ["version", "--client"], OC_VERSION_REGEX, OC_VERSIONS)
+ @internal()
+ @use_jump_host()
+ @cluster_name
+ @namespace_name
+ @click.option(
+     "--endpoint-tmpl-resource",
+     help="Resource name of the endpoint template in the app-interface.",
+     required=False,
+ )
+ @click.pass_context
+ def endpoints_discovery(
+     ctx,
+     thread_pool_size,
+     internal,
+     use_jump_host,
+     cluster_name,
+     namespace_name,
+     endpoint_tmpl_resource,
+ ):
+     from reconcile.endpoints_discovery.integration import (
+         EndpointsDiscoveryIntegration,
+         EndpointsDiscoveryIntegrationParams,
+     )
+
+     params = EndpointsDiscoveryIntegrationParams(
+         thread_pool_size=thread_pool_size,
+         internal=internal,
+         use_jump_host=use_jump_host,
+         cluster_name=cluster_name,
+         namespace_name=namespace_name,
+     )
+     if endpoint_tmpl_resource:
+         params.endpoint_tmpl_resource = endpoint_tmpl_resource
+     run_class_integration(
+         integration=EndpointsDiscoveryIntegration(params),
+         ctx=ctx.obj,
+     )
+
+
  @integration.command(short_help="Configures the teams and members in Quay.")
  @click.pass_context
  def quay_membership(ctx):
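For illustration (not part of the package diff): the new subcommand only assembles EndpointsDiscoveryIntegrationParams and hands the integration to run_class_integration, so roughly the same thing can be done programmatically. A minimal sketch, assuming a fully configured qontract-reconcile environment (gql endpoint, secret reader, etc.); the cluster and namespace names are hypothetical:

    from reconcile.endpoints_discovery.integration import (
        EndpointsDiscoveryIntegration,
        EndpointsDiscoveryIntegrationParams,
    )

    # Mirrors what the new CLI options feed into the integration.
    params = EndpointsDiscoveryIntegrationParams(
        thread_pool_size=10,
        use_jump_host=False,
        cluster_name={"example-cluster"},      # hypothetical
        namespace_name="example-namespace",    # hypothetical
    )
    EndpointsDiscoveryIntegration(params).run(dry_run=True)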
reconcile/endpoints_discovery/integration.py ADDED
@@ -0,0 +1,336 @@
+ import logging
+ from collections.abc import Callable, Iterable
+ from typing import TypedDict
+
+ import jinja2
+ from pydantic import BaseModel
+
+ from reconcile.endpoints_discovery.merge_request import Renderer, create_parser
+ from reconcile.endpoints_discovery.merge_request_manager import (
+     App,
+     Endpoint,
+     EndpointsToAdd,
+     EndpointsToChange,
+     EndpointsToDelete,
+     MergeRequestManager,
+ )
+ from reconcile.gql_definitions.endpoints_discovery.namespaces import (
+     AppEndPointsV1,
+     NamespaceV1,
+ )
+ from reconcile.gql_definitions.endpoints_discovery.namespaces import (
+     query as namespaces_query,
+ )
+ from reconcile.typed_queries.app_interface_repo_url import get_app_interface_repo_url
+ from reconcile.typed_queries.github_orgs import get_github_orgs
+ from reconcile.typed_queries.gitlab_instances import get_gitlab_instances
+ from reconcile.utils import gql
+ from reconcile.utils.defer import defer
+ from reconcile.utils.differ import diff_any_iterables
+ from reconcile.utils.disabled_integrations import integration_is_enabled
+ from reconcile.utils.extended_early_exit import (
+     ExtendedEarlyExitRunnerResult,
+     extended_early_exit_run,
+ )
+ from reconcile.utils.oc_map import OCMap, init_oc_map_from_namespaces
+ from reconcile.utils.ruamel import create_ruamel_instance
+ from reconcile.utils.runtime.integration import (
+     PydanticRunParams,
+     QontractReconcileIntegration,
+ )
+ from reconcile.utils.semver_helper import make_semver
+ from reconcile.utils.unleash import get_feature_toggle_state
+ from reconcile.utils.vcs import VCS
+
+ QONTRACT_INTEGRATION = "endpoints-discovery"
+ QONTRACT_INTEGRATION_VERSION = make_semver(1, 0, 0)
+
+
+ class EndpointsDiscoveryIntegrationParams(PydanticRunParams):
+     thread_pool_size: int = 10
+     internal: bool | None = None
+     use_jump_host: bool = True
+     cluster_name: set[str] | None = None
+     namespace_name: str | None = None
+     endpoint_tmpl_resource: str = "/endpoints-discovery/endpoint-template.yml"
+     # extended early exit parameters
+     enable_extended_early_exit: bool = False
+     extended_early_exit_cache_ttl_seconds: int = 7200  # run every 2 hours
+     log_cached_log_output: bool = False
+
+
+ class Route(BaseModel):
+     name: str
+     host: str
+     tls: bool
+
+     @property
+     def url(self) -> str:
+         return f"{self.host}:{443 if self.tls else 80}"
+
+
+ def endpoint_prefix(namespace: NamespaceV1) -> str:
+     return f"{QONTRACT_INTEGRATION}-{namespace.cluster.name}-{namespace.name}"
+
+
+ def compile_endpoint_name(endpoint_prefix: str, route: Route) -> str:
+     return f"{endpoint_prefix}-{route.name}"
+
+
+ def render_template(template: str, endpoint_name: str, route: Route) -> dict:
+     yml = create_ruamel_instance()
+     return yml.load(
+         jinja2.Template(
+             template,
+             undefined=jinja2.StrictUndefined,
+             trim_blocks=True,
+             lstrip_blocks=True,
+             keep_trailing_newline=True,
+         ).render({"endpoint_name": endpoint_name, "route": route})
+     )
+
+
+ class RunnerParams(TypedDict):
+     oc_map: OCMap
+     merge_request_manager: MergeRequestManager
+     endpoint_template: str
+     namespaces: Iterable[NamespaceV1]
+
+
+ class EndpointsDiscoveryIntegration(
+     QontractReconcileIntegration[EndpointsDiscoveryIntegrationParams]
+ ):
+     """Discover routes from all OpenShift clusters and update endPoints in app-interface."""
+
+     @property
+     def name(self) -> str:
+         return QONTRACT_INTEGRATION
+
+     def get_desired_state_shard_config(self) -> None:
+         """Sharding (per cluster) is not supported for this integration.
+
+         An application can have endpoints in multiple clusters and this may cause merge conflicts."""
+         return None
+
+     def get_namespaces(
+         self,
+         query_func: Callable,
+         cluster_names: Iterable[str] | None = None,
+         namespace_name: str | None = None,
+     ) -> list[NamespaceV1]:
+         """Return namespaces to consider for the integration."""
+         return [
+             ns
+             for ns in namespaces_query(query_func).namespaces or []
+             if integration_is_enabled(self.name, ns.cluster)
+             and (not cluster_names or ns.cluster.name in cluster_names)
+             and (not namespace_name or ns.name == namespace_name)
+             and not ns.delete
+         ]
+
+     def get_routes(self, oc_map: OCMap, namespace: NamespaceV1) -> list[Route]:
+         """Return the routes for the given namespace."""
+         oc = oc_map.get_cluster(namespace.cluster.name)
+         if not oc.project_exists(namespace.name):
+             logging.info(
+                 f"{namespace.cluster.name}/{namespace.name}: Namespace does not exist (yet). Skipping for now!"
+             )
+             return []
+
+         return [
+             Route(
+                 name=item["metadata"]["name"],
+                 host=item["spec"]["host"],
+                 tls=bool(item["spec"].get("tls")),
+             )
+             for item in oc.get_items(kind="Route", namespace=namespace.name)
+         ]
+
+     def get_endpoint_changes(
+         self,
+         app: str,
+         endpoint_prefix: str,
+         endpoint_template: str,
+         endpoints: Iterable[AppEndPointsV1],
+         routes: Iterable[Route],
+     ) -> tuple[EndpointsToAdd, EndpointsToChange, EndpointsToDelete]:
+         """Get all new/changed/deleted endpoints for the given namespace."""
+         if not routes and not endpoints:
+             # nothing to do
+             return [], [], []
+
+         diff = diff_any_iterables(
+             # exclude manual endpoints
+             current=[
+                 endpoint
+                 for endpoint in endpoints
+                 if endpoint.name.startswith(endpoint_prefix)
+             ]
+             or [],
+             desired=routes,
+             # names are unique, so we can use them as keys
+             current_key=lambda endpoint: endpoint.name,
+             desired_key=lambda route: compile_endpoint_name(endpoint_prefix, route),
+             # compare the endpoint and route by url.
+             # we can't use other endpoint attributes because we don't want to query them.
+             # there is a note about that behavior in the template.
+             equal=lambda endpoint, route: endpoint.url == route.url,
+         )
+
+         endpoints_to_add = []
+         endpoints_to_change = []
+         endpoints_to_delete = []
+
+         for add in diff.add.values():
+             logging.info(f"{app}: Adding endpoint for route {add.name}")
+             endpoints_to_add.append(
+                 Endpoint(
+                     name=compile_endpoint_name(endpoint_prefix, add),
+                     data=render_template(
+                         endpoint_template,
+                         endpoint_name=compile_endpoint_name(endpoint_prefix, add),
+                         route=add,
+                     ),
+                 )
+             )
+
+         for pair in diff.change.values():
+             logging.info(
+                 f"{app}: Changing endpoint {pair.current.name} for route {pair.desired.name}"
+             )
+             endpoints_to_change.append(
+                 Endpoint(
+                     name=pair.current.name,
+                     data=render_template(
+                         endpoint_template,
+                         endpoint_name=compile_endpoint_name(
+                             endpoint_prefix, pair.desired
+                         ),
+                         route=pair.desired,
+                     ),
+                 )
+             )
+         for delete in diff.delete.values():
+             logging.info(f"{app}: Deleting endpoint for route {delete.name}")
+             endpoints_to_delete.append(Endpoint(name=delete.name))
+         return endpoints_to_add, endpoints_to_change, endpoints_to_delete
+
+     def get_apps(
+         self, oc_map: OCMap, endpoint_template: str, namespaces: Iterable[NamespaceV1]
+     ) -> list[App]:
+         """Compile a list of apps with their endpoints to add, change and delete."""
+         apps: dict[str, App] = {}
+         for namespace in namespaces:
+             routes = self.get_routes(oc_map, namespace)
+             endpoints_to_add, endpoints_to_change, endpoints_to_delete = (
+                 self.get_endpoint_changes(
+                     app=namespace.app.name,
+                     endpoint_prefix=endpoint_prefix(namespace),
+                     endpoint_template=endpoint_template,
+                     endpoints=namespace.app.end_points or [],
+                     routes=routes,
+                 )
+             )
+             # update the app with the endpoints per namespace
+             app = apps.setdefault(
+                 namespace.app.path,
+                 App(name=namespace.app.name, path=namespace.app.path),
+             )
+             app.endpoints_to_add += endpoints_to_add
+             app.endpoints_to_change += endpoints_to_change
+             app.endpoints_to_delete += endpoints_to_delete
+
+         # return only apps with endpoint changes
+         return [
+             app
+             for app in apps.values()
+             if app.endpoints_to_add
+             or app.endpoints_to_change
+             or app.endpoints_to_delete
+         ]
+
+     def runner(
+         self,
+         oc_map: OCMap,
+         merge_request_manager: MergeRequestManager,
+         endpoint_template: str,
+         namespaces: Iterable[NamespaceV1],
+     ) -> ExtendedEarlyExitRunnerResult:
+         """Reconcile the endpoints for all namespaces."""
+         apps = self.get_apps(oc_map, endpoint_template, namespaces)
+         merge_request_manager.create_merge_request(apps=apps)
+         return ExtendedEarlyExitRunnerResult(payload={}, applied_count=len(apps))
+
+     @defer
+     def run(self, dry_run: bool, defer: Callable | None = None) -> None:
+         """Run the integration."""
+         gql_api = gql.get_api()
+         namespaces = self.get_namespaces(
+             gql_api.query,
+             cluster_names=self.params.cluster_name,
+             namespace_name=self.params.namespace_name,
+         )
+         if not namespaces:
+             # nothing to do
+             return
+
+         oc_map = init_oc_map_from_namespaces(
+             namespaces=namespaces,
+             secret_reader=self.secret_reader,
+             integration=QONTRACT_INTEGRATION,
+             use_jump_host=self.params.use_jump_host,
+             thread_pool_size=self.params.thread_pool_size,
+             internal=self.params.internal,
+             init_projects=True,
+         )
+
+         if defer:
+             defer(oc_map.cleanup)
+
+         vcs = VCS(
+             secret_reader=self.secret_reader,
+             github_orgs=get_github_orgs(),
+             gitlab_instances=get_gitlab_instances(),
+             app_interface_repo_url=get_app_interface_repo_url(),
+             dry_run=dry_run,
+             allow_deleting_mrs=True,
+             allow_opening_mrs=True,
+         )
+         if defer:
+             defer(vcs.cleanup)
+         merge_request_manager = MergeRequestManager(
+             vcs=vcs,
+             renderer=Renderer(),
+             parser=create_parser(),
+             auto_merge_enabled=get_feature_toggle_state(
+                 integration_name=f"{self.name}-allow-auto-merge-mrs", default=False
+             ),
+         )
+         endpoint_template = gql_api.get_resource(
+             path=self.params.endpoint_tmpl_resource
+         )["content"]
+
+         runner_params: RunnerParams = {
+             "oc_map": oc_map,
+             "merge_request_manager": merge_request_manager,
+             "endpoint_template": endpoint_template,
+             "namespaces": namespaces,
+         }
+
+         if self.params.enable_extended_early_exit and get_feature_toggle_state(
+             f"{QONTRACT_INTEGRATION}-extended-early-exit", default=True
+         ):
+             extended_early_exit_run(
+                 integration=QONTRACT_INTEGRATION,
+                 integration_version=QONTRACT_INTEGRATION_VERSION,
+                 dry_run=dry_run,
+                 cache_source=self.get_early_exit_desired_state(),
+                 shard="",
+                 ttl_seconds=self.params.extended_early_exit_cache_ttl_seconds,
+                 logger=logging.getLogger(),
+                 runner=self.runner,
+                 runner_params=runner_params,
+                 log_cached_log_output=self.params.log_cached_log_output,
+             )
+         else:
+             self.runner(**runner_params)
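For illustration (not part of the package diff): endpoint names are derived from the integration name, cluster, namespace and route, and routes are matched against existing endpoints purely by URL (host plus a port derived from TLS). A self-contained sketch of that naming scheme; the Route model below mirrors the one in integration.py rather than importing it, and all values are made up:

    from pydantic import BaseModel

    class Route(BaseModel):
        # Mirror of reconcile/endpoints_discovery/integration.py:Route
        name: str
        host: str
        tls: bool

        @property
        def url(self) -> str:
            return f"{self.host}:{443 if self.tls else 80}"

    route = Route(name="frontend", host="frontend.apps.example.com", tls=True)
    prefix = "endpoints-discovery-example-cluster-example-namespace"  # endpoint_prefix()
    print(f"{prefix}-{route.name}")  # compile_endpoint_name() -> endpoint name in app-interface
    print(route.url)                 # frontend.apps.example.com:443 -> compared to endpoint.url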
reconcile/endpoints_discovery/merge_request.py ADDED
@@ -0,0 +1,92 @@
+ import re
+ import string
+
+ from pydantic import BaseModel
+ from ruamel.yaml.compat import StringIO
+
+ from reconcile.utils.merge_request_manager.parser import Parser
+ from reconcile.utils.ruamel import create_ruamel_instance
+
+ VERSION = "1.0.0"
+ INTEGRATION = "endpoints-discovery"
+ LABEL = "ENDPOINTS-DISCOVERY"
+
+ INTEGRATION_REF = "integration"
+ VERSION_REF = "endpoints-discover-version"
+ HASH_REF = "hash"
+ COMPILED_REGEXES = {
+     i: re.compile(rf".*{i}: (.*)$", re.MULTILINE)
+     for i in [INTEGRATION_REF, VERSION_REF, HASH_REF]
+ }
+
+ PROMOTION_DATA_SEPARATOR = "**DO NOT MANUALLY CHANGE ANYTHING BELOW THIS LINE**"
+ DESC = string.Template(
+     f"""
+ This MR is triggered by app-interface's [endpoints-discovery](https://github.com/app-sre/qontract-reconcile/tree/master/reconcile/endpoints_discovery).
+
+ Please **do not remove the `{LABEL}` label** from this MR!
+
+ Parts of this description are used to manage the MR.
+
+ {PROMOTION_DATA_SEPARATOR}
+
+ * {INTEGRATION_REF}: {INTEGRATION}
+ * {VERSION_REF}: {VERSION}
+ * {HASH_REF}: $hash
+ """
+ )
+
+
+ class EPDInfo(BaseModel):
+     integration: str = INTEGRATION
+     hash: str
+
+
+ def create_parser() -> Parser:
+     return Parser[EPDInfo](
+         klass=EPDInfo,
+         compiled_regexes=COMPILED_REGEXES,
+         version_ref=VERSION_REF,
+         expected_version=VERSION,
+         data_separator=PROMOTION_DATA_SEPARATOR,
+     )
+
+
+ class Renderer:
+     """
+     This class is only concerned with rendering text for MRs.
+     Most logic revolves around ruamel yaml modification.
+     This class makes testing for MergeRequestManager easier.
+
+     Note that this class is very susceptible to schema changes
+     as it mainly works on raw dicts.
+     """
+
+     def render_merge_request_content(
+         self,
+         current_content: str,
+         endpoints_to_add: list[dict],
+         endpoints_to_change: dict[str, dict],
+         endpoints_to_delete: list[str],
+     ) -> str:
+         """Update the app-interface app file for a merge request."""
+         yml = create_ruamel_instance(explicit_start=True)
+         content = yml.load(current_content)
+         app_endpoints = content.setdefault("endPoints", [])
+         for i, app_endpoint in enumerate(app_endpoints):
+             if app_endpoint["name"] in endpoints_to_delete:
+                 app_endpoints.remove(app_endpoint)
+             if app_endpoint["name"] in endpoints_to_change:
+                 app_endpoints[i] = endpoints_to_change[app_endpoint["name"]]
+         app_endpoints.extend(endpoints_to_add)
+         with StringIO() as stream:
+             yml.dump(content, stream)
+             return stream.getvalue()
+
+     def render_description(self, hash: str) -> str:
+         """Render the description for a merge request."""
+         return DESC.safe_substitute(EPDInfo(hash=hash).dict())
+
+     def render_title(self) -> str:
+         """Render the title for a merge request."""
+         return f"[auto] {INTEGRATION}: update application endpoints"
reconcile/endpoints_discovery/merge_request_manager.py ADDED
@@ -0,0 +1,175 @@
+ import hashlib
+ import json
+ import logging
+ from collections.abc import Sequence
+ from typing import Any, TypeAlias
+
+ from gitlab.exceptions import GitlabGetError
+ from pydantic import BaseModel
+
+ from reconcile.endpoints_discovery.merge_request import (
+     INTEGRATION,
+     INTEGRATION_REF,
+     LABEL,
+     EPDInfo,
+     Parser,
+     Renderer,
+ )
+ from reconcile.utils.gitlab_api import GitLabApi
+ from reconcile.utils.merge_request_manager.merge_request_manager import (
+     MergeRequestManagerBase,
+ )
+ from reconcile.utils.mr.base import MergeRequestBase
+ from reconcile.utils.mr.labels import AUTO_MERGE
+ from reconcile.utils.vcs import VCS
+
+
+ class EndpointsDiscoveryMR(MergeRequestBase):
+     name = "endpoints-discovery"
+
+     def __init__(self, title: str, description: str, labels: list[str]):
+         super().__init__()
+         self._title = title
+         self._description = description
+         self.labels = labels
+         self._commits: list[tuple[str, str, str]] = []
+
+     @property
+     def title(self) -> str:
+         return self._title
+
+     @property
+     def description(self) -> str:
+         return self._description
+
+     def add_commit(self, path: str, content: str, msg: str) -> None:
+         self._commits.append((path, content, msg))
+
+     def process(self, gitlab_cli: GitLabApi) -> None:
+         for path, content, msg in self._commits:
+             gitlab_cli.update_file(
+                 branch_name=self.branch,
+                 file_path=path,
+                 commit_message=msg,
+                 content=content,
+             )
+
+
+ class Endpoint(BaseModel):
+     # the current endpoint name to change or delete; it doesn't matter for new endpoints
+     name: str
+     # the endpoint data will be generated and rendered from the endpoint template resource
+     # see EndpointsDiscoveryIntegrationParams.endpoint_tmpl_resource
+     data: dict[str, Any] = {}
+
+     @property
+     def hash(self) -> str:
+         return hashlib.sha256(
+             json.dumps(self.dict(), sort_keys=True).encode()
+         ).hexdigest()
+
+
+ EndpointsToAdd: TypeAlias = list[Endpoint]
+ EndpointsToChange: TypeAlias = list[Endpoint]
+ EndpointsToDelete: TypeAlias = list[Endpoint]
+
+
+ class App(BaseModel):
+     name: str
+     path: str
+     endpoints_to_add: EndpointsToAdd = EndpointsToAdd()
+     endpoints_to_change: EndpointsToChange = EndpointsToChange()
+     endpoints_to_delete: EndpointsToDelete = EndpointsToDelete()
+
+     @property
+     def hash(self) -> str:
+         return hashlib.sha256(
+             f"""
+             {self.path}
+             {[i.hash for i in sorted(self.endpoints_to_add, key=lambda i: i.name)]}
+             {[i.hash for i in sorted(self.endpoints_to_change, key=lambda i: i.name)]}
+             {[i.hash for i in sorted(self.endpoints_to_delete, key=lambda i: i.name)]}
+             """.encode()
+         ).hexdigest()
+
+
+ def hash_apps(apps: Sequence[App]) -> str:
+     return hashlib.sha256(
+         ",".join(app.hash for app in sorted(apps, key=lambda i: i.name)).encode()
+     ).hexdigest()
+
+
+ class MergeRequestManager(MergeRequestManagerBase[EPDInfo]):
+     """
+     Manager for endpoints-discovery merge requests. This class
+     is responsible for housekeeping (closing old/bad MRs) and
+     opening new MRs for apps whose discovered endpoints have changed.
+
+     There is at most one open MR at a time, covering all apps.
+     Old or obsolete MRs are closed automatically.
+     """
+
+     def __init__(
+         self, vcs: VCS, renderer: Renderer, parser: Parser, auto_merge_enabled: bool
+     ):
+         super().__init__(vcs, parser, LABEL)
+         self._renderer = renderer
+         self._auto_merge_enabled = auto_merge_enabled
+
+     def create_merge_request(self, apps: Sequence[App]) -> None:
+         """Open a new MR for the given apps (if not already present), closing any outdated MR first."""
+         if not self._housekeeping_ran:
+             self.housekeeping()
+
+         apps_hash = hash_apps(apps)
+         # we support only one MR at a time for all apps
+         if mr := self._merge_request_already_exists({INTEGRATION_REF: INTEGRATION}):
+             if mr.mr_info.hash == apps_hash:
+                 logging.info(
+                     f"Found an open MR for {INTEGRATION} and it's up-to-date - doing nothing."
+                 )
+                 return None
+             logging.info(f"Found an outdated MR for {INTEGRATION} - closing it.")
+             self._vcs.close_app_interface_mr(
+                 mr.raw, "Closing this MR because it's outdated."
+             )
+             # don't open a new MR right now, because the deletion of the old MRs could be
+             # disabled. In this case, we would end up with multiple open MRs for the
+             # same apps.
+             return None
+
+         endpoints_discovery_mr = EndpointsDiscoveryMR(
+             title=self._renderer.render_title(),
+             description=self._renderer.render_description(hash=apps_hash),
+             labels=[LABEL] + ([AUTO_MERGE] if self._auto_merge_enabled else []),
+         )
+         for app in apps:
+             try:
+                 content = self._vcs.get_file_content_from_app_interface_master(
+                     file_path=app.path
+                 )
+             except GitlabGetError as e:
+                 if e.response_code == 404:
+                     logging.error(
+                         "The file %s does not exist any longer. Most likely qontract-server data is not in sync. This should resolve soon on its own.",
+                         app.path,
+                     )
+                     return None
+                 raise e
+             content = self._renderer.render_merge_request_content(
+                 current_content=content,
+                 endpoints_to_add=[item.data for item in app.endpoints_to_add],
+                 endpoints_to_change={
+                     item.name: item.data for item in app.endpoints_to_change
+                 },
+                 endpoints_to_delete=[item.name for item in app.endpoints_to_delete],
+             )
+             endpoints_discovery_mr.add_commit(
+                 path=f"data/{app.path.lstrip('/')}",
+                 content=content,
+                 msg=f"endpoints-discovery: update application endpoints for {app.name}",
+             )
+
+         logging.info("Open MR for %d app(s)", len(apps))
+         self._vcs.open_app_interface_merge_request(mr=endpoints_discovery_mr)
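For illustration (not part of the package diff): the MR hash is built from per-endpoint content hashes that are sorted before being combined, so the final value does not depend on discovery order. A self-contained sketch of that idea, using minimal stand-ins rather than the real Endpoint/App/hash_apps:

    import hashlib
    import json

    def endpoint_hash(name: str, data: dict) -> str:
        # Same idea as Endpoint.hash: stable JSON, then SHA-256.
        payload = json.dumps({"name": name, "data": data}, sort_keys=True)
        return hashlib.sha256(payload.encode()).hexdigest()

    a = endpoint_hash("svc-a", {"url": "a.example.com:443"})
    b = endpoint_hash("svc-b", {"url": "b.example.com:443"})

    # Sorting before joining makes the combined hash order-independent,
    # which is what lets the manager detect an already up-to-date MR.
    combined_ab = hashlib.sha256(",".join(sorted([a, b])).encode()).hexdigest()
    combined_ba = hashlib.sha256(",".join(sorted([b, a])).encode()).hexdigest()
    assert combined_ab == combined_ba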
reconcile/gql_definitions/endpoints_discovery/namespaces.py ADDED
@@ -0,0 +1,127 @@
+ """
+ Generated by qenerate plugin=pydantic_v1. DO NOT MODIFY MANUALLY!
+ """
+ from collections.abc import Callable  # noqa: F401 # pylint: disable=W0611
+ from datetime import datetime  # noqa: F401 # pylint: disable=W0611
+ from enum import Enum  # noqa: F401 # pylint: disable=W0611
+ from typing import (  # noqa: F401 # pylint: disable=W0611
+     Any,
+     Optional,
+     Union,
+ )
+
+ from pydantic import (  # noqa: F401 # pylint: disable=W0611
+     BaseModel,
+     Extra,
+     Field,
+     Json,
+ )
+
+ from reconcile.gql_definitions.fragments.oc_connection_cluster import OcConnectionCluster
+
+
+ DEFINITION = """
+ fragment CommonJumphostFields on ClusterJumpHost_v1 {
+   hostname
+   knownHosts
+   user
+   port
+   remotePort
+   identity {
+     ... VaultSecret
+   }
+ }
+
+ fragment OcConnectionCluster on Cluster_v1 {
+   name
+   serverUrl
+   internal
+   insecureSkipTLSVerify
+   jumpHost {
+     ...CommonJumphostFields
+   }
+   automationToken {
+     ...VaultSecret
+   }
+   clusterAdminAutomationToken {
+     ...VaultSecret
+   }
+   disable {
+     integrations
+   }
+ }
+
+ fragment VaultSecret on VaultSecret_v1 {
+   path
+   field
+   version
+   format
+ }
+
+ query EndPointsDiscoveryNamespaces {
+   namespaces: namespaces_v1 {
+     name
+     delete
+     clusterAdmin
+     cluster {
+       ... OcConnectionCluster
+     }
+     app {
+       path
+       name
+       endPoints {
+         name
+         url
+       }
+     }
+   }
+ }
+ """
+
+
+ class ConfiguredBaseModel(BaseModel):
+     class Config:
+         smart_union=True
+         extra=Extra.forbid
+
+
+ class AppEndPointsV1(ConfiguredBaseModel):
+     name: str = Field(..., alias="name")
+     url: str = Field(..., alias="url")
+
+
+ class AppV1(ConfiguredBaseModel):
+     path: str = Field(..., alias="path")
+     name: str = Field(..., alias="name")
+     end_points: Optional[list[AppEndPointsV1]] = Field(..., alias="endPoints")
+
+
+ class NamespaceV1(ConfiguredBaseModel):
+     name: str = Field(..., alias="name")
+     delete: Optional[bool] = Field(..., alias="delete")
+     cluster_admin: Optional[bool] = Field(..., alias="clusterAdmin")
+     cluster: OcConnectionCluster = Field(..., alias="cluster")
+     app: AppV1 = Field(..., alias="app")
+
+
+ class EndPointsDiscoveryNamespacesQueryData(ConfiguredBaseModel):
+     namespaces: Optional[list[NamespaceV1]] = Field(..., alias="namespaces")
+
+
+ def query(query_func: Callable, **kwargs: Any) -> EndPointsDiscoveryNamespacesQueryData:
+     """
+     This is a convenience function which queries and parses the data into
+     concrete types. It should be compatible with most GQL clients.
+     You do not have to use it to consume the generated data classes.
+     Alternatively, you can also mime and alternate the behavior
+     of this function in the caller.
+
+     Parameters:
+         query_func (Callable): Function which queries your GQL Server
+         kwargs: optional arguments that will be passed to the query function
+
+     Returns:
+         EndPointsDiscoveryNamespacesQueryData: queried data parsed into generated classes
+     """
+     raw_data: dict[Any, Any] = query_func(DEFINITION, **kwargs)
+     return EndPointsDiscoveryNamespacesQueryData(**raw_data)
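For illustration (not part of the package diff): the generated query() helper only needs a callable that executes DEFINITION against qontract-server and returns the raw JSON payload. A minimal sketch with a stubbed query function, assuming the package above is installed; the empty payload is hypothetical:

    from reconcile.gql_definitions.endpoints_discovery.namespaces import (
        DEFINITION,
        EndPointsDiscoveryNamespacesQueryData,
        query,
    )

    def fake_query_func(definition: str, **kwargs) -> dict:
        # Stand-in for a real GQL client; it would send the GraphQL document
        # to qontract-server and return its JSON response.
        assert definition == DEFINITION
        return {"namespaces": None}

    data = query(fake_query_func)
    assert isinstance(data, EndPointsDiscoveryNamespacesQueryData)
    print(data.namespaces)  # None -> get_namespaces() in the integration yields []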
@@ -1,64 +0,0 @@
- Metadata-Version: 2.1
- Name: qontract-reconcile
- Version: 0.10.1rc1020
- Summary: Collection of tools to reconcile services with their desired state as defined in the app-interface DB.
- Home-page: https://github.com/app-sre/qontract-reconcile
- Author: Red Hat App-SRE Team
- Author-email: sd-app-sre@redhat.com
- License: Apache License 2.0
- Classifier: Development Status :: 2 - Pre-Alpha
- Classifier: Programming Language :: Python
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.11
- Requires-Python: >=3.11
- Requires-Dist: sretoolbox ~=2.5
- Requires-Dist: Click <9.0,>=7.0
- Requires-Dist: gql ==3.1.0
- Requires-Dist: toml <0.11.0,>=0.10.0
- Requires-Dist: jsonpath-rw <1.5.0,>=1.4.0
- Requires-Dist: PyGithub <1.59,>=1.58
- Requires-Dist: hvac <0.8.0,>=0.7.0
- Requires-Dist: ldap3 <2.10.0,>=2.9.1
- Requires-Dist: anymarkup <0.9.0,>=0.7.0
- Requires-Dist: python-gitlab ~=4.6
- Requires-Dist: semver ~=3.0
- Requires-Dist: boto3 ==1.34.94
- Requires-Dist: botocore ==1.34.94
- Requires-Dist: urllib3 ~=2.2
- Requires-Dist: slack-sdk <4.0,>=3.10
- Requires-Dist: pypd <1.2.0,>=1.1.0
- Requires-Dist: Jinja2 <3.2.0,>=2.10.1
- Requires-Dist: jira ~=3.1
- Requires-Dist: pyOpenSSL ~=23.0
- Requires-Dist: ruamel.yaml <0.18.0,>=0.17.22
- Requires-Dist: terrascript ==0.9.0
- Requires-Dist: tabulate <0.9.0,>=0.8.6
- Requires-Dist: UnleashClient ~=5.11
- Requires-Dist: prometheus-client ~=0.8
- Requires-Dist: sentry-sdk ~=2.0
- Requires-Dist: jenkins-job-builder ~=4.3.0
- Requires-Dist: parse ==1.18.0
- Requires-Dist: sendgrid <6.5.0,>=6.4.8
- Requires-Dist: dnspython ~=2.1
- Requires-Dist: requests ~=2.32
- Requires-Dist: kubernetes ~=24.0
- Requires-Dist: websocket-client <0.55.0,>=0.35
- Requires-Dist: sshtunnel >=0.4.0
- Requires-Dist: croniter <1.1.0,>=1.0.15
- Requires-Dist: pydantic ~=1.10.6
- Requires-Dist: MarkupSafe ==2.1.1
- Requires-Dist: filetype ~=1.2.0
- Requires-Dist: psycopg2 ~=2.9
- Requires-Dist: packaging ~=23.1
- Requires-Dist: deepdiff ==6.7.1
- Requires-Dist: jsonpath-ng ==1.5.3
- Requires-Dist: networkx ~=2.8
- Requires-Dist: rich <14.0.0,>=13.3.0
- Requires-Dist: dateparser ~=1.1.7
- Requires-Dist: pyjwt ~=2.7
- Requires-Dist: requests-oauthlib ~=1.3
- Requires-Dist: dt ==1.1.61
- Requires-Dist: jsonpatch ~=1.33
- Requires-Dist: jsonpointer ~=2.4
- Requires-Dist: yamllint ==1.34.0
-