qontract-reconcile 0.10.2.dev216__py3-none-any.whl → 0.10.2.dev217__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -243,7 +243,7 @@ def close_item(
     enable_closing: bool,
     item_type: str,
     item: ProjectIssue | ProjectMergeRequest,
-):
+) -> None:
     if enable_closing:
         logging.info([
             "close_item",
@@ -263,32 +263,31 @@ def close_item(


 def handle_stale_items(
-    dry_run,
-    gl,
-    days_interval,
-    enable_closing,
-    items,
-    item_type,
-):
+    dry_run: bool,
+    gl: GitLabApi,
+    days_interval: int,
+    enable_closing: bool,
+    items: Iterable[ProjectIssue | ProjectMergeRequest],
+    item_type: str,
+) -> None:
     LABEL = "stale"

     now = datetime.utcnow()
     for item in items:
-        item_iid = item.attributes.get("iid")
         if AUTO_MERGE in item.labels:
             if item.merge_status == MRStatus.UNCHECKED:
                 # this call triggers a status recheck
-                item = gl.get_merge_request(item_iid)
+                item = gl.get_merge_request(item.iid)
             if item.merge_status == MRStatus.CANNOT_BE_MERGED:
                 close_item(dry_run, gl, enable_closing, item_type, item)
-        update_date = datetime.strptime(item.attributes.get("updated_at"), DATE_FORMAT)
+        update_date = datetime.strptime(item.updated_at, DATE_FORMAT)

         # if item is over days_interval
         current_interval = now.date() - update_date.date()
         if current_interval > timedelta(days=days_interval):
             # if item does not have 'stale' label - add it
             if LABEL not in item.labels:
-                logging.info(["add_label", gl.project.name, item_type, item_iid, LABEL])
+                logging.info(["add_label", gl.project.name, item_type, item.iid, LABEL])
                 if not dry_run:
                     gl.add_label_with_note(item, LABEL)
             # if item has 'stale' label - close it
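The `item.iid` and `item.updated_at` replacements above lean on python-gitlab exposing the fields returned by the API as plain attributes on its REST objects, so the explicit `item.attributes.get(...)` lookups become unnecessary. A minimal sketch of the idea, standalone and not taken from the package (the date format shown is an assumption; the module defines its own DATE_FORMAT constant):

    # python-gitlab RESTObjects proxy API fields as attributes, so mr.iid and
    # mr.updated_at read the same data as mr.attributes.get("iid") / .get("updated_at").
    from datetime import datetime

    ASSUMED_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"  # placeholder; not the package's constant

    def last_update(mr) -> datetime:
        # direct attribute access, as the rewritten lines in the hunk above do
        return datetime.strptime(mr.updated_at, ASSUMED_DATE_FORMAT)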
@@ -310,8 +309,7 @@ def handle_stale_items(
                     continue

                 cancel_notes_dates = [
-                    datetime.strptime(note.attributes.get("updated_at"), DATE_FORMAT)
-                    for note in cancel_notes
+                    datetime.strptime(item.updated_at, DATE_FORMAT) for note in cancel_notes
                 ]
                 latest_cancel_note_date = max(d for d in cancel_notes_dates)
                 # if the latest cancel note is under
@@ -322,20 +320,20 @@ def handle_stale_items(
                         "remove_label",
                         gl.project.name,
                         item_type,
-                        item_iid,
+                        item.iid,
                         LABEL,
                     ])
                     if not dry_run:
                         gl.remove_label(item, LABEL)


-def is_good_to_merge(labels):
+def is_good_to_merge(labels: Iterable[str]) -> bool:
     return any(m in MERGE_LABELS_PRIORITY for m in labels) and not any(
         b in HOLD_LABELS for b in labels
     )


-def is_rebased(mr, gl: GitLabApi) -> bool:
+def is_rebased(mr: ProjectMergeRequest, gl: GitLabApi) -> bool:
     target_branch = mr.target_branch
     head = cast(
         list[ProjectCommit],
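`is_rebased` keeps using `typing.cast` to tell the type checker that the commits returned by the GitLab call are a `list[ProjectCommit]`; `cast` changes nothing at runtime. A tiny illustrative sketch of that pattern (the function and data below are made up):

    # cast() only informs the type checker; it performs no conversion or validation.
    from typing import Any, cast

    def first_commit_id(raw_commits: Any) -> str:
        commits = cast(list[dict[str, str]], raw_commits)  # assumed shape of the payload
        return commits[0]["id"]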
@@ -471,14 +469,14 @@ def preprocess_merge_requests(


 def rebase_merge_requests(
-    dry_run,
-    gl,
-    rebase_limit,
-    pipeline_timeout=None,
-    wait_for_pipeline=False,
-    users_allowed_to_label=None,
-    state=None,
-):
+    dry_run: bool,
+    gl: GitLabApi,
+    rebase_limit: int,
+    state: State,
+    pipeline_timeout: int | None = None,
+    wait_for_pipeline: bool = False,
+    users_allowed_to_label: Iterable[str] | None = None,
+) -> None:
     rebases = 0
     merge_requests = [
         item["mr"]
@@ -542,20 +540,20 @@ def rebase_merge_requests(

 @retry(max_attempts=10, hook=_log_exception)
 def merge_merge_requests(
-    dry_run,
+    dry_run: bool,
     gl: GitLabApi,
     project_merge_requests: list[ProjectMergeRequest],
     reload_toggle: ReloadToggle,
-    merge_limit,
-    rebase,
+    merge_limit: int,
+    rebase: bool,
     app_sre_usernames: Set[str],
-    pipeline_timeout=None,
-    insist=False,
-    wait_for_pipeline=False,
-    users_allowed_to_label=None,
-    must_pass=None,
-    state=None,
-):
+    state: State,
+    pipeline_timeout: int | None = None,
+    insist: bool = False,
+    wait_for_pipeline: bool = False,
+    users_allowed_to_label: Iterable[str] | None = None,
+    must_pass: Iterable[str] | None = None,
+) -> None:
     merges = 0
     if reload_toggle.reload:
         project_merge_requests = gl.get_merge_requests(state=MRState.OPENED)
@@ -654,7 +652,7 @@ def publish_access_token_expiration_metrics(gl: GitLabApi) -> None:
        gitlab_token_expiration.remove(pat.name)


-def run(dry_run, wait_for_pipeline):
+def run(dry_run: bool, wait_for_pipeline: bool) -> None:
     default_days_interval = 15
     default_limit = 8
     default_enable_closing = False
@@ -711,45 +709,46 @@ def run(dry_run, wait_for_pipeline):
         must_pass = hk.get("must_pass")
         try:
             merge_merge_requests(
-                dry_run,
-                gl,
-                project_merge_requests,
-                reload_toggle,
-                limit,
-                rebase,
-                app_sre_usernames,
-                pipeline_timeout,
+                dry_run=dry_run,
+                gl=gl,
+                project_merge_requests=project_merge_requests,
+                reload_toggle=reload_toggle,
+                merge_limit=limit,
+                rebase=rebase,
+                app_sre_usernames=app_sre_usernames,
+                state=state,
+                pipeline_timeout=pipeline_timeout,
                 insist=True,
                 wait_for_pipeline=wait_for_pipeline,
                 users_allowed_to_label=users_allowed_to_label,
                 must_pass=must_pass,
-                state=state,
             )
         except Exception:
             logging.error(
                 "All retries failed, trying to rerun merge_merge_requests() again."
             )
             merge_merge_requests(
-                dry_run,
-                gl,
-                project_merge_requests,
-                reload_toggle,
-                limit,
-                rebase,
-                app_sre_usernames,
-                pipeline_timeout,
+                dry_run=dry_run,
+                gl=gl,
+                project_merge_requests=project_merge_requests,
+                reload_toggle=reload_toggle,
+                merge_limit=limit,
+                rebase=rebase,
+                app_sre_usernames=app_sre_usernames,
+                state=state,
+                pipeline_timeout=pipeline_timeout,
+                insist=False,
                 wait_for_pipeline=wait_for_pipeline,
                 users_allowed_to_label=users_allowed_to_label,
                 must_pass=must_pass,
-                state=state,
             )
         if rebase:
             rebase_merge_requests(
-                dry_run,
-                gl,
-                limit,
+                dry_run=dry_run,
+                gl=gl,
+                rebase_limit=limit,
+                state=state,
                 pipeline_timeout=pipeline_timeout,
                 wait_for_pipeline=wait_for_pipeline,
                 users_allowed_to_label=users_allowed_to_label,
-                state=state,
             )
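The `run()` hunk above moves the `merge_merge_requests` and `rebase_merge_requests` calls to keyword arguments, which is what keeps them correct now that the required `state` parameter sits ahead of the optional ones in the new signatures. A short standalone sketch of why keyword arguments are the safer style for wide signatures (function and parameter names below are illustrative only):

    # With keyword arguments, reordering parameters in the signature cannot
    # silently shift values into the wrong parameter.
    def process(dry_run: bool, limit: int, state: object | None = None, insist: bool = False) -> None:
        print(dry_run, limit, state, insist)

    process(False, 8, None, True)                              # positional: breaks if the signature is reordered
    process(dry_run=False, limit=8, state=None, insist=True)   # keyword: order-independent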
@@ -1,10 +1,12 @@
 import itertools
 import logging
+from collections.abc import Callable, Iterable
 from dataclasses import dataclass
 from typing import Any, cast

 from gitlab.exceptions import GitlabGetError
 from gitlab.v4.objects import (
+    GroupMember,
     Project,
     SharedProject,
 )
@@ -159,7 +161,9 @@ class GroupPermissionHandler:
            raise ExceptionGroup("Reconcile errors occurred", errors)


-def get_members_to_add(repo, gl, app_sre):
+def get_members_to_add(
+    repo: str, gl: GitLabApi, app_sre: list[GroupMember]
+) -> list[dict[str, Any]]:
     maintainers = get_all_app_sre_maintainers(repo, gl, app_sre)
     if maintainers is None:
         return []
@@ -172,7 +176,9 @@ def get_members_to_add(repo, gl, app_sre):
     return members_to_add


-def get_all_app_sre_maintainers(repo, gl, app_sre):
+def get_all_app_sre_maintainers(
+    repo: str, gl: GitLabApi, app_sre: list[GroupMember]
+) -> list[str]:
     app_sre_user_ids = [user.id for user in app_sre]
     chunks = batches.batched(app_sre_user_ids, PAGE_SIZE)
     app_sre_maintainers = (
@@ -182,12 +188,14 @@ def get_all_app_sre_maintainers(repo, gl, app_sre):
     return list(itertools.chain.from_iterable(app_sre_maintainers))


-def create_user_ids_query(ids):
+def create_user_ids_query(ids: Iterable[str]) -> dict[str, str]:
     return {"user_ids": ",".join(str(id) for id in ids)}


 @defer
-def run(dry_run, thread_pool_size=10, defer=None):
+def run(
+    dry_run: bool, thread_pool_size: int = 10, defer: Callable | None = None
+) -> None:
     instance = queries.get_gitlab_instance()
     settings = queries.get_app_interface_settings()
     gl = GitLabApi(instance, settings=settings)
@@ -221,7 +229,7 @@ def share_project_with_group_members(
        gl.add_project_member(m["repo"], m["user"])


-def early_exit_desired_state(*args, **kwargs) -> dict[str, Any]:
+def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
     instance = queries.get_gitlab_instance()
     return {
         "instance": instance,
@@ -1,5 +1,6 @@
 import logging
 import sys
+from collections.abc import Callable
 from typing import Any

 from reconcile import queries
@@ -10,14 +11,15 @@ QONTRACT_INTEGRATION = "gitlab-projects"


 @defer
-def run(dry_run, defer=None):
+def run(dry_run: bool, defer: Callable | None = None) -> None:
     instance = queries.get_gitlab_instance()
     settings = queries.get_app_interface_settings()
     code_components = queries.get_code_components()
     app_int_repos = [c["url"] for c in code_components]
     saas_bundle_repos = [c["url"] for c in code_components if c["resource"] == "bundle"]
     gl = GitLabApi(instance, settings=settings)
-    defer(gl.cleanup)
+    if defer:
+        defer(gl.cleanup)

     project_requests = instance["projectRequests"] or []
     error = False
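Here and in several integrations below, `run()` is wrapped in the `@defer` decorator, which injects a `defer` callable for registering cleanup work; once that parameter is typed as `Callable | None = None`, the call has to be guarded with `if defer:`. A rough, simplified sketch of the general pattern (this is not the actual `reconcile.utils.defer` implementation):

    # Simplified defer-style decorator: callbacks registered through the injected
    # `defer` argument run when the wrapped function finishes.
    import functools
    from collections.abc import Callable

    def defer(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            callbacks: list[Callable[[], None]] = []
            try:
                return func(*args, defer=callbacks.append, **kwargs)
            finally:
                for cb in reversed(callbacks):
                    cb()
        return wrapper

    @defer
    def run(dry_run: bool, defer: Callable | None = None) -> None:
        resource = open("/dev/null")  # stand-in for GitLabApi
        if defer:  # the guard the diff adds, needed because the type allows None
            defer(resource.close)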
@@ -45,7 +47,7 @@ def run(dry_run, defer=None):
     sys.exit(error)


-def early_exit_desired_state(*args, **kwargs) -> dict[str, Any]:
+def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
     instance = queries.get_gitlab_instance()
     return {
         "instance": instance,
@@ -2,6 +2,7 @@ import logging
 import os
 import sys
 from collections.abc import (
+    Callable,
     Iterable,
     Mapping,
     Sequence,
@@ -119,7 +120,7 @@ def _build_helm_integration_spec(
     integration_name: str,
     managed: IntegrationManagedV1,
     shard_manager: IntegrationShardManager,
-):
+) -> HelmIntegrationSpec:
     integration_spec = managed.spec.dict(by_alias=True)
     shard_specs = shard_manager.build_integration_shards(integration_name, managed)
     his = HelmIntegrationSpec(
@@ -151,7 +152,7 @@ class IntegrationsEnvironment(BaseModel):

 def collect_integrations_environment(
     integrations: Iterable[IntegrationV1],
-    environment_name: str,
+    environment_name: str | None,
     shard_manager: IntegrationShardManager,
 ) -> list[IntegrationsEnvironment]:
     int_envs: dict[str, IntegrationsEnvironment] = {}
@@ -210,7 +211,7 @@ def fetch_desired_state(
     upstream: str,
     image: str,
     image_tag_from_ref: Mapping[str, str] | None,
-):
+) -> None:
     for ie in integrations_environments:
         oc_resources = construct_oc_resources(ie, upstream, image, image_tag_from_ref)
         for r in oc_resources:
@@ -230,17 +231,17 @@ def filter_integrations(

 @defer
 def run(
-    dry_run,
-    environment_name,
+    dry_run: bool,
+    environment_name: str | None,
     integration_runtime_meta: dict[str, IntegrationMeta],
-    thread_pool_size=10,
-    internal=None,
-    use_jump_host=True,
-    image_tag_from_ref=None,
-    upstream=None,
-    image=None,
-    defer=None,
-):
+    thread_pool_size: int = 10,
+    internal: bool = False,
+    use_jump_host: bool = True,
+    image_tag_from_ref: dict[str, str] | None = None,
+    upstream: str | None = None,
+    image: str | None = None,
+    defer: Callable | None = None,
+) -> None:
     # Beware, environment_name can be empty! It's optional to set it!
     # If not set, all environments should be considered.

@@ -269,41 +270,31 @@ def run(
         logging.debug("Nothing to do, exiting.")
         sys.exit(ExitCodes.SUCCESS)

-    fetch_args = {
-        "namespaces": [
+    ri, oc_map = ob.fetch_current_state(
+        namespaces=[
             ie.namespace.dict(by_alias=True) for ie in integration_environments
         ],
-        "thread_pool_size": thread_pool_size,
-        "integration": QONTRACT_INTEGRATION,
-        "integration_version": QONTRACT_INTEGRATION_VERSION,
-        "override_managed_types": ["Deployment", "StatefulSet", "CronJob", "Service"],
-        "internal": internal,
-        "use_jump_host": use_jump_host,
-    }
-
-    if not image:
-        image = IMAGE_DEFAULT
-
-    if upstream:
-        use_upstream = True
-        fetch_args["caller"] = upstream
-    else:
-        # Not set to fetch_args on purpose, fallback for cases where caller is not yet set
-        use_upstream = False
-        upstream = UPSTREAM_DEFAULT
-
-    ri, oc_map = ob.fetch_current_state(**fetch_args)
-    defer(oc_map.cleanup)
+        thread_pool_size=thread_pool_size,
+        integration=QONTRACT_INTEGRATION,
+        integration_version=QONTRACT_INTEGRATION_VERSION,
+        override_managed_types=["Deployment", "StatefulSet", "CronJob", "Service"],
+        internal=internal,
+        use_jump_host=use_jump_host,
+        caller=upstream,
+    )
+    if defer:
+        defer(oc_map.cleanup)

     fetch_desired_state(
-        integration_environments, ri, upstream, image, image_tag_from_ref
+        integration_environments,
+        ri,
+        upstream or UPSTREAM_DEFAULT,
+        image or IMAGE_DEFAULT,
+        image_tag_from_ref,
     )

     ob.publish_metrics(ri, QONTRACT_INTEGRATION)
-    if use_upstream:
-        ob.realize_data(dry_run, oc_map, ri, thread_pool_size, caller=upstream)
-    else:
-        ob.realize_data(dry_run, oc_map, ri, thread_pool_size)
+    ob.realize_data(dry_run, oc_map, ri, thread_pool_size, caller=upstream)

     if ri.has_error_registered():
         sys.exit(ExitCodes.ERROR)
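The last hunk above drops the intermediate `fetch_args` dict (previously expanded with `**`) in favour of calling `ob.fetch_current_state(...)` with explicit keyword arguments, and collapses the `use_upstream`/default branching into `upstream or UPSTREAM_DEFAULT` and `image or IMAGE_DEFAULT` at the call sites. A small standalone sketch of that simplification (names and values are illustrative):

    # Passing keyword arguments directly instead of building a dict for ** expansion,
    # and using `or` to fall back to a default when the value is None or empty.
    UPSTREAM_DEFAULT = "upstream"          # placeholder value
    IMAGE_DEFAULT = "quay.io/example/app"  # placeholder value

    def fetch_current_state(thread_pool_size: int, internal: bool, caller: str | None = None) -> dict:
        return {"threads": thread_pool_size, "internal": internal, "caller": caller}

    def run(upstream: str | None, image: str | None) -> None:
        state = fetch_current_state(thread_pool_size=10, internal=False, caller=upstream)
        effective_upstream = upstream or UPSTREAM_DEFAULT  # same fallback the old if/else provided
        effective_image = image or IMAGE_DEFAULT           # replaces the old `if not image:` block
        print(state, effective_upstream, effective_image)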
@@ -46,7 +46,7 @@ def init_jjb(
     return JJB(configs, secret_reader=secret_reader, print_only=print_only)


-def validate_repos_and_admins(jjb: JJB):
+def validate_repos_and_admins(jjb: JJB) -> None:
     jjb_repos = jjb.get_repos()
     app_int_repos = queries.get_repos()
     missing_repos = [r for r in jjb_repos if r not in app_int_repos]
@@ -2,6 +2,7 @@ import logging
 import operator
 import re
 import time
+from typing import Any

 from reconcile import queries
 from reconcile.utils.jenkins_api import JenkinsApi
@@ -10,11 +11,13 @@ from reconcile.utils.secret_reader import SecretReader
 QONTRACT_INTEGRATION = "jenkins-job-builds-cleaner"


-def hours_to_ms(hours):
+def hours_to_ms(hours: int) -> int:
     return hours * 60 * 60 * 1000


-def delete_builds(jenkins, builds_todel, dry_run=True):
+def delete_builds(
+    jenkins: JenkinsApi, builds_todel: list[dict[str, Any]], dry_run: bool = True
+) -> None:
     delete_builds_count = len(builds_todel)
     for idx, build in enumerate(builds_todel, start=1):
         job_name = build["job_name"]
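`delete_builds` follows the integration-wide dry-run convention: log the intended action for every build, and only call the Jenkins API when `dry_run` is false. A minimal standalone sketch of that convention (the `delete_build` method name below is a stand-in, not a confirmed JenkinsApi call):

    # dry-run convention: always log the planned action, only execute it when dry_run is False.
    import logging
    from typing import Any

    def delete_builds(jenkins: Any, builds_todel: list[dict[str, Any]], dry_run: bool = True) -> None:
        total = len(builds_todel)
        for idx, build in enumerate(builds_todel, start=1):
            logging.info(["delete_build", build["job_name"], build["build_id"], f"{idx}/{total}"])
            if not dry_run:
                jenkins.delete_build(build["job_name"], build["build_id"])  # hypothetical method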
@@ -35,7 +38,7 @@ def delete_builds(jenkins, builds_todel, dry_run=True):
            logging.exception(msg)


-def get_last_build_ids(builds):
+def get_last_build_ids(builds: list[dict[str, Any]]) -> list[str]:
     builds_to_keep = []
     sorted_builds = sorted(builds, key=operator.itemgetter("timestamp"), reverse=True)
     if sorted_builds:
@@ -49,7 +52,9 @@ def get_last_build_ids(builds):
     return builds_to_keep


-def find_builds(jenkins, job_names, rules):
+def find_builds(
+    jenkins: JenkinsApi, job_names: list[str], rules: list[dict[str, Any]]
+) -> list[dict[str, Any]]:
     # Current time in ms
     time_ms = time.time() * 1000

@@ -78,7 +83,7 @@ def find_builds(jenkins, job_names, rules):
     return builds_found


-def run(dry_run):
+def run(dry_run: bool) -> None:
     jenkins_instances = queries.get_jenkins_instances()
     secret_reader = SecretReader(queries.get_secret_reader_settings())

@@ -1,4 +1,5 @@
 import logging
+from collections.abc import Iterable

 from reconcile import queries
 from reconcile.jenkins_job_builder import init_jjb
@@ -8,7 +9,9 @@ from reconcile.utils.secret_reader import SecretReader
 QONTRACT_INTEGRATION = "jenkins-job-cleaner"


-def get_managed_job_names(job_names, managed_projects):
+def get_managed_job_names(
+    job_names: Iterable[str], managed_projects: Iterable[str]
+) -> list[str]:
     managed_jobs = set()
     for job_name in job_names:
         for managed_project in managed_projects:
@@ -18,13 +21,13 @@ def get_managed_job_names(job_names, managed_projects):
     return list(managed_jobs)


-def get_desired_job_names(instance_name: str, secret_reader: SecretReader):
+def get_desired_job_names(instance_name: str, secret_reader: SecretReader) -> list[str]:
     jjb = init_jjb(secret_reader)
     desired_jobs = jjb.get_all_jobs(instance_name=instance_name)[instance_name]
     return [j["name"] for j in desired_jobs]


-def run(dry_run):
+def run(dry_run: bool) -> None:
     jenkins_instances = queries.get_jenkins_instances()
     secret_reader = SecretReader(queries.get_secret_reader_settings())

@@ -1,4 +1,5 @@
 import logging
+from collections.abc import Iterable, Mapping

 from reconcile import queries
 from reconcile.utils import (
@@ -76,8 +77,8 @@ def get_jenkins_map() -> dict[str, JenkinsApi]:
     return jenkins_map


-def get_current_state(jenkins_map):
-    current_state = []
+def get_current_state(jenkins_map: Mapping[str, JenkinsApi]) -> list[dict[str, str]]:
+    current_state: list[dict[str, str]] = []

     for instance, jenkins in jenkins_map.items():
         roles = jenkins.get_all_roles()
@@ -97,11 +98,11 @@ def get_current_state(jenkins_map):
     return current_state


-def get_desired_state():
+def get_desired_state() -> list[dict[str, str]]:
     gqlapi = gql.get_api()
     roles: list[dict] = expiration.filter(gqlapi.query(ROLES_QUERY)["roles"])

-    desired_state = []
+    desired_state: list[dict[str, str]] = []
     for r in roles:
         for p in r["permissions"]:
             if p["service"] != "jenkins-role":
@@ -128,7 +129,9 @@ def get_desired_state():
     return desired_state


-def calculate_diff(current_state, desired_state):
+def calculate_diff(
+    current_state: Iterable[dict[str, str]], desired_state: Iterable[dict[str, str]]
+) -> list[dict[str, str]]:
     diff = []
     users_to_assign = subtract_states(
         desired_state, current_state, "assign_role_to_user"
@@ -142,7 +145,11 @@ def calculate_diff(current_state, desired_state):
     return diff


-def subtract_states(from_state, subtract_state, action):
+def subtract_states(
+    from_state: Iterable[dict[str, str]],
+    subtract_state: Iterable[dict[str, str]],
+    action: str,
+) -> list[dict[str, str]]:
     result = []

     for f_user in from_state:
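`calculate_diff` and `subtract_states` implement the usual current-versus-desired reconciliation diff: entries present only in the desired state become assign actions, and entries present only in the current state become unassign actions. A compact standalone sketch of the idea (the dict keys match the `act` hunk further down; the set-based implementation is illustrative, not the package's):

    # Subtract one list of role-assignment dicts from another and tag the
    # leftovers with an action, mirroring what subtract_states does above.
    from collections.abc import Iterable

    def subtract_states(
        from_state: Iterable[dict[str, str]],
        subtract_state: Iterable[dict[str, str]],
        action: str,
    ) -> list[dict[str, str]]:
        keys = ("instance", "role", "user")
        existing = {tuple(s[k] for k in keys) for s in subtract_state}
        return [
            {**f, "action": action}
            for f in from_state
            if tuple(f[k] for k in keys) not in existing
        ]

    current = [{"instance": "ci", "role": "admin", "user": "alice"}]
    desired = [
        {"instance": "ci", "role": "admin", "user": "alice"},
        {"instance": "ci", "role": "admin", "user": "bob"},
    ]
    diff = subtract_states(desired, current, "assign_role_to_user")
    # -> [{'instance': 'ci', 'role': 'admin', 'user': 'bob', 'action': 'assign_role_to_user'}]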
@@ -163,7 +170,7 @@ def subtract_states(from_state, subtract_state, action):
     return result


-def act(diff, jenkins_map):
+def act(diff: dict[str, str], jenkins_map: Mapping[str, JenkinsApi]) -> None:
     instance = diff["instance"]
     role = diff["role"]
     user = diff["user"]
@@ -177,7 +184,7 @@ def act(diff, jenkins_map):
        raise Exception(f"invalid action: {action}")


-def run(dry_run):
+def run(dry_run: bool) -> None:
     jenkins_map = get_jenkins_map()
     current_state = get_current_state(jenkins_map)
     desired_state = get_desired_state()
@@ -1,5 +1,6 @@
 import copy
 import logging
+from collections.abc import Callable, MutableMapping
 from typing import Any

 from reconcile import queries
@@ -17,7 +18,9 @@ def get_gitlab_api(secret_reader: SecretReader) -> GitLabApi:
     return GitLabApi(instance, secret_reader=secret_reader)


-def get_hooks_to_add(desired_state, gl):
+def get_hooks_to_add(
+    desired_state: MutableMapping, gl: GitLabApi
+) -> MutableMapping[str, list[dict[str, Any]]]:
     diff = copy.deepcopy(desired_state)
     for project_url, desired_hooks in diff.items():
         try:
@@ -45,7 +48,7 @@ def get_hooks_to_add(desired_state, gl):


 @defer
-def run(dry_run, defer=None):
+def run(dry_run: bool, defer: Callable | None = None) -> None:
     secret_reader = SecretReader(queries.get_secret_reader_settings())
     jjb: JJB = init_jjb(secret_reader)
     gl = get_gitlab_api(secret_reader)
@@ -63,7 +66,7 @@ def run(dry_run, defer=None):
            gl.create_project_hook(project_url, h)


-def early_exit_desired_state(*args, **kwargs) -> dict[str, Any]:
+def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
     return {
         "jenkins_configs": queries.get_jenkins_configs(),
     }
@@ -1,4 +1,5 @@
 import logging
+from collections.abc import Callable
 from typing import Any

 from reconcile import queries
@@ -9,7 +10,7 @@ QONTRACT_INTEGRATION = "jenkins-webhooks-cleaner"


 @defer
-def run(dry_run, defer=None):
+def run(dry_run: bool, defer: Callable | None = None) -> None:
     instance = queries.get_gitlab_instance()
     settings = queries.get_app_interface_settings()
     gl = GitLabApi(instance, settings=settings)
@@ -40,7 +41,7 @@ def run(dry_run, defer=None):
            logging.warning("no access to project: " + repo)


-def early_exit_desired_state(*args, **kwargs) -> dict[str, Any]:
+def early_exit_desired_state(*args: Any, **kwargs: Any) -> dict[str, Any]:
     return {
         "previous_urls": queries.get_jenkins_instances_previous_urls(),
     }