autopkg-wrapper 2026.2.6-py3-none-any.whl → 2026.2.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,10 @@ from autopkg_wrapper.models.recipe import Recipe
11
11
  from autopkg_wrapper.notifier import slack
12
12
  from autopkg_wrapper.utils.args import setup_args
13
13
  from autopkg_wrapper.utils.logging import setup_logger
14
- from autopkg_wrapper.utils.recipe_batching import build_recipe_batches, recipe_type_for
14
+ from autopkg_wrapper.utils.recipe_batching import (
15
+ build_recipe_batches,
16
+ describe_recipe_batches,
17
+ )
15
18
  from autopkg_wrapper.utils.recipe_ordering import order_recipe_list
16
19
  from autopkg_wrapper.utils.report_processor import process_reports
17
20
 
@@ -27,7 +30,7 @@ def get_override_repo_info(args):
27
30
  autopkg_prefs_path = Path(args.autopkg_prefs).resolve()
28
31
 
29
32
  if autopkg_prefs_path.suffix == ".json":
30
- with open(autopkg_prefs_path, "r") as f:
33
+ with open(autopkg_prefs_path) as f:
31
34
  autopkg_prefs = json.load(f)
32
35
  elif autopkg_prefs_path.suffix == ".plist":
33
36
  autopkg_prefs = plistlib.loads(autopkg_prefs_path.read_bytes())
@@ -75,6 +78,12 @@ def get_override_repo_info(args):
75
78
 
76
79
 
77
80
  def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
81
+ if getattr(args, "dry_run", False):
82
+ logging.info(
83
+ "Dry run: would update trust info in override repo for %s",
84
+ recipe.identifier,
85
+ )
86
+ return
78
87
  logging.debug(f"recipe.verified: {recipe.verified}")
79
88
  logging.debug(f"disable_recipe_trust_check: {disable_recipe_trust_check}")
80
89
 
@@ -103,7 +112,7 @@ def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
103
112
 
104
113
  git.stage_recipe(git_info)
105
114
  git.commit_recipe(
106
- git_info, message=f"Updating Trust Info for {recipe.name}"
115
+ git_info, message=f"Updating Trust Info for {recipe.identifier}"
107
116
  )
108
117
  git.pull_branch(git_info)
109
118
  git.push_branch(git_info)
@@ -120,21 +129,21 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
120
129
  """
121
130
  recipe_list = None
122
131
 
123
- logging.info(f"Recipes: {recipes}") if recipes else None
124
- logging.info(f"Recipe List: {recipe_file}") if recipe_file else None
132
+ logging.debug(f"Recipes: {recipes}") if recipes else None
133
+ logging.debug(f"Recipe List: {recipe_file}") if recipe_file else None
125
134
 
126
135
  if recipe_file:
127
136
  if recipe_file.suffix == ".json":
128
- with open(recipe_file, "r") as f:
137
+ with open(recipe_file) as f:
129
138
  recipe_list = json.load(f)
130
139
  elif recipe_file.suffix in {".yaml", ".yml"}:
131
140
  from ruamel.yaml import YAML
132
141
 
133
142
  yaml = YAML(typ="safe")
134
- with open(recipe_file, "r", encoding="utf-8") as f:
143
+ with open(recipe_file, encoding="utf-8") as f:
135
144
  recipe_list = yaml.load(f)
136
145
  elif recipe_file.suffix == ".txt":
137
- with open(recipe_file, "r") as f:
146
+ with open(recipe_file) as f:
138
147
  recipe_list = f.read().splitlines()
139
148
  if recipes:
140
149
  if isinstance(recipes, list):
@@ -205,6 +214,21 @@ def parse_post_processors(post_processors):
205
214
 
206
215
 
207
216
  def process_recipe(recipe, disable_recipe_trust_check, args):
217
+ if getattr(args, "dry_run", False):
218
+ logging.info("Dry run: processing recipe %s", recipe.identifier)
219
+ if disable_recipe_trust_check:
220
+ logging.info(
221
+ "Dry run: trust verification disabled for %s", recipe.identifier
222
+ )
223
+ recipe.verified = None
224
+ else:
225
+ logging.info("Dry run: would verify trust info for %s", recipe.identifier)
226
+ logging.info("Dry run: would run recipe %s", recipe.identifier)
227
+ logging.info(
228
+ "Dry run: would evaluate trust update flow for %s",
229
+ recipe.identifier,
230
+ )
231
+ return recipe
208
232
  if disable_recipe_trust_check:
209
233
  logging.debug("Setting Recipe verification to None")
210
234
  recipe.verified = None
@@ -230,7 +254,7 @@ def main():
230
254
  setup_logger(args.debug if args.debug else False)
231
255
  logging.info("Running autopkg_wrapper")
232
256
 
233
- override_repo_info = get_override_repo_info(args)
257
+ override_repo_info = None
234
258
 
235
259
  post_processors_list = parse_post_processors(post_processors=args.post_processors)
236
260
  recipe_list = parse_recipe_list(
@@ -242,12 +266,22 @@ def main():
242
266
 
243
267
  failed_recipes = []
244
268
 
269
+ if getattr(args, "dry_run", False):
270
+ logging.info("Dry run enabled: no external commands will be executed")
271
+ if args.disable_git_commands:
272
+ logging.info("Dry run: git commands already disabled")
273
+
245
274
  # Run recipes concurrently using a thread pool to parallelize subprocess calls
246
275
  max_workers = max(1, int(getattr(args, "concurrency", 1)))
247
276
  logging.info(f"Running recipes with concurrency={max_workers}")
248
277
 
249
278
  def run_one(r: Recipe):
250
- logging.info(f"Processing Recipe: {r.name}")
279
+ logging.info(f"Processing Recipe: {r.identifier}")
280
+ if args.dry_run:
281
+ logging.info(
282
+ "Dry run: would process recipe %s with trust checks and run",
283
+ r.identifier,
284
+ )
251
285
  process_recipe(
252
286
  recipe=r,
253
287
  disable_recipe_trust_check=args.disable_recipe_trust_check,
@@ -261,58 +295,112 @@ def main():
261
295
  recipe_list=recipe_list,
262
296
  recipe_processing_order=args.recipe_processing_order,
263
297
  )
264
- for batch in batches:
265
- batch_type = recipe_type_for(batch[0]) if batch else ""
266
- logging.info(
267
- f"Running {len(batch)} recipes for type={batch_type or 'unknown'}"
268
- )
298
+ logging.info("Recipe processing batches:")
299
+ batch_descriptions = describe_recipe_batches(batches)
300
+ for batch_desc in batch_descriptions:
301
+ batch_type = batch_desc.get("type") or "unknown"
302
+ logging.info(f"Batch type={batch_type} count={batch_desc.get('count', 0)}")
303
+ for batch, batch_desc in zip(batches, batch_descriptions, strict=False):
304
+ batch_type = batch_desc.get("type") or "unknown"
305
+ logging.info(f"Beginning {batch_type} batch")
306
+ logging.info(f"Batch recipes: {batch_desc.get('recipes', [])}")
307
+ if args.dry_run:
308
+ for r in batch:
309
+ run_one(r)
310
+ continue
269
311
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
270
312
  futures = [executor.submit(run_one, r) for r in batch]
271
313
  for fut in as_completed(futures):
272
314
  r = fut.result()
273
315
  if r.error or r.results.get("failed"):
274
316
  failed_recipes.append(r)
275
- else:
276
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
277
- futures = [executor.submit(run_one, r) for r in recipe_list]
278
- for fut in as_completed(futures):
279
- r = fut.result()
280
- if r.error or r.results.get("failed"):
281
- failed_recipes.append(r)
317
+ elif recipe_list:
318
+ logging.info("Recipe processing batches:")
319
+ logging.info("Batch type=all count=%d", len(recipe_list))
320
+ logging.info("Batch recipes: %s", [r.identifier for r in recipe_list])
321
+ if args.dry_run:
322
+ for r in recipe_list:
323
+ run_one(r)
324
+ else:
325
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
326
+ futures = [executor.submit(run_one, r) for r in recipe_list]
327
+ for fut in as_completed(futures):
328
+ r = fut.result()
329
+ if r.error or r.results.get("failed"):
330
+ failed_recipes.append(r)
282
331
 
283
332
  # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
284
- for r in recipe_list:
285
- update_recipe_repo(
286
- git_info=override_repo_info,
287
- recipe=r,
288
- disable_recipe_trust_check=args.disable_recipe_trust_check,
289
- args=args,
290
- )
333
+ if args.dry_run:
334
+ logging.info("Dry run: skipping git updates")
335
+ elif args.disable_git_commands:
336
+ logging.info("Skipping git updates (disabled)")
337
+ else:
338
+ if override_repo_info is None:
339
+ override_repo_info = get_override_repo_info(args)
340
+ for r in recipe_list:
341
+ update_recipe_repo(
342
+ git_info=override_repo_info,
343
+ recipe=r,
344
+ disable_recipe_trust_check=args.disable_recipe_trust_check,
345
+ args=args,
346
+ )
291
347
 
292
348
  # Send notifications serially to simplify rate limiting and ordering
293
349
  if args.slack_token:
294
- for r in recipe_list:
295
- slack.send_notification(recipe=r, token=args.slack_token)
350
+ if args.dry_run:
351
+ logging.info("Dry run: skipping Slack notifications")
352
+ else:
353
+ for r in recipe_list:
354
+ slack.send_notification(recipe=r, token=args.slack_token)
296
355
 
297
356
  # Optionally open a PR for updated trust information
298
357
  if args.create_pr and recipe_list:
299
- # Choose a representative recipe for the PR title/body
300
- rep_recipe = next(
301
- (r for r in recipe_list if r.updated is True or r.verified is False),
302
- recipe_list[0],
303
- )
304
- pr_url = git.create_pull_request(git_info=override_repo_info, recipe=rep_recipe)
305
- logging.info(f"Created Pull Request for trust info updates: {pr_url}")
358
+ if args.dry_run:
359
+ logging.info("Dry run: skipping PR creation")
360
+ elif args.disable_git_commands:
361
+ logging.info("Skipping PR creation (disabled git commands)")
362
+ else:
363
+ if override_repo_info is None:
364
+ override_repo_info = get_override_repo_info(args)
365
+ # Choose a representative recipe for the PR title/body
366
+ rep_recipe = next(
367
+ (r for r in recipe_list if r.updated is True or r.verified is False),
368
+ recipe_list[0],
369
+ )
370
+ pr_url = git.create_pull_request(
371
+ git_info=override_repo_info, recipe=rep_recipe
372
+ )
373
+ logging.info(f"Created Pull Request for trust info updates: {pr_url}")
306
374
 
307
375
  # Create GitHub issue for failed recipes
308
376
  if args.create_issues and failed_recipes and args.github_token:
309
- issue_url = git.create_issue_for_failed_recipes(
310
- git_info=override_repo_info, failed_recipes=failed_recipes
311
- )
312
- logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
377
+ if args.dry_run:
378
+ logging.info("Dry run: skipping issue creation")
379
+ elif args.disable_git_commands:
380
+ logging.info("Skipping issue creation (disabled git commands)")
381
+ else:
382
+ if override_repo_info is None:
383
+ override_repo_info = get_override_repo_info(args)
384
+ issue_url = git.create_issue_for_failed_recipes(
385
+ git_info=override_repo_info, failed_recipes=failed_recipes
386
+ )
387
+ logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
313
388
 
314
389
  # Optionally process reports after running recipes
315
390
  if getattr(args, "process_reports", False):
391
+ if args.dry_run:
392
+ logging.info("Dry run: skipping report processing")
393
+ return
394
+ repo_branch = ""
395
+ repo_url = None
396
+ repo_path = None
397
+ if override_repo_info is None and not args.disable_git_commands:
398
+ override_repo_info = get_override_repo_info(args)
399
+ if override_repo_info is not None:
400
+ repo_url = override_repo_info.get("override_repo_url")
401
+ repo_path = str(override_repo_info.get("override_repo_path"))
402
+ if not args.disable_git_commands:
403
+ repo_branch = git.get_current_branch(override_repo_info)
316
404
  rc = process_reports(
317
405
  zip_file=getattr(args, "reports_zip", None),
318
406
  extract_dir=getattr(
@@ -324,6 +412,9 @@ def main():
324
412
  out_dir=getattr(args, "reports_out_dir", "autopkg_reports_summary/summary"),
325
413
  debug=bool(getattr(args, "debug", False)),
326
414
  strict=bool(getattr(args, "reports_strict", False)),
415
+ repo_url=repo_url,
416
+ repo_branch=repo_branch,
417
+ repo_path=repo_path,
327
418
  )
328
419
  if rc:
329
420
  sys.exit(rc)
@@ -8,7 +8,7 @@ from itertools import chain
8
8
  from pathlib import Path
9
9
 
10
10
 
11
- class Recipe(object):
11
+ class Recipe:
12
12
  def __init__(self, name: str, post_processors: list = None):
13
13
  self.filename = name
14
14
  self.error = False
@@ -27,6 +27,10 @@ class Recipe(object):
27
27
 
28
28
  return name
29
29
 
30
+ @property
31
+ def identifier(self):
32
+ return self.filename
33
+
30
34
  def verify_trust_info(self, args):
31
35
  verbose_output = ["-vvvv"] if args.debug else []
32
36
  prefs_file = (
@@ -40,6 +44,10 @@ class Recipe(object):
40
44
  )
41
45
  logging.debug(f"cmd: {cmd}")
42
46
 
47
+ if getattr(args, "dry_run", False):
48
+ logging.info("Dry run: would verify trust info for %s", self.identifier)
49
+ return self.verified
50
+
43
51
  result = subprocess.run(cmd, capture_output=True, text=True)
44
52
  if result.returncode == 0:
45
53
  self.verified = True
@@ -56,6 +64,10 @@ class Recipe(object):
56
64
  cmd = [autopkg_bin, "update-trust-info", self.filename] + prefs_file
57
65
  logging.debug(f"cmd: {cmd}")
58
66
 
67
+ if getattr(args, "dry_run", False):
68
+ logging.info("Dry run: would update trust info for %s", self.identifier)
69
+ return
70
+
59
71
  # Fail loudly if this exits 0
60
72
  try:
61
73
  subprocess.check_call(cmd)
@@ -79,6 +91,34 @@ class Recipe(object):
79
91
  return {"imported": imported_items, "failed": failed_items}
80
92
 
81
93
  def run(self, args):
94
+ if getattr(args, "dry_run", False):
95
+ autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
96
+ prefs_file = (
97
+ ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
98
+ )
99
+ verbose_output = ["-vvvv"] if args.debug else []
100
+ post_processor_cmd = (
101
+ list(
102
+ chain.from_iterable(
103
+ [("--post", processor) for processor in self.post_processors]
104
+ )
105
+ )
106
+ if self.post_processors
107
+ else []
108
+ )
109
+ report_dir = Path("/private/tmp/autopkg")
110
+ report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
111
+ report_name = Path(f"{self.name}-{report_time}.plist")
112
+ report = report_dir / report_name
113
+ cmd = (
114
+ [autopkg_bin, "run", self.filename, "--report-plist", report]
115
+ + verbose_output
116
+ + prefs_file
117
+ + post_processor_cmd
118
+ )
119
+ logging.info("Dry run: would run recipe %s", self.identifier)
120
+ logging.debug(f"cmd: {cmd}")
121
+ return self
82
122
  if self.verified is False:
83
123
  self.error = True
84
124
  self.results["failed"] = True
@@ -11,11 +11,13 @@ def send_notification(recipe, token):
11
11
  logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
12
12
  return
13
13
 
14
+ recipe_identifier = getattr(recipe, "identifier", None) or recipe.name
15
+
14
16
  if recipe.verified is False:
15
- task_title = f"{recipe.name} failed trust verification"
17
+ task_title = f"{recipe_identifier} failed trust verification"
16
18
  task_description = recipe.results["message"]
17
19
  elif recipe.error:
18
- task_title = f"Failed to import {recipe.name}"
20
+ task_title = f"Failed to import {recipe_identifier}"
19
21
  if not recipe.results["failed"]:
20
22
  task_description = "Unknown error"
21
23
  else:
@@ -27,8 +29,8 @@ def send_notification(recipe, token):
27
29
  if "No releases found for repo" in task_description:
28
30
  return
29
31
  elif recipe.updated:
30
- task_title = f"{recipe.name} has been uploaded to Jamf"
31
- task_description = f"It's time to test {recipe.name}!"
32
+ task_title = f"{recipe_identifier} has been uploaded to Jamf"
33
+ task_description = f"It's time to test {recipe_identifier}!"
32
34
  else:
33
35
  return
34
36
 
@@ -56,6 +58,6 @@ def send_notification(recipe, token):
56
58
  )
57
59
  if response.status_code != 200:
58
60
  raise ValueError(
59
- "Request to slack returned an error %s, the response is:\n%s"
60
- % (response.status_code, response.text)
61
+ "Request to slack returned an error "
62
+ f"{response.status_code}, the response is:\n{response.text}"
61
63
  )
@@ -94,6 +94,11 @@ def setup_args():
94
94
  default=os.getenv("AW_AUTOPKG_BIN", "/usr/local/bin/autopkg"),
95
95
  help="Path to the autopkg binary (default: /usr/local/bin/autopkg). Can also be set via AW_AUTOPKG_BIN.",
96
96
  )
97
+ parser.add_argument(
98
+ "--dry-run",
99
+ action="store_true",
100
+ help="Show planned actions without executing external commands",
101
+ )
97
102
 
98
103
  parser.add_argument(
99
104
  "--debug",
@@ -110,9 +110,9 @@ def push_branch(git_info):
110
110
 
111
111
 
112
112
  def create_pull_request(git_info, recipe):
113
- title = f"Update Trust Information: {recipe.name}"
113
+ title = f"Update Trust Information: {recipe.identifier}"
114
114
  body = f"""
115
- Recipe Verification information is out-of-date for {recipe.name}.
115
+ Recipe Verification information is out-of-date for {recipe.identifier}.
116
116
  Please review and merge the updated trust information for this override.
117
117
  """
118
118
 
@@ -152,7 +152,8 @@ def create_issue_for_failed_recipes(git_info, failed_recipes):
152
152
 
153
153
  body = "## Recipe Failure Details:\n\n"
154
154
  for recipe in failed_recipes:
155
- body += f"#### {recipe.name}\n"
155
+ identifier = getattr(recipe, "identifier", None) or recipe.name
156
+ body += f"#### {identifier}\n"
156
157
 
157
158
  if recipe.results.get("failed"):
158
159
  for failure in recipe.results.get("failed", []):
@@ -1,21 +1,24 @@
1
1
  from __future__ import annotations
2
2
 
3
- from typing import Iterable, Protocol, TypeVar
3
+ from collections.abc import Iterable
4
+ from typing import Protocol
4
5
 
5
6
 
6
7
  class HasFilename(Protocol):
7
8
  filename: str
8
9
 
9
10
 
10
- T = TypeVar("T", bound=HasFilename)
11
-
12
-
13
11
  def recipe_type_for(recipe: HasFilename) -> str:
14
12
  parts = recipe.filename.split(".", 1)
15
13
  return parts[1] if len(parts) == 2 else ""
16
14
 
17
15
 
18
- def build_recipe_batches(
16
+ def recipe_identifier_for(recipe: HasFilename) -> str:
17
+ identifier = getattr(recipe, "identifier", None)
18
+ return identifier if identifier else recipe.filename
19
+
20
+
21
+ def build_recipe_batches[T: HasFilename](
19
22
  recipe_list: Iterable[T], recipe_processing_order
20
23
  ) -> list[list[T]]:
21
24
  recipe_list = list(recipe_list)
@@ -39,3 +42,17 @@ def build_recipe_batches(
39
42
  if current_batch:
40
43
  batches.append(current_batch)
41
44
  return batches
45
+
46
+
47
+ def describe_recipe_batches[T: HasFilename](
48
+ batches: Iterable[Iterable[T]],
49
+ ) -> list[dict[str, object]]:
50
+ descriptions: list[dict[str, object]] = []
51
+ for batch in batches:
52
+ batch_list = list(batch)
53
+ batch_type = recipe_type_for(batch_list[0]) if batch_list else ""
54
+ identifiers = [recipe_identifier_for(r) for r in batch_list]
55
+ descriptions.append(
56
+ {"type": batch_type, "count": len(batch_list), "recipes": identifiers}
57
+ )
58
+ return descriptions
@@ -64,6 +64,17 @@ def order_recipe_list(recipe_list, order):
64
64
  parts = recipe_name.split(".", 1)
65
65
  return parts[1] if len(parts) == 2 else ""
66
66
 
67
+ def order_recipes_by_type(recipes: list[str]) -> list[str]:
68
+ groups: dict[str, list[str]] = {}
69
+ for r in recipes:
70
+ t = recipe_type(r)
71
+ groups.setdefault(t, []).append(r)
72
+
73
+ ordered_recipes: list[str] = []
74
+ for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
75
+ ordered_recipes.extend(sorted(groups[t], key=str.casefold))
76
+ return ordered_recipes
77
+
67
78
  def recipe_segments_after_first_dot(recipe_name: str) -> list[str]:
68
79
  after_first = recipe_type(recipe_name)
69
80
  return [p for p in after_first.split(".") if p] if after_first else []
@@ -131,17 +142,11 @@ def order_recipe_list(recipe_list, order):
131
142
 
132
143
  ordered: list[str] = []
133
144
  for p in normalised_order:
134
- ordered.extend(sorted(pattern_groups[p], key=str.casefold))
145
+ ordered.extend(order_recipes_by_type(pattern_groups[p]))
135
146
 
136
147
  # Remaining recipes: group by their full type string and order groups alphabetically,
137
148
  # with empty-type last.
138
- groups: dict[str, list[str]] = {}
139
- for r in unmatched:
140
- t = recipe_type(r)
141
- groups.setdefault(t, []).append(r)
142
-
143
- for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
144
- ordered.extend(sorted(groups[t], key=str.casefold))
149
+ ordered.extend(order_recipes_by_type(unmatched))
145
150
 
146
151
  logging.debug(f"Recipe processing order: {normalised_order}")
147
152
  logging.debug(f"Ordered recipes: {ordered}")