autopkg-wrapper 2026.2.8-py3-none-any.whl → 2026.2.9-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/autopkg_wrapper/autopkg_wrapper.py
+++ b/autopkg_wrapper/autopkg_wrapper.py
@@ -78,6 +78,12 @@ def get_override_repo_info(args):
 
 
 def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
+    if getattr(args, "dry_run", False):
+        logging.info(
+            "Dry run: would update trust info in override repo for %s",
+            recipe.identifier,
+        )
+        return
     logging.debug(f"recipe.verified: {recipe.verified}")
     logging.debug(f"disable_recipe_trust_check: {disable_recipe_trust_check}")
 
@@ -208,6 +214,21 @@ def parse_post_processors(post_processors):
 
 
 def process_recipe(recipe, disable_recipe_trust_check, args):
+    if getattr(args, "dry_run", False):
+        logging.info("Dry run: processing recipe %s", recipe.identifier)
+        if disable_recipe_trust_check:
+            logging.info(
+                "Dry run: trust verification disabled for %s", recipe.identifier
+            )
+            recipe.verified = None
+        else:
+            logging.info("Dry run: would verify trust info for %s", recipe.identifier)
+        logging.info("Dry run: would run recipe %s", recipe.identifier)
+        logging.info(
+            "Dry run: would evaluate trust update flow for %s",
+            recipe.identifier,
+        )
+        return recipe
     if disable_recipe_trust_check:
         logging.debug("Setting Recipe verification to None")
         recipe.verified = None
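
Note: the new dry-run guard returns before any subprocess work happens. A minimal sketch of the pattern, using a hypothetical argparse.Namespace in place of the wrapper's parsed arguments:

    import logging
    from argparse import Namespace

    def process_thing(identifier: str, args: Namespace):
        # getattr() with a default keeps call sites working even when the
        # namespace was built without a dry_run attribute (e.g. in tests).
        if getattr(args, "dry_run", False):
            logging.info("Dry run: would process %s", identifier)
            return None  # no side effects
        return f"processed {identifier}"

    process_thing("com.example.Firefox", Namespace(dry_run=True))

The same guard shape repeats throughout this release: log the action that would have happened, then return early.
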
@@ -233,7 +254,7 @@ def main():
     setup_logger(args.debug if args.debug else False)
     logging.info("Running autopkg_wrapper")
 
-    override_repo_info = get_override_repo_info(args)
+    override_repo_info = None
 
     post_processors_list = parse_post_processors(post_processors=args.post_processors)
     recipe_list = parse_recipe_list(
@@ -245,12 +266,22 @@ def main():
 
     failed_recipes = []
 
+    if getattr(args, "dry_run", False):
+        logging.info("Dry run enabled: no external commands will be executed")
+        if args.disable_git_commands:
+            logging.info("Dry run: git commands already disabled")
+
     # Run recipes concurrently using a thread pool to parallelize subprocess calls
     max_workers = max(1, int(getattr(args, "concurrency", 1)))
     logging.info(f"Running recipes with concurrency={max_workers}")
 
     def run_one(r: Recipe):
         logging.info(f"Processing Recipe: {r.identifier}")
+        if args.dry_run:
+            logging.info(
+                "Dry run: would process recipe %s with trust checks and run",
+                r.identifier,
+            )
         process_recipe(
             recipe=r,
             disable_recipe_trust_check=args.disable_recipe_trust_check,
@@ -266,10 +297,17 @@ def main():
         )
         logging.info("Recipe processing batches:")
         batch_descriptions = describe_recipe_batches(batches)
-        for batch, batch_desc in zip(batches, batch_descriptions, strict=False):
+        for batch_desc in batch_descriptions:
             batch_type = batch_desc.get("type") or "unknown"
             logging.info(f"Batch type={batch_type} count={batch_desc.get('count', 0)}")
+        for batch, batch_desc in zip(batches, batch_descriptions, strict=False):
+            batch_type = batch_desc.get("type") or "unknown"
+            logging.info(f"Beginning {batch_type} batch")
             logging.info(f"Batch recipes: {batch_desc.get('recipes', [])}")
+            if args.dry_run:
+                for r in batch:
+                    run_one(r)
+                continue
             with ThreadPoolExecutor(max_workers=max_workers) as executor:
                 futures = [executor.submit(run_one, r) for r in batch]
                 for fut in as_completed(futures):
@@ -280,46 +318,89 @@ def main():
         logging.info("Recipe processing batches:")
         logging.info("Batch type=all count=%d", len(recipe_list))
         logging.info("Batch recipes: %s", [r.identifier for r in recipe_list])
-        with ThreadPoolExecutor(max_workers=max_workers) as executor:
-            futures = [executor.submit(run_one, r) for r in recipe_list]
-            for fut in as_completed(futures):
-                r = fut.result()
-                if r.error or r.results.get("failed"):
-                    failed_recipes.append(r)
+        if args.dry_run:
+            for r in recipe_list:
+                run_one(r)
+        else:
+            with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                futures = [executor.submit(run_one, r) for r in recipe_list]
+                for fut in as_completed(futures):
+                    r = fut.result()
+                    if r.error or r.results.get("failed"):
+                        failed_recipes.append(r)
 
     # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
-    for r in recipe_list:
-        update_recipe_repo(
-            git_info=override_repo_info,
-            recipe=r,
-            disable_recipe_trust_check=args.disable_recipe_trust_check,
-            args=args,
-        )
+    if args.dry_run:
+        logging.info("Dry run: skipping git updates")
+    elif args.disable_git_commands:
+        logging.info("Skipping git updates (disabled)")
+    else:
+        if override_repo_info is None:
+            override_repo_info = get_override_repo_info(args)
+        for r in recipe_list:
+            update_recipe_repo(
+                git_info=override_repo_info,
+                recipe=r,
+                disable_recipe_trust_check=args.disable_recipe_trust_check,
+                args=args,
+            )
 
     # Send notifications serially to simplify rate limiting and ordering
     if args.slack_token:
-        for r in recipe_list:
-            slack.send_notification(recipe=r, token=args.slack_token)
+        if args.dry_run:
+            logging.info("Dry run: skipping Slack notifications")
+        else:
+            for r in recipe_list:
+                slack.send_notification(recipe=r, token=args.slack_token)
 
     # Optionally open a PR for updated trust information
     if args.create_pr and recipe_list:
-        # Choose a representative recipe for the PR title/body
-        rep_recipe = next(
-            (r for r in recipe_list if r.updated is True or r.verified is False),
-            recipe_list[0],
-        )
-        pr_url = git.create_pull_request(git_info=override_repo_info, recipe=rep_recipe)
-        logging.info(f"Created Pull Request for trust info updates: {pr_url}")
+        if args.dry_run:
+            logging.info("Dry run: skipping PR creation")
+        elif args.disable_git_commands:
+            logging.info("Skipping PR creation (disabled git commands)")
+        else:
+            if override_repo_info is None:
+                override_repo_info = get_override_repo_info(args)
+            # Choose a representative recipe for the PR title/body
+            rep_recipe = next(
+                (r for r in recipe_list if r.updated is True or r.verified is False),
+                recipe_list[0],
+            )
+            pr_url = git.create_pull_request(
+                git_info=override_repo_info, recipe=rep_recipe
+            )
+            logging.info(f"Created Pull Request for trust info updates: {pr_url}")
 
     # Create GitHub issue for failed recipes
     if args.create_issues and failed_recipes and args.github_token:
-        issue_url = git.create_issue_for_failed_recipes(
-            git_info=override_repo_info, failed_recipes=failed_recipes
-        )
-        logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
+        if args.dry_run:
+            logging.info("Dry run: skipping issue creation")
+        elif args.disable_git_commands:
+            logging.info("Skipping issue creation (disabled git commands)")
+        else:
+            if override_repo_info is None:
+                override_repo_info = get_override_repo_info(args)
+            issue_url = git.create_issue_for_failed_recipes(
+                git_info=override_repo_info, failed_recipes=failed_recipes
+            )
+            logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
 
     # Optionally process reports after running recipes
     if getattr(args, "process_reports", False):
+        if args.dry_run:
+            logging.info("Dry run: skipping report processing")
+            return
+        repo_branch = ""
+        repo_url = None
+        repo_path = None
+        if override_repo_info is None and not args.disable_git_commands:
+            override_repo_info = get_override_repo_info(args)
+        if override_repo_info is not None:
+            repo_url = override_repo_info.get("override_repo_url")
+            repo_path = str(override_repo_info.get("override_repo_path"))
+        if not args.disable_git_commands:
+            repo_branch = git.get_current_branch(override_repo_info)
        rc = process_reports(
            zip_file=getattr(args, "reports_zip", None),
            extract_dir=getattr(
@@ -331,6 +412,9 @@ def main():
             out_dir=getattr(args, "reports_out_dir", "autopkg_reports_summary/summary"),
             debug=bool(getattr(args, "debug", False)),
             strict=bool(getattr(args, "reports_strict", False)),
+            repo_url=repo_url,
+            repo_branch=repo_branch,
+            repo_path=repo_path,
         )
         if rc:
             sys.exit(rc)
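
Note: override_repo_info now starts as None and is resolved lazily — only the first code path that actually needs git metadata calls get_override_repo_info(args). A minimal sketch of that pattern, with expensive_lookup as a hypothetical stand-in:

    def expensive_lookup() -> dict:
        print("resolving override repo info...")
        return {"override_repo_url": "https://github.com/example/overrides"}

    override_repo_info = None
    for step in ("git updates", "PR creation", "issue creation"):
        if override_repo_info is None:  # resolved at most once
            override_repo_info = expensive_lookup()
        print(step, "->", override_repo_info["override_repo_url"])

In dry-run or --disable-git-commands mode, no branch ever reaches the lookup, so no git work happens at all.
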
--- a/autopkg_wrapper/models/recipe.py
+++ b/autopkg_wrapper/models/recipe.py
@@ -44,6 +44,10 @@ class Recipe:
         )
         logging.debug(f"cmd: {cmd}")
 
+        if getattr(args, "dry_run", False):
+            logging.info("Dry run: would verify trust info for %s", self.identifier)
+            return self.verified
+
         result = subprocess.run(cmd, capture_output=True, text=True)
         if result.returncode == 0:
             self.verified = True
@@ -60,6 +64,10 @@ class Recipe:
         cmd = [autopkg_bin, "update-trust-info", self.filename] + prefs_file
         logging.debug(f"cmd: {cmd}")
 
+        if getattr(args, "dry_run", False):
+            logging.info("Dry run: would update trust info for %s", self.identifier)
+            return
+
         # Fail loudly if this exits 0
         try:
             subprocess.check_call(cmd)
@@ -83,6 +91,34 @@ class Recipe:
         return {"imported": imported_items, "failed": failed_items}
 
     def run(self, args):
+        if getattr(args, "dry_run", False):
+            autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+            prefs_file = (
+                ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
+            )
+            verbose_output = ["-vvvv"] if args.debug else []
+            post_processor_cmd = (
+                list(
+                    chain.from_iterable(
+                        [("--post", processor) for processor in self.post_processors]
+                    )
+                )
+                if self.post_processors
+                else []
+            )
+            report_dir = Path("/private/tmp/autopkg")
+            report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
+            report_name = Path(f"{self.name}-{report_time}.plist")
+            report = report_dir / report_name
+            cmd = (
+                [autopkg_bin, "run", self.filename, "--report-plist", report]
+                + verbose_output
+                + prefs_file
+                + post_processor_cmd
+            )
+            logging.info("Dry run: would run recipe %s", self.identifier)
+            logging.debug(f"cmd: {cmd}")
+            return self
         if self.verified is False:
             self.error = True
             self.results["failed"] = True
--- a/autopkg_wrapper/utils/args.py
+++ b/autopkg_wrapper/utils/args.py
@@ -94,6 +94,11 @@ def setup_args():
         default=os.getenv("AW_AUTOPKG_BIN", "/usr/local/bin/autopkg"),
         help="Path to the autopkg binary (default: /usr/local/bin/autopkg). Can also be set via AW_AUTOPKG_BIN.",
     )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show planned actions without executing external commands",
+    )
 
     parser.add_argument(
         "--debug",
--- a/autopkg_wrapper/utils/report_processor.py
+++ b/autopkg_wrapper/utils/report_processor.py
@@ -4,6 +4,7 @@ import os
 import plistlib
 import re
 import zipfile
+from pathlib import Path
 
 
 def find_report_dirs(base_path: str) -> list[str]:
@@ -45,6 +46,42 @@ def _infer_recipe_name_from_filename(path: str) -> str:
     return base
 
 
+def _resolve_recipe_name(name: str, recipe_link_map: dict[str, str] | None) -> str:
+    if not recipe_link_map:
+        return name
+    if name in recipe_link_map:
+        return name
+    candidates = [
+        recipe_name
+        for recipe_name in recipe_link_map
+        if recipe_name.startswith(f"{name}.")
+    ]
+    if len(candidates) == 1:
+        return candidates[0]
+    return name
+
+
+def _build_recipe_link_map(
+    repo_path: str | None, repo_url: str | None, repo_branch: str | None
+) -> dict[str, str]:
+    if not repo_path or not repo_url or not repo_branch:
+        return {}
+    repo_root = Path(repo_path)
+    if not repo_root.exists():
+        return {}
+
+    recipe_link_map: dict[str, str] = {}
+    for path in repo_root.rglob("*.recipe*"):
+        if not path.is_file():
+            continue
+        rel = path.relative_to(repo_root).as_posix()
+        recipe_base = path.name
+        recipe_name = recipe_base.split(".recipe", 1)[0]
+        if recipe_name not in recipe_link_map:
+            recipe_link_map[recipe_name] = f"{repo_url}/blob/{repo_branch}/{rel}"
+    return recipe_link_map
+
+
 def parse_text_file(path: str) -> dict[str, list]:
     uploads: list[dict] = []
     policies: list[dict] = []
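
Note: _build_recipe_link_map walks the override repo once and maps each recipe's base name to a GitHub blob URL, while _resolve_recipe_name lets a short report name (e.g. Firefox) match a single longer key (e.g. Firefox.munki). A self-contained sketch with an invented repo layout and URL:

    import tempfile
    from pathlib import Path

    with tempfile.TemporaryDirectory() as tmp:
        root = Path(tmp)
        (root / "overrides").mkdir()
        (root / "overrides" / "Firefox.munki.recipe.yaml").touch()

        repo_url = "https://github.com/example/autopkg-overrides"  # hypothetical
        link_map: dict[str, str] = {}
        for p in root.rglob("*.recipe*"):
            if p.is_file():
                name = p.name.split(".recipe", 1)[0]  # -> "Firefox.munki"
                rel = p.relative_to(root).as_posix()
                link_map.setdefault(name, f"{repo_url}/blob/main/{rel}")
        print(link_map)

Because "Firefox." is a prefix of exactly one key here, _resolve_recipe_name("Firefox", link_map) would return "Firefox.munki".
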
@@ -90,7 +127,9 @@ def parse_text_file(path: str) -> dict[str, list]:
     return {"uploads": uploads, "policies": policies, "errors": errors}
 
 
-def parse_plist_file(path: str) -> dict[str, list]:
+def parse_plist_file(
+    path: str, *, recipe_link_map: dict[str, str] | None = None
+) -> dict[str, list]:
     uploads: list[dict] = []
     policies: list[dict] = []
     errors: list[str] = []
@@ -116,10 +155,16 @@ def parse_plist_file(path: str) -> dict[str, list]:
     sr = plist.get("summary_results", {}) or {}
 
     recipe_name = _infer_recipe_name_from_filename(path)
+    if recipe_link_map:
+        recipe_name = _resolve_recipe_name(recipe_name, recipe_link_map)
     recipe_identifier: str | None = None
+    recipe_link = (recipe_link_map or {}).get(recipe_name)
+
+    handled_keys: set[str] = set()
 
     jpu = sr.get("jamfpackageuploader_summary_result")
     if isinstance(jpu, dict):
+        handled_keys.add("jamfpackageuploader_summary_result")
         rows = jpu.get("data_rows") or []
         for row in rows:
             name = (row.get("name") or row.get("pkg_display_name") or "-").strip()
@@ -139,12 +184,38 @@ def parse_plist_file(path: str) -> dict[str, list]:
                 {
                     "recipe_name": recipe_name,
                     "recipe_identifier": recipe_identifier or "-",
+                    "recipe_url": recipe_link,
                     "package": pkg_name,
                     "version": version or "-",
                 }
             )
 
+    jpol = sr.get("jamfpolicyuploader_summary_result")
+    if isinstance(jpol, dict):
+        handled_keys.add("jamfpolicyuploader_summary_result")
+        rows = jpol.get("data_rows") or []
+        for row in rows:
+            name = (
+                row.get("policy")
+                or row.get("policy_name")
+                or row.get("name")
+                or row.get("title")
+            )
+            if not name:
+                continue
+            policies.append({"name": str(name).strip(), "action": "-"})
+            policy_rows.append(
+                {
+                    "recipe_name": recipe_name,
+                    "recipe_identifier": recipe_identifier or "-",
+                    "recipe_url": recipe_link,
+                    "policy": str(name).strip(),
+                }
+            )
+
     for key, block in sr.items():
+        if key in handled_keys:
+            continue
         if not isinstance(block, dict):
             continue
         hdr = [h.lower() for h in (block.get("header") or [])]
@@ -170,6 +241,7 @@ def parse_plist_file(path: str) -> dict[str, list]:
                 {
                     "recipe_name": recipe_name,
                     "recipe_identifier": recipe_identifier or "-",
+                    "recipe_url": recipe_link,
                     "policy": str(name).strip(),
                 }
             )
@@ -199,7 +271,11 @@ def parse_plist_file(path: str) -> dict[str, list]:
     }
 
 
-def aggregate_reports(base_path: str) -> dict:
+def aggregate_reports(
+    base_path: str,
+    *,
+    recipe_link_map: dict[str, str] | None = None,
+) -> dict:
     summary = {
         "uploads": [],
         "policies": [],
@@ -218,7 +294,7 @@ def aggregate_reports(base_path: str) -> dict:
             ext = os.path.splitext(fn)[1].lower()
 
             if ext == ".plist":
-                data = parse_plist_file(p)
+                data = parse_plist_file(p, recipe_link_map=recipe_link_map)
                 summary["uploads"] += data.get("uploads", [])
                 summary["policies"] += data.get("policies", [])
                 summary["errors"] += data.get("errors", [])
@@ -391,8 +467,13 @@ def render_job_summary(summary: dict, environment: str, run_date: str) -> str:
             pkg = row.get("package", "-")
             pkg_url = row.get("package_url")
             pkg_cell = f"[{pkg}]({pkg_url})" if pkg_url else pkg
+            recipe_name = row.get("recipe_name", "-")
+            recipe_url = row.get("recipe_url")
+            recipe_cell = (
+                f"[{recipe_name}]({recipe_url})" if recipe_url else recipe_name
+            )
             lines.append(
-                f"| {row.get('recipe_name', '-')} | {row.get('recipe_identifier', '-')} | {pkg_cell} | {row.get('version', '-')} |"
+                f"| {recipe_cell} | {row.get('recipe_identifier', '-')} | {pkg_cell} | {row.get('version', '-')} |"
             )
         lines.append("")
     else:
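
Note: the summary table now renders the recipe cell as a Markdown link whenever a URL was resolved, falling back to plain text otherwise. The cell logic in isolation, with an invented row:

    row = {"recipe_name": "Firefox.munki",
           "recipe_url": "https://github.com/example/autopkg-overrides/blob/main/overrides/Firefox.munki.recipe.yaml"}
    recipe_name = row.get("recipe_name", "-")
    recipe_url = row.get("recipe_url")
    recipe_cell = f"[{recipe_name}]({recipe_url})" if recipe_url else recipe_name
    print(f"| {recipe_cell} | com.example.firefox | Firefox.pkg | 138.0 |")

The same pattern is applied to policy cells in the next hunk.
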
@@ -407,8 +488,16 @@ def render_job_summary(summary: dict, environment: str, run_date: str) -> str:
         for row in sorted(
             summary["policy_rows"], key=lambda r: str(r.get("recipe_name", "")).lower()
         ):
+            recipe_name = row.get("recipe_name", "-")
+            recipe_url = row.get("recipe_url")
+            recipe_cell = (
+                f"[{recipe_name}]({recipe_url})" if recipe_url else recipe_name
+            )
+            policy = row.get("policy", "-")
+            policy_url = row.get("policy_url")
+            policy_cell = f"[{policy}]({policy_url})" if policy_url else policy
             lines.append(
-                f"| {row.get('recipe_name', '-')} | {row.get('recipe_identifier', '-')} | {row.get('policy', '-')} |"
+                f"| {recipe_cell} | {row.get('recipe_identifier', '-')} | {policy_cell} |"
             )
         lines.append("")
 
@@ -552,17 +641,63 @@ def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> dict[str,
     return pkg_map
 
 
+def build_policy_map(
+    jss_url: str, client_id: str, client_secret: str
+) -> dict[str, str]:
+    host = _normalize_host(jss_url)
+    _ = host  # silence linters about unused var; kept for readability
+    policy_map: dict[str, str] = {}
+    try:
+        from jamf_pro_sdk import (  # type: ignore
+            ApiClientCredentialsProvider,
+            JamfProClient,
+        )
+
+        client = JamfProClient(
+            _normalize_host(jss_url),
+            ApiClientCredentialsProvider(client_id, client_secret),
+        )
+        policies = client.pro_api.get_policies()
+        for p in policies:
+            try:
+                name = str(p.name).strip()
+                pid = str(p.id).strip()
+            except Exception:
+                continue
+            if not name or not pid:
+                continue
+            url = f"{jss_url}/policies.html?id={pid}"
+            if name not in policy_map:
+                policy_map[name] = url
+    except Exception:
+        return {}
+    return policy_map
+
+
 def enrich_upload_rows(upload_rows: list[dict], pkg_map: dict[str, str]) -> int:
     linked = 0
+    norm_map = {k.lower(): v for k, v in pkg_map.items()}
     for row in upload_rows:
         pkg_name = str(row.get("package") or "").strip()
-        url = pkg_map.get(pkg_name)
+        url = pkg_map.get(pkg_name) or norm_map.get(pkg_name.lower())
         if url:
             row["package_url"] = url
             linked += 1
     return linked
 
 
+def enrich_policy_rows(policy_rows: list[dict], policy_map: dict[str, str]) -> int:
+    linked = 0
+    norm_map = {k.lower(): v for k, v in policy_map.items()}
+    for row in policy_rows:
+        policy_name = str(row.get("policy") or "").strip()
+        url = policy_map.get(policy_name) or norm_map.get(policy_name.lower())
+        if url:
+            row["policy_url"] = url
+            linked += 1
+    return linked
+
+
 def enrich_upload_rows_with_jamf(
     summary: dict, jss_url: str, client_id: str, client_secret: str
 ) -> tuple[int, list[str]]:
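
Note: both enrich helpers now retry lookups against a lowercased-key copy of the map, so report names that differ from Jamf only in case still get linked. The fallback in isolation, with invented data:

    policy_map = {"Install Firefox": "https://example.jamfcloud.com/policies.html?id=7"}
    norm_map = {k.lower(): v for k, v in policy_map.items()}

    for name in ("Install Firefox", "install firefox", "Install Chrome"):
        url = policy_map.get(name) or norm_map.get(name.lower())
        print(name, "->", url)  # the last one prints None: no link added

Exact matches are checked first, so the case-insensitive map only acts as a fallback.
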
@@ -571,6 +706,14 @@ def enrich_upload_rows_with_jamf(
     return linked, sorted(set(pkg_map.keys()))
 
 
+def enrich_policy_rows_with_jamf(
+    summary: dict, jss_url: str, client_id: str, client_secret: str
+) -> tuple[int, list[str]]:
+    policy_map = build_policy_map(jss_url, client_id, client_secret)
+    linked = enrich_policy_rows(summary.get("policy_rows", []), policy_map)
+    return linked, sorted(set(policy_map.keys()))
+
+
 def process_reports(
     *,
     zip_file: str | None,
@@ -581,6 +724,9 @@ def process_reports(
     out_dir: str,
     debug: bool,
     strict: bool,
+    repo_url: str | None = None,
+    repo_branch: str | None = None,
+    repo_path: str | None = None,
 ) -> int:
     os.makedirs(out_dir, exist_ok=True)
 
@@ -595,7 +741,8 @@ def process_reports(
     else:
         process_dir = reports_dir or extract_dir
 
-    summary = aggregate_reports(process_dir)
+    recipe_link_map = _build_recipe_link_map(repo_path, repo_url, repo_branch)
+    summary = aggregate_reports(process_dir, recipe_link_map=recipe_link_map)
 
     jss_url = os.environ.get("AUTOPKG_JSS_URL")
     jss_client_id = os.environ.get("AUTOPKG_CLIENT_ID")
@@ -603,15 +750,29 @@ def process_reports(
     jamf_attempted = False
     jamf_linked = 0
     jamf_keys: list[str] = []
+    jamf_policy_linked = 0
+    jamf_policy_keys: list[str] = []
     jamf_total = len(summary.get("upload_rows", []))
-    if jss_url and jss_client_id and jss_client_secret and jamf_total:
+    jamf_policy_total = len(summary.get("policy_rows", []))
+    if (
+        jss_url
+        and jss_client_id
+        and jss_client_secret
+        and (jamf_total or jamf_policy_total)
+    ):
         jamf_attempted = True
         try:
-            jamf_linked, jamf_keys = enrich_upload_rows_with_jamf(
-                summary, jss_url, jss_client_id, jss_client_secret
-            )
+            if jamf_total:
+                jamf_linked, jamf_keys = enrich_upload_rows_with_jamf(
+                    summary, jss_url, jss_client_id, jss_client_secret
+                )
+            if jamf_policy_total:
+                jamf_policy_linked, jamf_policy_keys = enrich_policy_rows_with_jamf(
+                    summary, jss_url, jss_client_id, jss_client_secret
+                )
         except Exception:
             jamf_linked = 0
+            jamf_policy_linked = 0
 
     job_md = render_job_summary(summary, environment, run_date)
     issue_md = None
@@ -633,20 +794,38 @@ def process_reports(
             str(r.get("package") or "").strip()
             for r in summary.get("upload_rows", [])
         ]
+        policy_names = [
+            str(r.get("policy") or "").strip()
+            for r in summary.get("policy_rows", [])
+        ]
         matched = [
             r for r in summary.get("upload_rows", []) if r.get("package_url")
         ]
         unmatched = [
             r for r in summary.get("upload_rows", []) if not r.get("package_url")
         ]
+        policy_matched = [
+            r for r in summary.get("policy_rows", []) if r.get("policy_url")
+        ]
+        policy_unmatched = [
+            r for r in summary.get("policy_rows", []) if not r.get("policy_url")
+        ]
         diag = {
             "jss_url": jss_url or "",
             "jamf_keys_count": len(jamf_keys),
             "jamf_keys_sample": jamf_keys[:20],
+            "jamf_policy_keys_count": len(jamf_policy_keys),
+            "jamf_policy_keys_sample": jamf_policy_keys[:20],
             "uploads_count": len(upload_pkg_names),
             "matched_count": len(matched),
             "unmatched_count": len(unmatched),
             "unmatched_names": [r.get("package") for r in unmatched][:20],
+            "policies_count": len(policy_names),
+            "policy_matched_count": len(policy_matched),
+            "policy_unmatched_count": len(policy_unmatched),
+            "policy_unmatched_names": [r.get("policy") for r in policy_unmatched][
+                :20
+            ],
         }
         with open(jamf_log_path, "w", encoding="utf-8") as jf:
             json.dump(diag, jf, indent=2)
@@ -659,11 +838,13 @@ def process_reports(
         f"Errors file: {'errors_issue.md' if issue_md else 'none'}",
     ]
     if jamf_attempted:
-        status.append(f"Jamf links added: {jamf_linked}/{jamf_total}")
+        status.append(
+            f"Jamf links added: packages {jamf_linked}/{jamf_total}, policies {jamf_policy_linked}/{jamf_policy_total}"
+        )
         if jamf_log_path:
             status.append(f"Jamf lookup log: '{jamf_log_path}'")
     else:
-        status.append("Jamf links: skipped (missing env or no uploads)")
+        status.append("Jamf links: skipped (missing env or no uploads/policies)")
     logging.info(". ".join(status))
 
     if strict and summary.get("errors"):
--- a/autopkg_wrapper-2026.2.8.dist-info/METADATA
+++ b/autopkg_wrapper-2026.2.9.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: autopkg-wrapper
-Version: 2026.2.8
+Version: 2026.2.9
 Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
 Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
 Author-email: James Smith <james@smithjw.me>
@@ -45,7 +45,7 @@ mise run build
 usage: autopkg_wrapper [-h] [--recipe-file RECIPE_FILE |
                        --recipes [RECIPES ...]]
                        [--recipe-processing-order [RECIPE_PROCESSING_ORDER ...]]
-                       [--autopkg-bin AUTOPKG_BIN] [--debug]
+                       [--autopkg-bin AUTOPKG_BIN] [--dry-run] [--debug]
                        [--disable-recipe-trust-check] [--disable-git-commands]
                        [--concurrency CONCURRENCY]
                        [--github-token GITHUB_TOKEN]
@@ -111,6 +111,8 @@ options:
                         Path to the autopkg binary (default:
                         /usr/local/bin/autopkg). Can also be set via
                         AW_AUTOPKG_BIN.
+  --dry-run             Show planned actions without executing external
+                        commands
   --debug               Enable debug logging when running script
   --disable-recipe-trust-check
                         If this option is used, recipe trust verification will
--- a/autopkg_wrapper-2026.2.8.dist-info/RECORD
+++ b/autopkg_wrapper-2026.2.9.dist-info/RECORD
@@ -1,17 +1,17 @@
 autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autopkg_wrapper/autopkg_wrapper.py,sha256=YBXttf3JDDP7tI3zoTAtBnXSGXssltLY7qk3Gy_cgNI,12728
-autopkg_wrapper/models/recipe.py,sha256=OQabJ-4ORbhueYZuZrq9bovXbcAC9flBoTc8A998mlE,4953
+autopkg_wrapper/autopkg_wrapper.py,sha256=bb63BJMcWekfLiE-qeBqsa5C9_bSHo1QkxyOEAz_TD8,16186
+autopkg_wrapper/models/recipe.py,sha256=yZOkYyTCd4D6fB7AGJrBETEK0-asf4YwJbhrs1gowlg,6471
 autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 autopkg_wrapper/notifier/slack.py,sha256=O5Dc3ux7w258zTkfyDkjNbpfFLLCaCCoGUguOYfOETk,2056
 autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autopkg_wrapper/utils/args.py,sha256=1uEdjTIuqQQcealiEqihZNAamSDLbU9qBzJ6M-tpsS4,8423
+autopkg_wrapper/utils/args.py,sha256=I3-Xgljv5gt-GHRUPbdlPGScgt1pcLgiMUXa8eIwjuU,8577
 autopkg_wrapper/utils/git_functions.py,sha256=5egBB4itrEusGrANoL2_8F13diRAZSfBcnZrUxn6Z5M,4976
 autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
 autopkg_wrapper/utils/recipe_batching.py,sha256=ohZUPyr6IFD8j4m9VSASsLOFh_9Fs0_UkIo6MIR4fIQ,1722
 autopkg_wrapper/utils/recipe_ordering.py,sha256=LWxbktRo_NlDNaW7NL63GJHSXGspYHDvu-2mP1JATFE,6190
-autopkg_wrapper/utils/report_processor.py,sha256=iB48pV_K2P11jkR9IZbeoA7dmyaZBiD0BHKwDOBwHgQ,22296
-autopkg_wrapper-2026.2.8.dist-info/METADATA,sha256=vWcsRVxM3ymlfXEEZmrfjEODzCkaNsPMvcfFdhFrSfc,10750
-autopkg_wrapper-2026.2.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-autopkg_wrapper-2026.2.8.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
-autopkg_wrapper-2026.2.8.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
-autopkg_wrapper-2026.2.8.dist-info/RECORD,,
+autopkg_wrapper/utils/report_processor.py,sha256=SfF5Ybtzo5u5O_PwnejDkhDR4-BB1SngJDnKL69VeFY,28864
+autopkg_wrapper-2026.2.9.dist-info/METADATA,sha256=xYq-R6pJdaPYqUnXp8SBP9UIfdUOTyluHJLS5pN9GdY,10867
+autopkg_wrapper-2026.2.9.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+autopkg_wrapper-2026.2.9.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
+autopkg_wrapper-2026.2.9.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
+autopkg_wrapper-2026.2.9.dist-info/RECORD,,