autopkg-wrapper 2025.8.1-py3-none-any.whl → 2026.2.2-py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
autopkg_wrapper/autopkg_wrapper.py
@@ -4,6 +4,7 @@ import logging
 import plistlib
 import subprocess
 import sys
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime
 from itertools import chain
 from pathlib import Path
@@ -12,6 +13,7 @@ import autopkg_wrapper.utils.git_functions as git
 from autopkg_wrapper.notifier import slack
 from autopkg_wrapper.utils.args import setup_args
 from autopkg_wrapper.utils.logging import setup_logger
+from autopkg_wrapper.utils.report_processor import process_reports


 class Recipe(object):
@@ -94,7 +96,7 @@ class Recipe(object):
             self.results["failed"] = True
             self.results["imported"] = ""
         else:
-            report_dir = Path("/tmp/autopkg")
+            report_dir = Path("/private/tmp/autopkg")
             report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
             report_name = Path(f"{self.name}-{report_time}.plist")

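The `/tmp` → `/private/tmp` change is a no-op on macOS, where `/tmp` is a symlink into `/private`; the new spelling simply matches the path the symlink resolves to. A quick illustration of the (macOS-specific) behavior:

```python
from pathlib import Path

# On macOS, /tmp is a symlink to /private/tmp, so both spellings name the
# same directory; resolve() follows the symlink.
print(Path("/tmp/autopkg").resolve())  # /private/tmp/autopkg on macOS
```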
@@ -358,31 +360,50 @@ def main():

     failed_recipes = []

-    for recipe in recipe_list:
-        logging.info(f"Processing Recipe: {recipe.name}")
+    # Run recipes concurrently using a thread pool to parallelize subprocess calls
+    max_workers = max(1, int(getattr(args, "concurrency", 1)))
+    logging.info(f"Running recipes with concurrency={max_workers}")
+
+    def run_one(r: Recipe):
+        logging.info(f"Processing Recipe: {r.name}")
         process_recipe(
-            recipe=recipe,
+            recipe=r,
             disable_recipe_trust_check=args.disable_recipe_trust_check,
             args=args,
         )
+        # Git updates and notifications are applied serially after all recipes finish
+        return r
+
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures = [executor.submit(run_one, r) for r in recipe_list]
+        for fut in as_completed(futures):
+            r = fut.result()
+            if r.error or r.results.get("failed"):
+                failed_recipes.append(r)
+
+    # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
+    for r in recipe_list:
         update_recipe_repo(
             git_info=override_repo_info,
-            recipe=recipe,
+            recipe=r,
             disable_recipe_trust_check=args.disable_recipe_trust_check,
             args=args,
         )
-        slack.send_notification(
-            recipe=recipe, token=args.slack_token
-        ) if args.slack_token else None
-
-        if recipe.error or recipe.results.get("failed"):
-            failed_recipes.append(recipe)

-        recipe.pr_url = (
-            git.create_pull_request(git_info=override_repo_info, recipe=recipe)
-            if args.create_pr
-            else None
-        )
+    # Send notifications serially to simplify rate limiting and ordering
+    if args.slack_token:
+        for r in recipe_list:
+            slack.send_notification(recipe=r, token=args.slack_token)
+
+    # Optionally open a PR for updated trust information
+    if args.create_pr and recipe_list:
+        # Choose a representative recipe for the PR title/body
+        rep_recipe = next(
+            (r for r in recipe_list if r.updated is True or r.verified is False),
+            recipe_list[0],
+        )
+        pr_url = git.create_pull_request(git_info=override_repo_info, recipe=rep_recipe)
+        logging.info(f"Created Pull Request for trust info updates: {pr_url}")

     # Create GitHub issue for failed recipes
     if args.create_issues and failed_recipes and args.github_token:
@@ -390,3 +411,20 @@ def main():
             git_info=override_repo_info, failed_recipes=failed_recipes
         )
         logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
+
+    # Optionally process reports after running recipes
+    if getattr(args, "process_reports", False):
+        rc = process_reports(
+            zip_file=getattr(args, "reports_zip", None),
+            extract_dir=getattr(
+                args, "reports_extract_dir", "autopkg_reports_summary/reports"
+            ),
+            reports_dir=(getattr(args, "reports_dir", None) or "/private/tmp/autopkg"),
+            environment="",
+            run_date=getattr(args, "reports_run_date", ""),
+            out_dir=getattr(args, "reports_out_dir", "autopkg_reports_summary/summary"),
+            debug=bool(getattr(args, "debug", False)),
+            strict=bool(getattr(args, "reports_strict", False)),
+        )
+        if rc:
+            sys.exit(rc)
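The `main()` rewrite above follows the standard fan-out/fan-in pattern: submit every recipe to a thread pool (threads fit here because each run blocks on an `autopkg` subprocess rather than on Python bytecode), then harvest results with `as_completed` so failures are recorded as each run finishes. A minimal, self-contained sketch of the same pattern — the task names and failure rule are illustrative, not from the package:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed

def run_task(name: str) -> tuple[str, bool]:
    # Stand-in for a per-recipe subprocess call; returns (name, failed).
    return name, name.startswith("Bad")

tasks = ["Foo.download", "Bar.download", "BadBaz.download"]
failed = []
with ThreadPoolExecutor(max_workers=3) as pool:
    futures = [pool.submit(run_task, t) for t in tasks]
    for fut in as_completed(futures):
        name, did_fail = fut.result()  # re-raises any worker exception here
        if did_fail:
            failed.append(name)

print(failed)  # ['BadBaz.download']
```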
autopkg_wrapper/utils/args.py
@@ -90,6 +90,12 @@ def setup_args():
         If this option is used, git commands won't be run
         """,
     )
+    parser.add_argument(
+        "--concurrency",
+        type=int,
+        default=int(os.getenv("AW_CONCURRENCY", "1")),
+        help="Number of recipes to run in parallel (default: 1)",
+    )
     parser.add_argument(
         "--slack-token",
         default=os.getenv("SLACK_WEBHOOK_TOKEN", None),
@@ -144,4 +150,41 @@ def setup_args():
         """,
     )

+    # Report processing options
+    parser.add_argument(
+        "--process-reports",
+        action="store_true",
+        help="Process autopkg report directories or zip and emit markdown summaries",
+    )
+    parser.add_argument(
+        "--reports-zip",
+        default=os.getenv("AW_REPORTS_ZIP", None),
+        help="Path to an autopkg_report-*.zip to extract and process",
+    )
+    parser.add_argument(
+        "--reports-extract-dir",
+        default=os.getenv("AW_REPORTS_EXTRACT_DIR", "autopkg_reports_summary/reports"),
+        help="Directory to extract the zip into (default: autopkg_reports_summary/reports)",
+    )
+    parser.add_argument(
+        "--reports-dir",
+        default=os.getenv("AW_REPORTS_DIR", None),
+        help="Directory of reports to process (if no zip provided)",
+    )
+    parser.add_argument(
+        "--reports-out-dir",
+        default=os.getenv("AW_REPORTS_OUT_DIR", "autopkg_reports_summary/summary"),
+        help="Directory to write markdown outputs (default: autopkg_reports_summary/summary)",
+    )
+    parser.add_argument(
+        "--reports-run-date",
+        default=os.getenv("AW_REPORTS_RUN_DATE", ""),
+        help="Run date string to include in the summary",
+    )
+    parser.add_argument(
+        "--reports-strict",
+        action="store_true",
+        help="Exit non-zero if any errors are detected in processed reports",
+    )
+
     return parser.parse_args()
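All of the new flags share one pattern: the argparse default is pulled from an environment variable, so CI workflows can configure a run without editing the command line, and an explicit flag still wins. A minimal sketch of that precedence (the parser here is illustrative):

```python
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument(
    "--concurrency",
    type=int,
    # Precedence: explicit CLI flag > AW_CONCURRENCY env var > "1".
    default=int(os.getenv("AW_CONCURRENCY", "1")),
)

print(parser.parse_args([]).concurrency)                      # env var, or 1
print(parser.parse_args(["--concurrency", "4"]).concurrency)  # 4
```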
autopkg_wrapper/utils/report_processor.py (new file)
@@ -0,0 +1,674 @@
+import json
+import logging
+import os
+import plistlib
+import re
+import zipfile
+from typing import Dict, List, Optional, Tuple
+
+
+def find_report_dirs(base_path: str) -> List[str]:
+    dirs: List[str] = []
+    if not os.path.exists(base_path):
+        return dirs
+    for root, subdirs, _files in os.walk(base_path):
+        for d in subdirs:
+            if d.startswith("autopkg_report-"):
+                dirs.append(os.path.join(root, d))
+    if not dirs:
+        try:
+            has_files = any(
+                os.path.isfile(os.path.join(base_path, f))
+                for f in os.listdir(base_path)
+            )
+        except FileNotFoundError:
+            has_files = False
+        if has_files:
+            dirs.append(base_path)
+    return sorted(dirs)
+
+
+def parse_json_file(path: str) -> Dict:
+    try:
+        with open(path, "r", encoding="utf-8") as f:
+            return json.load(f)
+    except Exception:
+        return {}
+
+
+def _infer_recipe_name_from_filename(path: str) -> str:
+    base = os.path.basename(path)
+    if base.endswith(".plist"):
+        base = base[:-6]
+    m = re.search(r"-(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2})$", base)
+    if m:
+        return base[: m.start()]
+    return base
+
+
+def parse_text_file(path: str) -> Dict[str, List]:
+    uploads: List[Dict] = []
+    policies: List[Dict] = []
+    errors: List[str] = []
+
+    re_error = re.compile(r"ERROR[:\s-]+(.+)", re.IGNORECASE)
+    re_upload = re.compile(
+        r"(Uploaded|Upload|Uploading)[^\n]*?(?P<name>[A-Za-z0-9 ._+\-]+)(?:[^\n]*?version[^\d]*(?P<version>\d+(?:\.\d+)+))?",
+        re.IGNORECASE,
+    )
+    re_policy = re.compile(r"Policy (created|updated):\s*(?P<name>.+)", re.IGNORECASE)
+
+    try:
+        with open(path, "r", encoding="utf-8", errors="ignore") as f:
+            for line in f:
+                m_err = re_error.search(line)
+                if m_err:
+                    errors.append(m_err.group(1).strip())
+                    continue
+
+                m_up = re_upload.search(line)
+                if m_up:
+                    uploads.append(
+                        {
+                            "name": (m_up.group("name") or "").strip(),
+                            "version": (m_up.group("version") or "-") or "-",
+                        }
+                    )
+                    continue
+
+                m_pol = re_policy.search(line)
+                if m_pol:
+                    action = "updated" if "updated" in line.lower() else "created"
+                    policies.append(
+                        {
+                            "name": m_pol.group("name").strip(),
+                            "action": action,
+                        }
+                    )
+    except Exception:
+        pass
+
+    return {"uploads": uploads, "policies": policies, "errors": errors}
+
+
+def parse_plist_file(path: str) -> Dict[str, List]:
+    uploads: List[Dict] = []
+    policies: List[Dict] = []
+    errors: List[str] = []
+    upload_rows: List[Dict] = []
+    policy_rows: List[Dict] = []
+    error_rows: List[Dict] = []
+
+    try:
+        with open(path, "rb") as f:
+            plist = plistlib.load(f)
+    except Exception:
+        return {
+            "uploads": uploads,
+            "policies": policies,
+            "errors": errors,
+            "upload_rows": upload_rows,
+            "policy_rows": policy_rows,
+            "error_rows": error_rows,
+        }
+
+    failures = plist.get("failures", []) or []
+
+    sr = plist.get("summary_results", {}) or {}
+
+    recipe_name = _infer_recipe_name_from_filename(path)
+    recipe_identifier: Optional[str] = None
+
+    jpu = sr.get("jamfpackageuploader_summary_result")
+    if isinstance(jpu, dict):
+        rows = jpu.get("data_rows") or []
+        for row in rows:
+            name = (row.get("name") or row.get("pkg_display_name") or "-").strip()
+            version = (row.get("version") or "-").strip()
+            uploads.append({"name": name, "version": version})
+            pkg_name = (
+                row.get("pkg_name") or row.get("pkg_display_name") or "-"
+            ).strip()
+            pkg_path = (row.get("pkg_path") or "").strip()
+            if pkg_path:
+                parts = pkg_path.split("/cache/")
+                if len(parts) > 1:
+                    after = parts[1]
+                    rid = after.split("/")[0]
+                    recipe_identifier = rid or recipe_identifier
+            upload_rows.append(
+                {
+                    "recipe_name": recipe_name,
+                    "recipe_identifier": recipe_identifier or "-",
+                    "package": pkg_name,
+                    "version": version or "-",
+                }
+            )
+
+    for key, block in sr.items():
+        if not isinstance(block, dict):
+            continue
+        hdr = [h.lower() for h in (block.get("header") or [])]
+        rows = block.get("data_rows") or []
+        summary_text = (block.get("summary_text") or "").lower()
+        looks_like_policy = (
+            "policy" in key.lower()
+            or "policy" in summary_text
+            or any("policy" in h for h in hdr)
+        )
+        if looks_like_policy and rows:
+            for row in rows:
+                name = row.get("policy_name") or row.get("name") or row.get("title")
+                action = row.get("action") or row.get("status") or row.get("result")
+                if name:
+                    policies.append(
+                        {
+                            "name": str(name).strip(),
+                            "action": (str(action).strip() if action else "-"),
+                        }
+                    )
+                    policy_rows.append(
+                        {
+                            "recipe_name": recipe_name,
+                            "recipe_identifier": recipe_identifier or "-",
+                            "policy": str(name).strip(),
+                        }
+                    )
+
+    for fail in failures:
+        if isinstance(fail, dict):
+            msg = fail.get("message") or json.dumps(fail)
+            rec = fail.get("recipe") or recipe_name
+        else:
+            msg = str(fail)
+            rec = recipe_name
+        errors.append(msg)
+        error_rows.append(
+            {
+                "recipe_name": rec,
+                "error_type": _classify_error_simple(msg),
+            }
+        )
+
+    return {
+        "uploads": uploads,
+        "policies": policies,
+        "errors": errors,
+        "upload_rows": upload_rows,
+        "policy_rows": policy_rows,
+        "error_rows": error_rows,
+    }
+
+
+def aggregate_reports(base_path: str) -> Dict:
+    summary = {
+        "uploads": [],
+        "policies": [],
+        "errors": [],
+        "recipes": 0,
+        "upload_rows": [],
+        "policy_rows": [],
+        "error_rows": [],
+    }
+    report_dirs = find_report_dirs(base_path)
+
+    for repdir in report_dirs:
+        for root, _subdirs, files in os.walk(repdir):
+            for fn in files:
+                p = os.path.join(root, fn)
+                ext = os.path.splitext(fn)[1].lower()
+
+                if ext == ".plist":
+                    data = parse_plist_file(p)
+                    summary["uploads"] += data.get("uploads", [])
+                    summary["policies"] += data.get("policies", [])
+                    summary["errors"] += data.get("errors", [])
+                    summary["upload_rows"] += data.get("upload_rows", [])
+                    summary["policy_rows"] += data.get("policy_rows", [])
+                    summary["error_rows"] += data.get("error_rows", [])
+                    summary["recipes"] += 1
+                elif ext == ".json":
+                    data = parse_json_file(p)
+                    if not data:
+                        continue
+                    if isinstance(data, dict):
+                        uploads = data.get("uploads")
+                        policies = data.get("policies")
+                        errors = data.get("errors")
+                        recipes = data.get("recipes")
+                        if isinstance(uploads, list):
+                            summary["uploads"] += uploads
+                        if isinstance(policies, list):
+                            summary["policies"] += policies
+                        if isinstance(errors, list):
+                            summary["errors"] += errors
+                        if isinstance(errors, list):
+                            for e in errors:
+                                if isinstance(e, dict):
+                                    rn = e.get("recipe") or "-"
+                                    msg = e.get("message") or json.dumps(e)
+                                    summary["error_rows"].append(
+                                        {
+                                            "recipe_name": rn,
+                                            "error_type": _classify_error_simple(
+                                                str(msg)
+                                            ),
+                                        }
+                                    )
+                        if isinstance(recipes, int):
+                            summary["recipes"] += recipes
+                else:
+                    data = parse_text_file(p)
+                    summary["uploads"] += data.get("uploads", [])
+                    summary["policies"] += data.get("policies", [])
+                    summary["errors"] += data.get("errors", [])
+
+    return summary
+
+
+# ---------- Rendering ----------
+
+
+def _aggregate_for_display(
+    summary: Dict,
+) -> Tuple[Dict[str, set], Dict[str, set], Dict[str, int]]:
+    uploads = summary.get("uploads", [])
+    policies = summary.get("policies", [])
+    errors = summary.get("errors", [])
+
+    def plausible_app_name(n: str) -> bool:
+        if not n or n == "-":
+            return False
+        if n.lower() in {"apps", "packages", "pkg", "file", "37"}:
+            return False
+        if not re.search(r"[A-Za-z]", n):
+            return False
+        return True
+
+    uploads_by_app: Dict[str, set] = {}
+    for u in uploads:
+        if isinstance(u, dict):
+            name = (u.get("name") or "-").strip()
+            ver = (u.get("version") or "-").strip()
+        else:
+            name = str(u).strip()
+            ver = "-"
+        if not plausible_app_name(name):
+            name = "-"
+        uploads_by_app.setdefault(name, set()).add(ver)
+
+    policies_by_name: Dict[str, set] = {}
+    for p in policies:
+        if isinstance(p, dict):
+            name = (p.get("name") or "-").strip()
+            action = (p.get("action") or "-").strip()
+        else:
+            name = str(p).strip()
+            action = "-"
+        policies_by_name.setdefault(name, set()).add(action)
+
+    error_categories: Dict[str, int] = {
+        "trust": 0,
+        "signature": 0,
+        "download": 0,
+        "network": 0,
+        "auth": 0,
+        "jamf": 0,
+        "other": 0,
+    }
+
+    def classify_error(msg: str) -> str:
+        lm = msg.lower()
+        if "trust" in lm:
+            return "trust"
+        if "signature" in lm or "codesign" in lm:
+            return "signature"
+        if "download" in lm or "fetch" in lm:
+            return "download"
+        if (
+            "proxy" in lm
+            or "timeout" in lm
+            or "network" in lm
+            or "url" in lm
+            or "dns" in lm
+        ):
+            return "network"
+        if (
+            "auth" in lm
+            or "token" in lm
+            or "permission" in lm
+            or "401" in lm
+            or "403" in lm
+        ):
+            return "auth"
+        if "jamf" in lm or "policy" in lm:
+            return "jamf"
+        return "other"
+
+    for e in errors:
+        emsg = e if isinstance(e, str) else json.dumps(e)
+        cat = classify_error(emsg)
+        error_categories[cat] = error_categories.get(cat, 0) + 1
+
+    return uploads_by_app, policies_by_name, error_categories
+
+
+def render_job_summary(summary: Dict, environment: str, run_date: str) -> str:
+    lines: List[str] = []
+    title_bits: List[str] = []
+    if environment:
+        title_bits.append(environment)
+    if run_date:
+        title_bits.append(run_date)
+    if title_bits:
+        lines.append(f"# Autopkg Report Summary ({' '.join(title_bits)})")
+    else:
+        lines.append("# Autopkg Report Summary")
+    lines.append("")
+
+    total_uploads_raw = len(summary.get("uploads", []))
+    uploads_by_app, policies_by_name, error_categories = _aggregate_for_display(summary)
+    total_uploads_apps = len(uploads_by_app)
+    total_policies = len(policies_by_name)
+    total_errors = len(summary.get("errors", []))
+    recipes = summary.get("recipes") or "N/A"
+
+    lines.append("| Metric | Value |")
+    lines.append("| --- | --- |")
+    lines.append(f"| Recipes processed | {recipes} |")
+    lines.append(
+        f"| Apps uploaded | {total_uploads_apps} (items: {total_uploads_raw}) |"
+    )
+    lines.append(f"| Policies changed | {total_policies} |")
+    lines.append(f"| Errors | {total_errors} |")
+    lines.append("")
+
+    if summary.get("upload_rows"):
+        lines.append("## Uploaded Recipes")
+        lines.append("")
+        lines.append("| Recipe Name | Identifier | Package | Version |")
+        lines.append("| --- | --- | --- | --- |")
+        for row in sorted(
+            summary["upload_rows"], key=lambda r: str(r.get("recipe_name", "")).lower()
+        ):
+            pkg = row.get("package", "-")
+            pkg_url = row.get("package_url")
+            pkg_cell = f"[{pkg}]({pkg_url})" if pkg_url else pkg
+            lines.append(
+                f"| {row.get('recipe_name', '-')} | {row.get('recipe_identifier', '-')} | {pkg_cell} | {row.get('version', '-')} |"
+            )
+        lines.append("")
+    else:
+        lines.append("No uploads in this run.")
+        lines.append("")
+
+    if summary.get("policy_rows"):
+        lines.append("## Policy Recipes")
+        lines.append("")
+        lines.append("| Recipe Name | Identifier | Policy |")
+        lines.append("| --- | --- | --- |")
+        for row in sorted(
+            summary["policy_rows"], key=lambda r: str(r.get("recipe_name", "")).lower()
+        ):
+            lines.append(
+                f"| {row.get('recipe_name', '-')} | {row.get('recipe_identifier', '-')} | {row.get('policy', '-')} |"
+            )
+        lines.append("")
+
+    if total_errors:
+        lines.append("## Errors Summary")
+        lines.append("")
+        lines.append("| Category | Count |")
+        lines.append("| --- | --- |")
+        for cat in [
+            "trust",
+            "signature",
+            "download",
+            "network",
+            "auth",
+            "jamf",
+            "other",
+        ]:
+            lines.append(f"| {cat} | {error_categories.get(cat, 0)} |")
+        lines.append("")
+
+    return "\n".join(lines)
+
+
+def render_issue_body(summary: Dict, environment: str, run_date: str) -> str:
+    lines: List[str] = []
+    total_errors = len(summary.get("errors", []))
+    _uploads_by_app, _policies_by_name, _error_categories = _aggregate_for_display(
+        summary
+    )
+
+    prefix = "Autopkg run"
+    suffix_bits: List[str] = []
+    if run_date:
+        suffix_bits.append(f"on {run_date}")
+    if environment:
+        suffix_bits.append(f"({environment})")
+    suffix = (" ".join(suffix_bits)).strip()
+    if suffix:
+        lines.append(f"{prefix} {suffix} reported {total_errors} error(s).")
+    else:
+        lines.append(f"{prefix} reported {total_errors} error(s).")
+    lines.append("")
+    lines.append("### Errors")
+    lines.append("| Recipe | Error Type |")
+    lines.append("| --- | --- |")
+    for row in summary.get("error_rows", []):
+        lines.append(
+            f"| {row.get('recipe_name', '-')} | {row.get('error_type', 'other')} |"
+        )

+    lines.append("")
+
+    return "\n".join(lines)
+
+
+# ---------- Utility ----------
+
+
+def _redact_sensitive(s: str) -> str:
+    s = re.sub(r"ghs_[A-Za-z0-9]+", "ghs_***", s)
+    s = re.sub(
+        r"(Authorization:\s*token)\s+[A-Za-z0-9_\-]+",
+        r"\1 ***",
+        s,
+        flags=re.IGNORECASE,
+    )
+    s = re.sub(r"(Bearer)\s+[A-Za-z0-9._\-]+", r"\1 ***", s, flags=re.IGNORECASE)
+    return s
+
+
+def _classify_error_simple(msg: str) -> str:
+    lm = msg.lower()
+    if "trust" in lm:
+        return "trust"
+    if "signature" in lm or "codesign" in lm:
+        return "signature"
+    if (
+        "401" in lm
+        or "403" in lm
+        or "auth" in lm
+        or "token" in lm
+        or "permission" in lm
+    ):
+        return "auth"
+    if "download" in lm or "fetch" in lm or "curl" in lm:
+        return "download"
+    if (
+        "proxy" in lm
+        or "timeout" in lm
+        or "network" in lm
+        or "url" in lm
+        or "dns" in lm
+    ):
+        return "network"
+    if "jamf" in lm or "policy" in lm:
+        return "jamf"
+    return "other"
+
+
+# ---------- Jamf Helpers ----------
+
+
+def _normalize_host(url: str) -> str:
+    h = (url or "").strip()
+    if h.startswith("https://"):
+        h = h[len("https://") :]
+    if h.startswith("http://"):
+        h = h[len("http://") :]
+    return h.rstrip("/")
+
+
+def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> Dict[str, str]:
+    host = _normalize_host(jss_url)
+    _ = host  # silence linters about unused var; kept for readability
+    pkg_map: Dict[str, str] = {}
+    try:
+        from jamf_pro_sdk import (  # type: ignore
+            ApiClientCredentialsProvider,
+            JamfProClient,
+        )
+
+        client = JamfProClient(
+            _normalize_host(jss_url),
+            ApiClientCredentialsProvider(client_id, client_secret),
+        )
+        packages = client.pro_api.get_packages_v1()
+        for p in packages:
+            try:
+                name = str(getattr(p, "packageName")).strip()
+                pid = str(getattr(p, "id")).strip()
+            except Exception as e:  # noqa: F841
+                # ignore objects that do not match expected shape
+                continue
+            if not name or not pid:
+                continue
+            url = f"{jss_url}/view/settings/computer-management/packages/{pid}"
+            if name not in pkg_map:
+                pkg_map[name] = url
+    except Exception as e:  # noqa: F841
+        return {}
+    return pkg_map
+
+
+def enrich_upload_rows(upload_rows: List[Dict], pkg_map: Dict[str, str]) -> int:
+    linked = 0
+    for row in upload_rows:
+        pkg_name = str(row.get("package") or "").strip()
+        url = pkg_map.get(pkg_name)
+        if url:
+            row["package_url"] = url
+            linked += 1
+    return linked
+
+
+def enrich_upload_rows_with_jamf(
+    summary: Dict, jss_url: str, client_id: str, client_secret: str
+) -> Tuple[int, List[str]]:
+    pkg_map = build_pkg_map(jss_url, client_id, client_secret)
+    linked = enrich_upload_rows(summary.get("upload_rows", []), pkg_map)
+    return linked, sorted(set(pkg_map.keys()))
+
+
+def process_reports(
+    *,
+    zip_file: Optional[str],
+    extract_dir: str,
+    reports_dir: Optional[str],
+    environment: str = "",
+    run_date: str = "",
+    out_dir: str,
+    debug: bool,
+    strict: bool,
+) -> int:
+    os.makedirs(out_dir, exist_ok=True)
+
+    if zip_file:
+        zpath = zip_file
+        if not os.path.exists(zpath):
+            raise FileNotFoundError(f"zip file not found: {zpath}")
+        os.makedirs(extract_dir, exist_ok=True)
+        with zipfile.ZipFile(zpath, "r") as zf:
+            zf.extractall(extract_dir)
+        process_dir = extract_dir
+    else:
+        process_dir = reports_dir or extract_dir
+
+    summary = aggregate_reports(process_dir)
+
+    jss_url = os.environ.get("AUTOPKG_JSS_URL")
+    jss_client_id = os.environ.get("AUTOPKG_CLIENT_ID")
+    jss_client_secret = os.environ.get("AUTOPKG_CLIENT_SECRET")
+    jamf_attempted = False
+    jamf_linked = 0
+    jamf_keys: List[str] = []
+    jamf_total = len(summary.get("upload_rows", []))
+    if jss_url and jss_client_id and jss_client_secret and jamf_total:
+        jamf_attempted = True
+        try:
+            jamf_linked, jamf_keys = enrich_upload_rows_with_jamf(
+                summary, jss_url, jss_client_id, jss_client_secret
+            )
+        except Exception:
+            jamf_linked = 0
+
+    job_md = render_job_summary(summary, environment, run_date)
+    issue_md = None
+    if summary.get("errors"):
+        issue_md = render_issue_body(summary, environment, run_date)
+
+    with open(os.path.join(out_dir, "job_summary.md"), "w", encoding="utf-8") as f:
+        f.write(job_md)
+
+    if issue_md:
+        with open(os.path.join(out_dir, "errors_issue.md"), "w", encoding="utf-8") as f:
+            f.write(issue_md)
+
+    jamf_log_path = ""
+    if debug:
+        jamf_log_path = os.path.join(out_dir, "jamf_lookup_debug.json")
+        try:
+            upload_pkg_names = [
+                str(r.get("package") or "").strip()
+                for r in summary.get("upload_rows", [])
+            ]
+            matched = [
+                r for r in summary.get("upload_rows", []) if r.get("package_url")
+            ]
+            unmatched = [
+                r for r in summary.get("upload_rows", []) if not r.get("package_url")
+            ]
+            diag = {
+                "jss_url": jss_url or "",
+                "jamf_keys_count": len(jamf_keys),
+                "jamf_keys_sample": jamf_keys[:20],
+                "uploads_count": len(upload_pkg_names),
+                "matched_count": len(matched),
+                "unmatched_count": len(unmatched),
+                "unmatched_names": [r.get("package") for r in unmatched][:20],
+            }
+            with open(jamf_log_path, "w", encoding="utf-8") as jf:
+                json.dump(diag, jf, indent=2)
+        except Exception:
+            jamf_log_path = ""
+
+    status = [
+        f"Processed reports in '{process_dir}'. Recipes: {summary.get('recipes', 'N/A')}",
+        f"Summary: '{os.path.join(out_dir, 'job_summary.md')}'",
+        f"Errors file: {'errors_issue.md' if issue_md else 'none'}",
+    ]
+    if jamf_attempted:
+        status.append(f"Jamf links added: {jamf_linked}/{jamf_total}")
+        if jamf_log_path:
+            status.append(f"Jamf lookup log: '{jamf_log_path}'")
+    else:
+        status.append("Jamf links: skipped (missing env or no uploads)")
+    logging.info(". ".join(status))
+
+    if strict and summary.get("errors"):
+        return 1
+    return 0
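For reference, `process_reports` takes keyword-only arguments (note the bare `*`), so a direct call from another script spells out every parameter. A sketch under the assumption that reports already exist in the default directory — paths and the environment label are illustrative:

```python
from autopkg_wrapper.utils.report_processor import process_reports

# Returns 1 only when strict=True and errors were found; otherwise 0.
rc = process_reports(
    zip_file=None,                                  # read a directory instead of a zip
    extract_dir="autopkg_reports_summary/reports",  # only a fallback when reports_dir is None
    reports_dir="/private/tmp/autopkg",
    environment="prod",                             # illustrative label for the summary title
    run_date="2026-02-02",
    out_dir="autopkg_reports_summary/summary",
    debug=False,
    strict=True,
)
raise SystemExit(rc)
```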
autopkg_wrapper-2026.2.2.dist-info/METADATA (new file)
@@ -0,0 +1,105 @@
+Metadata-Version: 2.4
+Name: autopkg-wrapper
+Version: 2026.2.2
+Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
+Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
+Author-email: James Smith <james@smithjw.me>
+License-Expression: BSD-3-Clause
+License-File: LICENSE
+Requires-Python: ~=3.14.0
+Requires-Dist: chardet
+Requires-Dist: idna
+Requires-Dist: jamf-pro-sdk
+Requires-Dist: pygithub
+Requires-Dist: requests
+Requires-Dist: ruamel-yaml
+Requires-Dist: toml
+Requires-Dist: urllib3
+Description-Content-Type: text/markdown
+
+# autopkg-wrapper
+
+`autopkg_wrapper` is a small package that can be used to run [`autopkg`](https://github.com/autopkg/autopkg) within CI/CD environments such as GitHub Actions.
+
+The easiest way to run it is by installing with pip.
+
+```shell
+pip install autopkg-wrapper
+```
+
+## Command Line Parameters
+
+```shell
+-h, --help                    Show this help message and exit
+--recipe-file RECIPE_FILE     Path to a list of recipes to run (cannot be run with --recipes)
+--recipes [RECIPES ...]       Recipes to run with autopkg (cannot be run with --recipe-file)
+--debug                       Enable debug logging when running script
+--disable-recipe-trust-check  If this option is used, recipe trust verification will not be run prior to a recipe run.
+--github-token GITHUB_TOKEN   A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
+--branch-name BRANCH_NAME     Branch name to be used where recipe overrides have failed their trust verification and need to be updated.
+                              By default, this will be in the format of "fix/update_trust_information/YYYY-MM-DDTHH-MM-SS"
+--create-pr                   If enabled, autopkg_wrapper will open a PR for updated trust information
+--create-issues               Create a GitHub issue for recipes that fail during processing
+--disable-git-commands        If this option is used, git commands won't be run
+--post-processors [POST_PROCESSORS ...]
+                              One or more autopkg post processors to run after each recipe execution
+--autopkg-prefs AW_AUTOPKG_PREFS_FILE
+                              Path to the autopkg preferences you'd like to use
+--overrides-repo-path AUTOPKG_OVERRIDES_REPO_PATH
+                              The path on disk to the git repository containing the autopkg overrides directory. If none is provided, we will try to determine it for you.
+--concurrency CONCURRENCY     Number of recipes to run in parallel (default: 1)
+--process-reports             Process autopkg report directories or zip and emit markdown summaries (runs after recipes complete)
+--reports-zip REPORTS_ZIP     Path to an autopkg_report-*.zip to extract and process
+--reports-extract-dir REPORTS_EXTRACT_DIR
+                              Directory to extract the zip into (default: autopkg_reports_summary/reports)
+--reports-dir REPORTS_DIR     Directory of reports to process (if no zip provided). Defaults to /private/tmp/autopkg when processing after a run
+--reports-out-dir REPORTS_OUT_DIR
+                              Directory to write markdown outputs (default: autopkg_reports_summary/summary)
+--reports-run-date REPORTS_RUN_DATE
+                              Run date string to include in the summary
+--reports-strict              Exit non-zero if any errors are detected in processed reports
+```
+
+## Examples
+
+Run recipes (serial):
+
+```bash
+autopkg_wrapper --recipes Foo.download Bar.download
+```
+
+Run 3 recipes concurrently and process reports afterward:
+
+```bash
+autopkg_wrapper \
+  --recipe-file /path/to/recipe_list.txt \
+  --concurrency 3 \
+  --disable-git-commands \
+  --process-reports \
+  --reports-out-dir /tmp/autopkg_reports_summary \
+  --reports-strict
+```
+
+Process a reports zip explicitly (no recipe run):
+
+```bash
+autopkg_wrapper \
+  --process-reports \
+  --reports-zip /path/to/autopkg_report-2026-02-02.zip \
+  --reports-extract-dir /tmp/autopkg_reports \
+  --reports-out-dir /tmp/autopkg_reports_summary
+```
+
+Notes:
+
+- During recipe runs, per-recipe plist reports are written to `/private/tmp/autopkg`.
+- When `--process-reports` is supplied without `--reports-zip` or `--reports-dir`, the tool processes `/private/tmp/autopkg`.
+- If `AUTOPKG_JSS_URL`, `AUTOPKG_CLIENT_ID`, and `AUTOPKG_CLIENT_SECRET` are set, uploaded package rows are enriched with Jamf package links.
+- No extra CLI flag is required; enrichment runs automatically when all three env vars are present.
+
+An example folder structure and GitHub Actions Workflow is available within the [`actions-demo`](actions-demo)
+
+## Credits
+
+- [`autopkg_tools` from Facebook](https://github.com/facebook/IT-CPE/tree/main/legacy/autopkg_tools)
+- [`autopkg_tools` from Facebook, modified by Gusto](https://github.com/Gusto/it-cpe-opensource/tree/main/autopkg)
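The Jamf enrichment mentioned in the README's notes reduces to a name → URL lookup over the upload rows: `build_pkg_map` builds the map from the Jamf Pro API, and `enrich_upload_rows` applies it. A sketch of the lookup step with a hard-coded map standing in for a live API call (the package name and URL are made up):

```python
from autopkg_wrapper.utils.report_processor import enrich_upload_rows

# Normally produced by build_pkg_map() from the Jamf Pro API; hard-coded here.
pkg_map = {
    "Firefox-140.0.pkg": "https://example.jamfcloud.com/view/settings/computer-management/packages/12",
}
upload_rows = [{"package": "Firefox-140.0.pkg", "version": "140.0"}]

linked = enrich_upload_rows(upload_rows, pkg_map)
print(linked)                         # 1
print(upload_rows[0]["package_url"])  # the Jamf package URL
```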
autopkg_wrapper-2026.2.2.dist-info/RECORD (new file)
@@ -0,0 +1,14 @@
+autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+autopkg_wrapper/autopkg_wrapper.py,sha256=lIcsLJoaHqhQx8yWvew3_GVm3Vn3DgAp9nBAvfz2JnY,15364
+autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+autopkg_wrapper/notifier/slack.py,sha256=aPxQDGd5zPxSsu3mEqalNOF0ly0QnYog0ieHokd5-OY,1979
+autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+autopkg_wrapper/utils/args.py,sha256=s7QawLF_WV8nReXTLlyXzT1yGL3M03FXegeTUJ3mzNw,6463
+autopkg_wrapper/utils/git_functions.py,sha256=Ojsq-wQsw7Gezq9pYDTtXF9SxrK9b9Cfap3mbJyVgdw,4456
+autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
+autopkg_wrapper/utils/report_processor.py,sha256=kjKgumD2ERYOrPqvg6ozmIsOxDZLSabs1UTjm4bMl6o,22391
+autopkg_wrapper-2026.2.2.dist-info/METADATA,sha256=j07Aix7InD5wasrgREyTNqIEVU3kiMqqCPc3ayDOOVU,4936
+autopkg_wrapper-2026.2.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+autopkg_wrapper-2026.2.2.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
+autopkg_wrapper-2026.2.2.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
+autopkg_wrapper-2026.2.2.dist-info/RECORD,,
*.dist-info/WHEEL
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.27.0
+Generator: hatchling 1.28.0
 Root-Is-Purelib: true
 Tag: py3-none-any
autopkg_wrapper-2025.8.1.dist-info/METADATA (removed)
@@ -1,54 +0,0 @@
-Metadata-Version: 2.4
-Name: autopkg-wrapper
-Version: 2025.8.1
-Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
-Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
-Author-email: James Smith <james@smithjw.me>
-License-Expression: BSD-3-Clause
-License-File: LICENSE
-Requires-Python: ~=3.12.0
-Requires-Dist: chardet
-Requires-Dist: idna
-Requires-Dist: pygithub
-Requires-Dist: requests
-Requires-Dist: ruamel-yaml
-Requires-Dist: toml
-Requires-Dist: urllib3
-Description-Content-Type: text/markdown
-
-# autopkg-wrapper
-
-`autopkg_wrapper` is a small package that can be used to run [`autopkg`](https://github.com/autopkg/autopkg) within CI/CD environments such as GitHub Actions.
-
-The easiest way to run it is by installing with pip.
-
-```shell
-pip install autopkg-wrapper
-```
-
-## Command Line Parameters
-
-```shell
--h, --help                    Show this help message and exit
---recipe-file RECIPE_FILE     Path to a list of recipes to run (cannot be run with --recipes)
---recipes [RECIPES ...]       Recipes to run with autopkg (cannot be run with --recipe-file)
---debug                       Enable debug logging when running script
---override-trust              If set recipe override trust verification will be disabled. (Default: True)
---github-token GITHUB_TOKEN   A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
---branch-name BRANCH_NAME     Branch name to be used where recipe overrides have failed their trust verification and need to be updated.
-                              By default, this will be in the format of "fix/update_trust_information/YYYY-MM-DDTHH-MM-SS"
---create-pr                   If enabled, autopkg_wrapper will open a PR for updated trust information
---autopkg-prefs AW_AUTOPKG_PREFS_FILE
-                              Path to the autopkg preferences you'd like to use
---autopkg-overrides-repo-path AUTOPKG_OVERRIDES_REPO_PATH
-                              The path on disk to the git repository containing the autopkg overrides directory. If none is provided, we will try to determine it for you.
-```
-
-## Example
-
-An example folder structure and GitHub Actions Workflow is available within the [`actions-demo`](actions-demo)
-
-## Credits
-
-- [`autopkg_tools` from Facebook](https://github.com/facebook/IT-CPE/tree/main/legacy/autopkg_tools)
-- [`autopkg_tools` from Facebook, modified by Gusto](https://github.com/Gusto/it-cpe-opensource/tree/main/autopkg)
autopkg_wrapper-2025.8.1.dist-info/RECORD (removed)
@@ -1,13 +0,0 @@
-autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autopkg_wrapper/autopkg_wrapper.py,sha256=dF8BGhk1IpP4w6lRtJqgpY-VK9vkoOiD0jidIUaSn9M,13457
-autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autopkg_wrapper/notifier/slack.py,sha256=aPxQDGd5zPxSsu3mEqalNOF0ly0QnYog0ieHokd5-OY,1979
-autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-autopkg_wrapper/utils/args.py,sha256=6sghB9DWZmv7VLUR9uJA5WkhxsZ08Ri1qoUY5rxydjY,4883
-autopkg_wrapper/utils/git_functions.py,sha256=Ojsq-wQsw7Gezq9pYDTtXF9SxrK9b9Cfap3mbJyVgdw,4456
-autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
-autopkg_wrapper-2025.8.1.dist-info/METADATA,sha256=v97bxd_6_vj97C7Av4hl7vCXgUBWu-SHIKmn-QGDLMc,2527
-autopkg_wrapper-2025.8.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-autopkg_wrapper-2025.8.1.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
-autopkg_wrapper-2025.8.1.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
-autopkg_wrapper-2025.8.1.dist-info/RECORD,,