autopkg-wrapper 2026.2.6-py3-none-any.whl → 2026.2.8-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,7 +11,10 @@ from autopkg_wrapper.models.recipe import Recipe
  from autopkg_wrapper.notifier import slack
  from autopkg_wrapper.utils.args import setup_args
  from autopkg_wrapper.utils.logging import setup_logger
- from autopkg_wrapper.utils.recipe_batching import build_recipe_batches, recipe_type_for
+ from autopkg_wrapper.utils.recipe_batching import (
+ build_recipe_batches,
+ describe_recipe_batches,
+ )
  from autopkg_wrapper.utils.recipe_ordering import order_recipe_list
  from autopkg_wrapper.utils.report_processor import process_reports
 
@@ -27,7 +30,7 @@ def get_override_repo_info(args):
  autopkg_prefs_path = Path(args.autopkg_prefs).resolve()
 
  if autopkg_prefs_path.suffix == ".json":
- with open(autopkg_prefs_path, "r") as f:
+ with open(autopkg_prefs_path) as f:
  autopkg_prefs = json.load(f)
  elif autopkg_prefs_path.suffix == ".plist":
  autopkg_prefs = plistlib.loads(autopkg_prefs_path.read_bytes())
@@ -103,7 +106,7 @@ def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):
 
  git.stage_recipe(git_info)
  git.commit_recipe(
- git_info, message=f"Updating Trust Info for {recipe.name}"
+ git_info, message=f"Updating Trust Info for {recipe.identifier}"
  )
  git.pull_branch(git_info)
  git.push_branch(git_info)
@@ -120,21 +123,21 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
  """
  recipe_list = None
 
- logging.info(f"Recipes: {recipes}") if recipes else None
- logging.info(f"Recipe List: {recipe_file}") if recipe_file else None
+ logging.debug(f"Recipes: {recipes}") if recipes else None
+ logging.debug(f"Recipe List: {recipe_file}") if recipe_file else None
 
  if recipe_file:
  if recipe_file.suffix == ".json":
- with open(recipe_file, "r") as f:
+ with open(recipe_file) as f:
  recipe_list = json.load(f)
  elif recipe_file.suffix in {".yaml", ".yml"}:
  from ruamel.yaml import YAML
 
  yaml = YAML(typ="safe")
- with open(recipe_file, "r", encoding="utf-8") as f:
+ with open(recipe_file, encoding="utf-8") as f:
  recipe_list = yaml.load(f)
  elif recipe_file.suffix == ".txt":
- with open(recipe_file, "r") as f:
+ with open(recipe_file) as f:
  recipe_list = f.read().splitlines()
  if recipes:
  if isinstance(recipes, list):
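As the hunk above shows, `parse_recipe_list` accepts the recipe list as a JSON array, a YAML sequence, or one recipe name per line in a text file. The following is a condensed sketch of just that dispatch, with the surrounding argument handling omitted; `load_recipe_list` is a hypothetical name used only for illustration.

```python
# Condensed sketch of the suffix-based dispatch shown in the hunk above.
import json
from pathlib import Path

def load_recipe_list(recipe_file: Path) -> list[str]:
    if recipe_file.suffix == ".json":
        with open(recipe_file) as f:
            return json.load(f)
    if recipe_file.suffix in {".yaml", ".yml"}:
        from ruamel.yaml import YAML  # same optional import as in the hunk above
        yaml = YAML(typ="safe")
        with open(recipe_file, encoding="utf-8") as f:
            return yaml.load(f)
    if recipe_file.suffix == ".txt":
        with open(recipe_file) as f:
            return f.read().splitlines()
    raise ValueError(f"Unsupported recipe file: {recipe_file}")
```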
@@ -247,7 +250,7 @@ def main():
  logging.info(f"Running recipes with concurrency={max_workers}")
 
  def run_one(r: Recipe):
- logging.info(f"Processing Recipe: {r.name}")
+ logging.info(f"Processing Recipe: {r.identifier}")
  process_recipe(
  recipe=r,
  disable_recipe_trust_check=args.disable_recipe_trust_check,
@@ -261,18 +264,22 @@ def main():
  recipe_list=recipe_list,
  recipe_processing_order=args.recipe_processing_order,
  )
- for batch in batches:
- batch_type = recipe_type_for(batch[0]) if batch else ""
- logging.info(
- f"Running {len(batch)} recipes for type={batch_type or 'unknown'}"
- )
+ logging.info("Recipe processing batches:")
+ batch_descriptions = describe_recipe_batches(batches)
+ for batch, batch_desc in zip(batches, batch_descriptions, strict=False):
+ batch_type = batch_desc.get("type") or "unknown"
+ logging.info(f"Batch type={batch_type} count={batch_desc.get('count', 0)}")
+ logging.info(f"Batch recipes: {batch_desc.get('recipes', [])}")
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
  futures = [executor.submit(run_one, r) for r in batch]
  for fut in as_completed(futures):
  r = fut.result()
  if r.error or r.results.get("failed"):
  failed_recipes.append(r)
- else:
+ elif recipe_list:
+ logging.info("Recipe processing batches:")
+ logging.info("Batch type=all count=%d", len(recipe_list))
+ logging.info("Batch recipes: %s", [r.identifier for r in recipe_list])
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
  futures = [executor.submit(run_one, r) for r in recipe_list]
  for fut in as_completed(futures):
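For orientation, the hunk above moves `main()` to a describe-then-run pattern: each batch is summarised up front for logging and then executed with a thread pool. The sketch below is a simplification under assumptions, not the wrapper's exact code: `run_recipe` stands in for its `run_one`/`process_recipe`, and the objects it returns are assumed to expose `error` and `results` as the hunk does.

```python
# Simplified sketch of the batched, concurrent run loop shown above.
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed

from autopkg_wrapper.utils.recipe_batching import describe_recipe_batches

def run_batches(batches, max_workers, run_recipe):
    failed = []
    for batch, desc in zip(batches, describe_recipe_batches(batches), strict=False):
        logging.info(f"Batch type={desc.get('type') or 'unknown'} count={desc.get('count', 0)}")
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            futures = [executor.submit(run_recipe, r) for r in batch]
            for fut in as_completed(futures):
                r = fut.result()  # assumed to return the processed recipe object
                if r.error or r.results.get("failed"):
                    failed.append(r)
    return failed
```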
@@ -8,7 +8,7 @@ from itertools import chain
  from pathlib import Path
 
 
- class Recipe(object):
+ class Recipe:
  def __init__(self, name: str, post_processors: list = None):
  self.filename = name
  self.error = False
@@ -27,6 +27,10 @@ class Recipe(object):
 
  return name
 
+ @property
+ def identifier(self):
+ return self.filename
+
  def verify_trust_info(self, args):
  verbose_output = ["-vvvv"] if args.debug else []
  prefs_file = (
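The new `identifier` property simply returns the raw filename handed to the constructor, so the log lines, commit messages, and PR titles changed elsewhere in this diff keep the full recipe type suffix. A minimal, hypothetical stand-in (the recipe name is made up and the rest of the real class is omitted):

```python
# Hypothetical stand-in illustrating the intent of the new property:
# identifier preserves the full recipe filename, including its type suffix.
class RecipeStub:
    def __init__(self, name: str):
        self.filename = name

    @property
    def identifier(self) -> str:
        return self.filename

print(RecipeStub("Firefox.upload.jamf").identifier)  # -> Firefox.upload.jamf
```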
@@ -11,11 +11,13 @@ def send_notification(recipe, token):
  logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
  return
 
+ recipe_identifier = getattr(recipe, "identifier", None) or recipe.name
+
  if recipe.verified is False:
- task_title = f"{recipe.name} failed trust verification"
+ task_title = f"{recipe_identifier} failed trust verification"
  task_description = recipe.results["message"]
  elif recipe.error:
- task_title = f"Failed to import {recipe.name}"
+ task_title = f"Failed to import {recipe_identifier}"
  if not recipe.results["failed"]:
  task_description = "Unknown error"
  else:
@@ -27,8 +29,8 @@ def send_notification(recipe, token):
  if "No releases found for repo" in task_description:
  return
  elif recipe.updated:
- task_title = f"{recipe.name} has been uploaded to Jamf"
- task_description = f"It's time to test {recipe.name}!"
+ task_title = f"{recipe_identifier} has been uploaded to Jamf"
+ task_description = f"It's time to test {recipe_identifier}!"
  else:
  return
 
@@ -56,6 +58,6 @@ def send_notification(recipe, token):
  )
  if response.status_code != 200:
  raise ValueError(
- "Request to slack returned an error %s, the response is:\n%s"
- % (response.status_code, response.text)
+ "Request to slack returned an error "
+ f"{response.status_code}, the response is:\n{response.text}"
  )
@@ -110,9 +110,9 @@ def push_branch(git_info):
 
 
  def create_pull_request(git_info, recipe):
- title = f"Update Trust Information: {recipe.name}"
+ title = f"Update Trust Information: {recipe.identifier}"
  body = f"""
- Recipe Verification information is out-of-date for {recipe.name}.
+ Recipe Verification information is out-of-date for {recipe.identifier}.
  Please review and merge the updated trust information for this override.
  """
 
@@ -152,7 +152,8 @@ def create_issue_for_failed_recipes(git_info, failed_recipes):
 
  body = "## Recipe Failure Details:\n\n"
  for recipe in failed_recipes:
- body += f"#### {recipe.name}\n"
+ identifier = getattr(recipe, "identifier", None) or recipe.name
+ body += f"#### {identifier}\n"
 
  if recipe.results.get("failed"):
  for failure in recipe.results.get("failed", []):
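Both the Slack notifier and the failed-recipe issue builder above fall back to `recipe.name` when an object does not expose the new `identifier` attribute. A small sketch of that duck-typed fallback, using hypothetical stand-in classes and recipe names:

```python
# Sketch of the getattr fallback used above: prefer .identifier, fall back to
# .name for recipe-like objects that predate the new property.
class LegacyRecipe:  # hypothetical object without an identifier attribute
    name = "Firefox"

class NewRecipe:  # mirrors the updated Recipe model
    name = "Firefox"
    identifier = "Firefox.upload.jamf"

for recipe in (LegacyRecipe(), NewRecipe()):
    label = getattr(recipe, "identifier", None) or recipe.name
    print(label)  # "Firefox", then "Firefox.upload.jamf"
```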
@@ -1,21 +1,24 @@
  from __future__ import annotations
 
- from typing import Iterable, Protocol, TypeVar
+ from collections.abc import Iterable
+ from typing import Protocol
 
 
  class HasFilename(Protocol):
  filename: str
 
 
- T = TypeVar("T", bound=HasFilename)
-
-
  def recipe_type_for(recipe: HasFilename) -> str:
  parts = recipe.filename.split(".", 1)
  return parts[1] if len(parts) == 2 else ""
 
 
- def build_recipe_batches(
+ def recipe_identifier_for(recipe: HasFilename) -> str:
+ identifier = getattr(recipe, "identifier", None)
+ return identifier if identifier else recipe.filename
+
+
+ def build_recipe_batches[T: HasFilename](
  recipe_list: Iterable[T], recipe_processing_order
  ) -> list[list[T]]:
  recipe_list = list(recipe_list)
@@ -39,3 +42,17 @@ def build_recipe_batches(
  if current_batch:
  batches.append(current_batch)
  return batches
+
+
+ def describe_recipe_batches[T: HasFilename](
+ batches: Iterable[Iterable[T]],
+ ) -> list[dict[str, object]]:
+ descriptions: list[dict[str, object]] = []
+ for batch in batches:
+ batch_list = list(batch)
+ batch_type = recipe_type_for(batch_list[0]) if batch_list else ""
+ identifiers = [recipe_identifier_for(r) for r in batch_list]
+ descriptions.append(
+ {"type": batch_type, "count": len(batch_list), "recipes": identifiers}
+ )
+ return descriptions
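Two notes on this module: `def build_recipe_batches[T: HasFilename](...)` is the Python 3.12+ inline type-parameter syntax, which is why the module-level `TypeVar` is dropped, and the new `describe_recipe_batches` turns each batch into a `{"type", "count", "recipes"}` dict for logging. A hedged usage sketch with made-up recipe names; the exact grouping for unlisted types depends on ordering logic not shown in these hunks:

```python
# Usage sketch for the helpers above; recipe names are hypothetical.
from dataclasses import dataclass

from autopkg_wrapper.utils.recipe_batching import (
    build_recipe_batches,
    describe_recipe_batches,
)

@dataclass
class FakeRecipe:  # satisfies the HasFilename protocol
    filename: str

recipes = [FakeRecipe("Firefox.self_service.jamf"), FakeRecipe("Firefox.upload.jamf")]
batches = build_recipe_batches(recipes, ["upload", "self_service"])
for desc in describe_recipe_batches(batches):
    print(desc["type"], desc["count"], desc["recipes"])
# e.g. an upload.jamf batch followed by a self_service.jamf batch, each listing
# recipe identifiers (here the raw filenames, since FakeRecipe has no identifier).
```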
@@ -64,6 +64,17 @@ def order_recipe_list(recipe_list, order):
  parts = recipe_name.split(".", 1)
  return parts[1] if len(parts) == 2 else ""
 
+ def order_recipes_by_type(recipes: list[str]) -> list[str]:
+ groups: dict[str, list[str]] = {}
+ for r in recipes:
+ t = recipe_type(r)
+ groups.setdefault(t, []).append(r)
+
+ ordered_recipes: list[str] = []
+ for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
+ ordered_recipes.extend(sorted(groups[t], key=str.casefold))
+ return ordered_recipes
+
  def recipe_segments_after_first_dot(recipe_name: str) -> list[str]:
  after_first = recipe_type(recipe_name)
  return [p for p in after_first.split(".") if p] if after_first else []
@@ -131,17 +142,11 @@ def order_recipe_list(recipe_list, order):
 
  ordered: list[str] = []
  for p in normalised_order:
- ordered.extend(sorted(pattern_groups[p], key=str.casefold))
+ ordered.extend(order_recipes_by_type(pattern_groups[p]))
 
  # Remaining recipes: group by their full type string and order groups alphabetically,
  # with empty-type last.
- groups: dict[str, list[str]] = {}
- for r in unmatched:
- t = recipe_type(r)
- groups.setdefault(t, []).append(r)
-
- for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
- ordered.extend(sorted(groups[t], key=str.casefold))
+ ordered.extend(order_recipes_by_type(unmatched))
 
  logging.debug(f"Recipe processing order: {normalised_order}")
  logging.debug(f"Ordered recipes: {ordered}")
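The refactor above factors the group-and-sort step into the nested `order_recipes_by_type` helper: recipes are grouped by the full type suffix after the first dot, groups are ordered case-insensitively with the empty type last, and recipes inside each group are sorted case-insensitively. A standalone, runnable sketch of that logic with hypothetical recipe names:

```python
# Standalone sketch of the nested order_recipes_by_type helper shown above;
# the recipe names are made up for illustration.
def recipe_type(recipe_name: str) -> str:
    parts = recipe_name.split(".", 1)
    return parts[1] if len(parts) == 2 else ""

def order_recipes_by_type(recipes: list[str]) -> list[str]:
    groups: dict[str, list[str]] = {}
    for r in recipes:
        groups.setdefault(recipe_type(r), []).append(r)
    ordered: list[str] = []
    # Empty type sorts last; other type groups are ordered case-insensitively.
    for t in sorted(groups, key=lambda x: (x == "", x.casefold())):
        ordered.extend(sorted(groups[t], key=str.casefold))
    return ordered

print(order_recipes_by_type(
    ["zoom.upload.jamf", "Firefox.upload.jamf", "Plain", "app.auto_update.jamf"]
))
# ['app.auto_update.jamf', 'Firefox.upload.jamf', 'zoom.upload.jamf', 'Plain']
```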
@@ -4,11 +4,10 @@ import os
  import plistlib
  import re
  import zipfile
- from typing import Dict, List, Optional, Tuple
 
 
- def find_report_dirs(base_path: str) -> List[str]:
- dirs: List[str] = []
+ def find_report_dirs(base_path: str) -> list[str]:
+ dirs: list[str] = []
  if not os.path.exists(base_path):
  return dirs
  for root, subdirs, _files in os.walk(base_path):
@@ -28,9 +27,9 @@ def find_report_dirs(base_path: str) -> List[str]:
  return sorted(dirs)
 
 
- def parse_json_file(path: str) -> Dict:
+ def parse_json_file(path: str) -> dict:
  try:
- with open(path, "r", encoding="utf-8") as f:
+ with open(path, encoding="utf-8") as f:
  return json.load(f)
  except Exception:
  return {}
@@ -46,10 +45,10 @@ def _infer_recipe_name_from_filename(path: str) -> str:
  return base
 
 
- def parse_text_file(path: str) -> Dict[str, List]:
- uploads: List[Dict] = []
- policies: List[Dict] = []
- errors: List[str] = []
+ def parse_text_file(path: str) -> dict[str, list]:
+ uploads: list[dict] = []
+ policies: list[dict] = []
+ errors: list[str] = []
 
  re_error = re.compile(r"ERROR[:\s-]+(.+)", re.IGNORECASE)
  re_upload = re.compile(
@@ -59,7 +58,7 @@ def parse_text_file(path: str) -> Dict[str, List]:
  re_policy = re.compile(r"Policy (created|updated):\s*(?P<name>.+)", re.IGNORECASE)
 
  try:
- with open(path, "r", encoding="utf-8", errors="ignore") as f:
+ with open(path, encoding="utf-8", errors="ignore") as f:
  for line in f:
  m_err = re_error.search(line)
  if m_err:
@@ -91,13 +90,13 @@ def parse_text_file(path: str) -> Dict[str, List]:
  return {"uploads": uploads, "policies": policies, "errors": errors}
 
 
- def parse_plist_file(path: str) -> Dict[str, List]:
- uploads: List[Dict] = []
- policies: List[Dict] = []
- errors: List[str] = []
- upload_rows: List[Dict] = []
- policy_rows: List[Dict] = []
- error_rows: List[Dict] = []
+ def parse_plist_file(path: str) -> dict[str, list]:
+ uploads: list[dict] = []
+ policies: list[dict] = []
+ errors: list[str] = []
+ upload_rows: list[dict] = []
+ policy_rows: list[dict] = []
+ error_rows: list[dict] = []
 
  try:
  with open(path, "rb") as f:
@@ -117,7 +116,7 @@ def parse_plist_file(path: str) -> Dict[str, List]:
  sr = plist.get("summary_results", {}) or {}
 
  recipe_name = _infer_recipe_name_from_filename(path)
- recipe_identifier: Optional[str] = None
+ recipe_identifier: str | None = None
 
  jpu = sr.get("jamfpackageuploader_summary_result")
  if isinstance(jpu, dict):
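For orientation, `parse_plist_file` reads an autopkg report plist whose `summary_results` dict is keyed by per-processor summaries such as `jamfpackageuploader_summary_result`. A trimmed, hypothetical example of that shape once loaded, showing only the keys referenced in the visible hunks (the per-processor payload is omitted):

```python
# Hypothetical, trimmed shape of a loaded autopkg report plist as consumed by
# parse_plist_file above; only keys referenced in the visible hunks are shown.
plist = {
    "summary_results": {
        "jamfpackageuploader_summary_result": {},  # per-processor payload omitted
    },
}
sr = plist.get("summary_results", {}) or {}
jpu = sr.get("jamfpackageuploader_summary_result")
print(isinstance(jpu, dict))  # True -> the uploader branch above would run
```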
@@ -200,7 +199,7 @@ def parse_plist_file(path: str) -> Dict[str, List]:
  }
 
 
- def aggregate_reports(base_path: str) -> Dict:
+ def aggregate_reports(base_path: str) -> dict:
  summary = {
  "uploads": [],
  "policies": [],
@@ -270,8 +269,8 @@ def aggregate_reports(base_path: str) -> Dict:
 
 
  def _aggregate_for_display(
- summary: Dict,
- ) -> Tuple[Dict[str, set], Dict[str, set], Dict[str, int]]:
+ summary: dict,
+ ) -> tuple[dict[str, set], dict[str, set], dict[str, int]]:
  uploads = summary.get("uploads", [])
  policies = summary.get("policies", [])
  errors = summary.get("errors", [])
@@ -281,11 +280,9 @@ def _aggregate_for_display(
  return False
  if n.lower() in {"apps", "packages", "pkg", "file", "37"}:
  return False
- if not re.search(r"[A-Za-z]", n):
- return False
- return True
+ return re.search(r"[A-Za-z]", n) is not None
 
- uploads_by_app: Dict[str, set] = {}
+ uploads_by_app: dict[str, set] = {}
  for u in uploads:
  if isinstance(u, dict):
  name = (u.get("name") or "-").strip()
@@ -297,7 +294,7 @@ def _aggregate_for_display(
  name = "-"
  uploads_by_app.setdefault(name, set()).add(ver)
 
- policies_by_name: Dict[str, set] = {}
+ policies_by_name: dict[str, set] = {}
  for p in policies:
  if isinstance(p, dict):
  name = (p.get("name") or "-").strip()
@@ -307,7 +304,7 @@ def _aggregate_for_display(
  action = "-"
  policies_by_name.setdefault(name, set()).add(action)
 
- error_categories: Dict[str, int] = {
+ error_categories: dict[str, int] = {
  "trust": 0,
  "signature": 0,
  "download": 0,
@@ -353,9 +350,9 @@ def _aggregate_for_display(
  return uploads_by_app, policies_by_name, error_categories
 
 
- def render_job_summary(summary: Dict, environment: str, run_date: str) -> str:
- lines: List[str] = []
- title_bits: List[str] = []
+ def render_job_summary(summary: dict, environment: str, run_date: str) -> str:
+ lines: list[str] = []
+ title_bits: list[str] = []
  if environment:
  title_bits.append(environment)
  if run_date:
@@ -435,15 +432,15 @@ def render_job_summary(summary: Dict, environment: str, run_date: str) -> str:
  return "\n".join(lines)
 
 
- def render_issue_body(summary: Dict, environment: str, run_date: str) -> str:
- lines: List[str] = []
+ def render_issue_body(summary: dict, environment: str, run_date: str) -> str:
+ lines: list[str] = []
  total_errors = len(summary.get("errors", []))
  _uploads_by_app, _policies_by_name, _error_categories = _aggregate_for_display(
  summary
  )
 
  prefix = "Autopkg run"
- suffix_bits: List[str] = []
+ suffix_bits: list[str] = []
  if run_date:
  suffix_bits.append(f"on {run_date}")
  if environment:
@@ -523,10 +520,10 @@ def _normalize_host(url: str) -> str:
  return h.rstrip("/")
 
 
- def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> Dict[str, str]:
+ def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> dict[str, str]:
  host = _normalize_host(jss_url)
  _ = host # silence linters about unused var; kept for readability
- pkg_map: Dict[str, str] = {}
+ pkg_map: dict[str, str] = {}
  try:
  from jamf_pro_sdk import ( # type: ignore
  ApiClientCredentialsProvider,
@@ -540,8 +537,8 @@ def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> Dict[str,
  packages = client.pro_api.get_packages_v1()
  for p in packages:
  try:
- name = str(getattr(p, "packageName")).strip()
- pid = str(getattr(p, "id")).strip()
+ name = str(p.packageName).strip()
+ pid = str(p.id).strip()
  except Exception as e: # noqa: F841
  # ignore objects that do not match expected shape
  continue
@@ -555,7 +552,7 @@ def build_pkg_map(jss_url: str, client_id: str, client_secret: str) -> Dict[str,
  return pkg_map
 
 
- def enrich_upload_rows(upload_rows: List[Dict], pkg_map: Dict[str, str]) -> int:
+ def enrich_upload_rows(upload_rows: list[dict], pkg_map: dict[str, str]) -> int:
  linked = 0
  for row in upload_rows:
  pkg_name = str(row.get("package") or "").strip()
@@ -567,8 +564,8 @@ def enrich_upload_rows(upload_rows: List[Dict], pkg_map: Dict[str, str]) -> int:
 
 
  def enrich_upload_rows_with_jamf(
- summary: Dict, jss_url: str, client_id: str, client_secret: str
- ) -> Tuple[int, List[str]]:
+ summary: dict, jss_url: str, client_id: str, client_secret: str
+ ) -> tuple[int, list[str]]:
  pkg_map = build_pkg_map(jss_url, client_id, client_secret)
  linked = enrich_upload_rows(summary.get("upload_rows", []), pkg_map)
  return linked, sorted(set(pkg_map.keys()))
@@ -576,9 +573,9 @@ def enrich_upload_rows_with_jamf(
 
  def process_reports(
  *,
- zip_file: Optional[str],
+ zip_file: str | None,
  extract_dir: str,
- reports_dir: Optional[str],
+ reports_dir: str | None,
  environment: str = "",
  run_date: str = "",
  out_dir: str,
@@ -605,7 +602,7 @@ def process_reports(
  jss_client_secret = os.environ.get("AUTOPKG_CLIENT_SECRET")
  jamf_attempted = False
  jamf_linked = 0
- jamf_keys: List[str] = []
+ jamf_keys: list[str] = []
  jamf_total = len(summary.get("upload_rows", []))
  if jss_url and jss_client_id and jss_client_secret and jamf_total:
  jamf_attempted = True
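The report-processor hunks above are almost entirely typing modernisation, but they also show the core flow: `aggregate_reports` collects per-recipe reports under a directory and `render_job_summary` turns that summary into markdown. A hedged usage sketch under assumptions: the package is installed, the environment and run-date strings are made up, and the extra behaviour handled by `process_reports` (zip extraction, Jamf enrichment, strict mode) is omitted.

```python
# Hedged sketch of the aggregate-then-render flow touched by the hunks above.
from autopkg_wrapper.utils.report_processor import aggregate_reports, render_job_summary

# /private/tmp/autopkg is the default report location noted in the README below.
summary = aggregate_reports("/private/tmp/autopkg")
markdown = render_job_summary(summary, environment="prod", run_date="2026-02-08")
print(markdown.splitlines()[0] if markdown else "no report data")
```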
@@ -0,0 +1,242 @@
+ Metadata-Version: 2.4
+ Name: autopkg-wrapper
+ Version: 2026.2.8
+ Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
+ Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
+ Author-email: James Smith <james@smithjw.me>
+ License-Expression: BSD-3-Clause
+ License-File: LICENSE
+ Requires-Python: ~=3.14.0
+ Requires-Dist: chardet
+ Requires-Dist: idna
+ Requires-Dist: jamf-pro-sdk
+ Requires-Dist: pygithub
+ Requires-Dist: requests
+ Requires-Dist: ruamel-yaml
+ Requires-Dist: toml
+ Requires-Dist: urllib3
+ Description-Content-Type: text/markdown
+
+ # autopkg-wrapper
+
+ `autopkg_wrapper` is a small package that can be used to run [`autopkg`](https://github.com/autopkg/autopkg) within CI/CD environments such as GitHub Actions.
+
+ The easiest way to run it is by installing with pip.
+
+ ```shell
+ pip install autopkg-wrapper
+ ```
+
+ ## Development
+
+ This project uses `uv` for dependency management and `mise` tasks for common workflows.
+
+ ```bash
+ mise run install
+ mise run test
+ mise run build
+ ```
+
+ ## Command Line Parameters
+
+ <!-- CLI-PARAMS-START -->
+
+ ```shell
+ usage: autopkg_wrapper [-h] [--recipe-file RECIPE_FILE |
+ --recipes [RECIPES ...]]
+ [--recipe-processing-order [RECIPE_PROCESSING_ORDER ...]]
+ [--autopkg-bin AUTOPKG_BIN] [--debug]
+ [--disable-recipe-trust-check] [--disable-git-commands]
+ [--concurrency CONCURRENCY]
+ [--github-token GITHUB_TOKEN]
+ [--branch-name BRANCH_NAME] [--create-pr]
+ [--create-issues]
+ [--overrides-repo-path OVERRIDES_REPO_PATH]
+ [--post-processors [POST_PROCESSORS ...]]
+ [--autopkg-prefs AUTOPKG_PREFS] [--process-reports]
+ [--reports-zip REPORTS_ZIP]
+ [--reports-extract-dir REPORTS_EXTRACT_DIR]
+ [--reports-dir REPORTS_DIR]
+ [--reports-out-dir REPORTS_OUT_DIR]
+ [--reports-run-date REPORTS_RUN_DATE]
+ [--reports-strict]
+
+ Run autopkg recipes
+
+ options:
+ -h, --help show this help message and exit
+ --recipe-file RECIPE_FILE
+ Provide the list of recipes to run via a JSON file for
+ easier management.
+ --recipes [RECIPES ...]
+ Recipes to run via CLI flag or environment variable.
+ If the '--recipes' flag is used, simply provide a
+ space-separated list on the command line: `autopkg-
+ wrapper --recipes recipe_one.download
+ recipe_two.download` Alternatively, you can provide a
+ space/comma-separated list in the 'AW_RECIPES'
+ environment variable: `export
+ AW_RECIPES="recipe_one.download recipe_two.download"`
+ `export AW_RECIPES="recipe_one.pkg,recipe_two.pkg"`
+ `autopkg-wrapper`
+ --recipe-processing-order [RECIPE_PROCESSING_ORDER ...]
+ This option comes in handy if you include additional
+ recipe type names in your overrides and wish them to
+ be processed in a specific order. We'll specifically
+ look for these recipe types after the first period (.)
+ in the recipe name. Order items can be either a full
+ type suffix (e.g. "upload.jamf") or a partial token
+ (e.g. "upload", "auto_update"). Partial tokens are
+ matched against the dot-separated segments after the
+ first '.' so recipes like "Foo.epz.auto_update.jamf"
+ will match "auto_update". This can also be provided
+ via the 'AW_RECIPE_PROCESSING_ORDER' environment
+ variable as a comma-separated list (e.g.
+ "upload,self_service,auto_update"). For example, if
+ you have the following recipes to be processed:
+ ExampleApp.auto_install.jamf ExampleApp.upload.jamf
+ ExampleApp.self_service.jamf And you want to ensure
+ that the .upload recipes are always processed first,
+ followed by .auto_install, and finally .self_service,
+ you would provide the following processing order:
+ `--recipe-processing-order upload.jamf
+ auto_install.jamf self_service.jamf` This would ensure
+ that all .upload recipes are processed before any
+ other recipe types. Within each recipe type, the
+ recipes will be ordered alphabetically. We assume that
+ no extensions are provided (but will strip them if
+ needed - extensions that are stripped include .recipe
+ or .recipe.yaml).
+ --autopkg-bin AUTOPKG_BIN
+ Path to the autopkg binary (default:
+ /usr/local/bin/autopkg). Can also be set via
+ AW_AUTOPKG_BIN.
+ --debug Enable debug logging when running script
+ --disable-recipe-trust-check
+ If this option is used, recipe trust verification will
+ not be run prior to a recipe run. This does not set
+ FAIL_RECIPES_WITHOUT_TRUST_INFO to No. You will need
+ to set that outside of this application.
+ --disable-git-commands
+ If this option is used, git commands won't be run
+ --concurrency CONCURRENCY
+ Number of recipes to run in parallel (default: 1)
+ --github-token GITHUB_TOKEN
+ --branch-name BRANCH_NAME
+ Branch name to be used recipe overrides have failed
+ their trust verification and need to be updated. By
+ default, this will be in the format of
+ "fix/update_trust_information/YYYY-MM-DDTHH-MM-SS"
+ --create-pr If enabled, autopkg_wrapper will open a PR for updated
+ trust information
+ --create-issues Create a GitHub issue for recipes that fail during
+ processing
+ --overrides-repo-path OVERRIDES_REPO_PATH
+ The path on disk to the git repository containing the
+ autopkg overrides directory. If none is provided, we
+ will try to determine it for you.
+ --post-processors [POST_PROCESSORS ...]
+ One or more autopkg post processors to run after each
+ recipe execution
+ --autopkg-prefs AUTOPKG_PREFS
+ Path to the autopkg preferences you'd like to use
+ --process-reports Process autopkg report directories or zip and emit
+ markdown summaries
+ --reports-zip REPORTS_ZIP
+ Path to an autopkg_report-*.zip to extract and process
+ --reports-extract-dir REPORTS_EXTRACT_DIR
+ Directory to extract the zip into (default:
+ autopkg_reports_summary/reports)
+ --reports-dir REPORTS_DIR
+ Directory of reports to process (if no zip provided)
+ --reports-out-dir REPORTS_OUT_DIR
+ Directory to write markdown outputs (default:
+ autopkg_reports_summary/summary)
+ --reports-run-date REPORTS_RUN_DATE
+ Run date string to include in the summary
+ --reports-strict Exit non-zero if any errors are detected in processed
+ reports
+ ```
+
+ <!-- CLI-PARAMS-END -->
+
+ ## Examples
+
+ Run recipes (serial):
+
+ ```bash
+ autopkg_wrapper --recipes Foo.download Bar.download
+ ```
+
+ Run 3 recipes concurrently and process reports afterward:
+
+ ```bash
+ autopkg_wrapper \
+ --recipe-file /path/to/recipe_list.txt \
+ --concurrency 3 \
+ --disable-git-commands \
+ --process-reports \
+ --reports-out-dir /tmp/autopkg_reports_summary \
+ --reports-strict
+ ```
+
+ Process a reports zip explicitly (no recipe run):
+
+ ```bash
+ autopkg_wrapper \
+ --process-reports \
+ --reports-zip /path/to/autopkg_report-2026-02-02.zip \
+ --reports-extract-dir /tmp/autopkg_reports \
+ --reports-out-dir /tmp/autopkg_reports_summary
+ ```
+
+ ## Recipe Processing Flow
+
+ ```mermaid
+ flowchart TD
+ start([Start]) --> args[Parse CLI args]
+ args --> load[Load recipes list]
+ load --> order{Processing order provided?}
+ order -- Yes --> batches[Build recipe batches by type]
+ order -- No --> all[Single batch of all recipes]
+ batches --> log[Log each batch type and identifiers]
+ all --> log
+ log --> run[Run batch recipes concurrently within batch]
+ run --> next{More batches?}
+ next -- Yes --> log
+ next -- No --> git[Apply git updates serially]
+ git --> notify[Send notifications]
+ notify --> pr{Create PR?}
+ pr -- Yes --> createPR[Open trust update PR]
+ pr -- No --> issues{Create issues?}
+ createPR --> issues
+ issues -- Yes --> createIssue[Open failures issue]
+ issues -- No --> reports{Process reports?}
+ createIssue --> reports
+ reports -- Yes --> process[Process reports output]
+ reports -- No --> done([Done])
+ process --> done
+ ```
+
+ Related code:
+
+ - `autopkg_wrapper/autopkg_wrapper.py`
+ - `autopkg_wrapper/utils/recipe_batching.py`
+ - `autopkg_wrapper/utils/recipe_ordering.py`
+ - `autopkg_wrapper/utils/report_processor.py`
+ - `autopkg_wrapper/notifier/slack.py`
+
+ Notes:
+
+ - During recipe runs, per‑recipe plist reports are written to `/private/tmp/autopkg`.
+ - Log output references full recipe identifiers (for example, `Foo.upload.jamf`) and batch logs list recipe identifiers grouped by type.
+ - When `--process-reports` is supplied without `--reports-zip` or `--reports-dir`, the tool processes `/private/tmp/autopkg`.
+ - If `AUTOPKG_JSS_URL`, `AUTOPKG_CLIENT_ID`, and `AUTOPKG_CLIENT_SECRET` are set, uploaded package rows are enriched with Jamf package links.
+ - No extra CLI flag is required; enrichment runs automatically when all three env vars are present.
+
+ An example folder structure and GitHub Actions Workflow is available within the [`actions-demo`](actions-demo)
+
+ ## Credits
+
+ - [`autopkg_tools` from Facebook](https://github.com/facebook/IT-CPE/tree/main/legacy/autopkg_tools)
+ - [`autopkg_tools` from Facebook, modified by Gusto](https://github.com/Gusto/it-cpe-opensource/tree/main/autopkg)
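The README notes above describe how Jamf package-link enrichment is gated, and the `process_reports` hunk earlier in this diff checks the same three environment variables. A small sketch of that gating under assumptions (the printed messages are illustrative, not the wrapper's actual log output):

```python
# Sketch of the enrichment gating described in the notes above: Jamf enrichment
# is only attempted when all three environment variables are present.
import os

jss_url = os.environ.get("AUTOPKG_JSS_URL")
client_id = os.environ.get("AUTOPKG_CLIENT_ID")
client_secret = os.environ.get("AUTOPKG_CLIENT_SECRET")

if jss_url and client_id and client_secret:
    print("Jamf package-link enrichment would be attempted")
else:
    print("Enrichment skipped; the markdown summary is still generated")
```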
@@ -0,0 +1,17 @@
+ autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ autopkg_wrapper/autopkg_wrapper.py,sha256=YBXttf3JDDP7tI3zoTAtBnXSGXssltLY7qk3Gy_cgNI,12728
+ autopkg_wrapper/models/recipe.py,sha256=OQabJ-4ORbhueYZuZrq9bovXbcAC9flBoTc8A998mlE,4953
+ autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ autopkg_wrapper/notifier/slack.py,sha256=O5Dc3ux7w258zTkfyDkjNbpfFLLCaCCoGUguOYfOETk,2056
+ autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ autopkg_wrapper/utils/args.py,sha256=1uEdjTIuqQQcealiEqihZNAamSDLbU9qBzJ6M-tpsS4,8423
+ autopkg_wrapper/utils/git_functions.py,sha256=5egBB4itrEusGrANoL2_8F13diRAZSfBcnZrUxn6Z5M,4976
+ autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
+ autopkg_wrapper/utils/recipe_batching.py,sha256=ohZUPyr6IFD8j4m9VSASsLOFh_9Fs0_UkIo6MIR4fIQ,1722
+ autopkg_wrapper/utils/recipe_ordering.py,sha256=LWxbktRo_NlDNaW7NL63GJHSXGspYHDvu-2mP1JATFE,6190
+ autopkg_wrapper/utils/report_processor.py,sha256=iB48pV_K2P11jkR9IZbeoA7dmyaZBiD0BHKwDOBwHgQ,22296
+ autopkg_wrapper-2026.2.8.dist-info/METADATA,sha256=vWcsRVxM3ymlfXEEZmrfjEODzCkaNsPMvcfFdhFrSfc,10750
+ autopkg_wrapper-2026.2.8.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ autopkg_wrapper-2026.2.8.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
+ autopkg_wrapper-2026.2.8.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
+ autopkg_wrapper-2026.2.8.dist-info/RECORD,,
@@ -1,107 +0,0 @@
- Metadata-Version: 2.4
- Name: autopkg-wrapper
- Version: 2026.2.6
- Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
- Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
- Author-email: James Smith <james@smithjw.me>
- License-Expression: BSD-3-Clause
- License-File: LICENSE
- Requires-Python: ~=3.14.0
- Requires-Dist: chardet
- Requires-Dist: idna
- Requires-Dist: jamf-pro-sdk
- Requires-Dist: pygithub
- Requires-Dist: requests
- Requires-Dist: ruamel-yaml
- Requires-Dist: toml
- Requires-Dist: urllib3
- Description-Content-Type: text/markdown
-
- # autopkg-wrapper
-
- `autopkg_wrapper` is a small package that can be used to run [`autopkg`](https://github.com/autopkg/autopkg) within CI/CD environments such as GitHub Actions.
-
- The easiest way to run it is by installing with pip.
-
- ```shell
- pip install autopkg-wrapper
- ```
-
- ## Command Line Parameters
-
- ```shell
- -h, --help Show this help message and exit
- --recipe-file RECIPE_FILE Path to a list of recipes to run (cannot be run with --recipes)
- --recipes [RECIPES ...] Recipes to run with autopkg (cannot be run with --recipe-file)
- --recipe-processing-order [RECIPE_PROCESSING_ORDER ...]
- Optional processing order for recipe "types" (suffix segments after the first '.'); supports partial tokens like upload/auto_update; env var AW_RECIPE_PROCESSING_ORDER expects comma-separated values
- --debug Enable debug logging when running script
- --disable-recipe-trust-check If this option is used, recipe trust verification will not be run prior to a recipe run.
- --github-token GITHUB_TOKEN A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
- --branch-name BRANCH_NAME Branch name to be used where recipe overrides have failed their trust verification and need to be updated.
- By default, this will be in the format of "fix/update_trust_information/YYYY-MM-DDTHH-MM-SS"
- --create-pr If enabled, autopkg_wrapper will open a PR for updated trust information
- --create-issues Create a GitHub issue for recipes that fail during processing
- --disable-git-commands If this option is used, git commands won't be run
- --post-processors [POST_PROCESSORS ...]
- One or more autopkg post processors to run after each recipe execution
- --autopkg-prefs AW_AUTOPKG_PREFS_FILE
- Path to the autopkg preferences you'd like to use
- --overrides-repo-path AUTOPKG_OVERRIDES_REPO_PATH
- The path on disk to the git repository containing the autopkg overrides directory. If none is provided, we will try to determine it for you.
- --concurrency CONCURRENCY Number of recipes to run in parallel (default: 1)
- --process-reports Process autopkg report directories or zip and emit markdown summaries (runs after recipes complete)
- --reports-zip REPORTS_ZIP Path to an autopkg_report-*.zip to extract and process
- --reports-extract-dir REPORTS_EXTRACT_DIR
- Directory to extract the zip into (default: autopkg_reports_summary/reports)
- --reports-dir REPORTS_DIR Directory of reports to process (if no zip provided). Defaults to /private/tmp/autopkg when processing after a run
- --reports-out-dir REPORTS_OUT_DIR
- Directory to write markdown outputs (default: autopkg_reports_summary/summary)
- --reports-run-date REPORTS_RUN_DATE
- Run date string to include in the summary
- --reports-strict Exit non-zero if any errors are detected in processed reports
- ```
-
- ## Examples
-
- Run recipes (serial):
-
- ```bash
- autopkg_wrapper --recipes Foo.download Bar.download
- ```
-
- Run 3 recipes concurrently and process reports afterward:
-
- ```bash
- autopkg_wrapper \
- --recipe-file /path/to/recipe_list.txt \
- --concurrency 3 \
- --disable-git-commands \
- --process-reports \
- --reports-out-dir /tmp/autopkg_reports_summary \
- --reports-strict
- ```
-
- Process a reports zip explicitly (no recipe run):
-
- ```bash
- autopkg_wrapper \
- --process-reports \
- --reports-zip /path/to/autopkg_report-2026-02-02.zip \
- --reports-extract-dir /tmp/autopkg_reports \
- --reports-out-dir /tmp/autopkg_reports_summary
- ```
-
- Notes:
-
- - During recipe runs, per‑recipe plist reports are written to `/private/tmp/autopkg`.
- - When `--process-reports` is supplied without `--reports-zip` or `--reports-dir`, the tool processes `/private/tmp/autopkg`.
- - If `AUTOPKG_JSS_URL`, `AUTOPKG_CLIENT_ID`, and `AUTOPKG_CLIENT_SECRET` are set, uploaded package rows are enriched with Jamf package links.
- - No extra CLI flag is required; enrichment runs automatically when all three env vars are present.
-
- An example folder structure and GitHub Actions Workflow is available within the [`actions-demo`](actions-demo)
-
- ## Credits
-
- - [`autopkg_tools` from Facebook](https://github.com/facebook/IT-CPE/tree/main/legacy/autopkg_tools)
- - [`autopkg_tools` from Facebook, modified by Gusto](https://github.com/Gusto/it-cpe-opensource/tree/main/autopkg)
@@ -1,17 +0,0 @@
- autopkg_wrapper/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- autopkg_wrapper/autopkg_wrapper.py,sha256=k4V-vEoCLUsUUNRxDh3VdWvxAsDIrYH5aSMMZ0HRlTY,12307
- autopkg_wrapper/models/recipe.py,sha256=xUENrhmCIXN8U86u8HwplSsoxP-vgznJZF_OfhGD9_Y,4891
- autopkg_wrapper/notifier/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- autopkg_wrapper/notifier/slack.py,sha256=pUsjwpVfwDSn3c09O3UbdcNtfD98q2fXJ_rKPWvDw7E,1959
- autopkg_wrapper/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- autopkg_wrapper/utils/args.py,sha256=1uEdjTIuqQQcealiEqihZNAamSDLbU9qBzJ6M-tpsS4,8423
- autopkg_wrapper/utils/git_functions.py,sha256=e7wiUIW8Pu6m4oK0LlH7Vnrvp8XzknwTPYXz-Ekn40o,4893
- autopkg_wrapper/utils/logging.py,sha256=3knpMViO_zAU8WM5bSImQaz5M01vMFk_raB4lt1cbvo,324
- autopkg_wrapper/utils/recipe_batching.py,sha256=13Xw952cUryeymYMICA-ATAQiRdpv89Mn4HK17Mynoo,1051
- autopkg_wrapper/utils/recipe_ordering.py,sha256=v5yn8KAcvOnNuvAL93ZXwkCUlmNnTGo3oNIqpUAF2jk,5974
- autopkg_wrapper/utils/report_processor.py,sha256=TjSvW02Jq62JhsHNmt_JmZCuQwT_x5RfJNfVTmIePrY,22420
- autopkg_wrapper-2026.2.6.dist-info/METADATA,sha256=UlniYyeX0seVYUVeXb76mJbJaAcoICtBkKQfHNyVuoM,5223
- autopkg_wrapper-2026.2.6.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- autopkg_wrapper-2026.2.6.dist-info/entry_points.txt,sha256=TVIcOt7OozzX1c00pwMGbBysaHg_v_N3mO3juoFqPpo,73
- autopkg_wrapper-2026.2.6.dist-info/licenses/LICENSE,sha256=PpNOQjZGcsKFuA0wU16YU7PueVxqPX4OnyZ7TlLQlq4,1602
- autopkg_wrapper-2026.2.6.dist-info/RECORD,,