autopkg-wrapper 2026.2.2.tar.gz → 2026.2.6.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.github/workflows/build-publish.yml +11 -13
  2. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/PKG-INFO +3 -1
  3. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/README.md +2 -0
  4. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/autopkg_wrapper.py +45 -146
  5. autopkg_wrapper-2026.2.6/autopkg_wrapper/models/recipe.py +139 -0
  6. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/notifier/slack.py +1 -1
  7. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/utils/args.py +27 -0
  8. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/utils/git_functions.py +20 -5
  9. autopkg_wrapper-2026.2.6/autopkg_wrapper/utils/recipe_batching.py +41 -0
  10. autopkg_wrapper-2026.2.6/autopkg_wrapper/utils/recipe_ordering.py +149 -0
  11. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/utils/report_processor.py +1 -1
  12. autopkg_wrapper-2026.2.6/mise.toml +25 -0
  13. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/pyproject.toml +1 -1
  14. autopkg_wrapper-2026.2.6/tests/test_args_utils.py +79 -0
  15. autopkg_wrapper-2026.2.6/tests/test_autopkg_commands.py +124 -0
  16. autopkg_wrapper-2026.2.6/tests/test_git_functions.py +81 -0
  17. autopkg_wrapper-2026.2.6/tests/test_order_recipe_list.py +86 -0
  18. autopkg_wrapper-2026.2.6/tests/test_parse_recipe_list.py +120 -0
  19. autopkg_wrapper-2026.2.6/tests/test_report_processor.py +121 -0
  20. autopkg_wrapper-2026.2.6/tests/test_setup_logger.py +26 -0
  21. autopkg_wrapper-2026.2.6/tests/test_slack_notifier.py +84 -0
  22. autopkg_wrapper-2026.2.6/uv.lock +418 -0
  23. autopkg_wrapper-2026.2.2/mise.toml +0 -3
  24. autopkg_wrapper-2026.2.2/uv.lock +0 -356
  25. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.github/dependabot.yml +0 -0
  26. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.github/workflows/codeql.yml +0 -0
  27. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.github/workflows/dependency-review.yml +0 -0
  28. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.gitignore +0 -0
  29. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/.pre-commit-config.yaml +0 -0
  30. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/CONTRIBUTING +0 -0
  31. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/LICENSE +0 -0
  32. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/actions-demo/.github/workflows/autopkg-wrapper-demo.yml +0 -0
  33. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/actions-demo/overrides/Google_Chrome.pkg.recipe.yaml +0 -0
  34. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/actions-demo/repo_list.txt +0 -0
  35. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/actions-demo/requirements.txt +0 -0
  36. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/__init__.py +0 -0
  37. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/notifier/__init__.py +0 -0
  38. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/utils/__init__.py +0 -0
  39. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/autopkg_wrapper/utils/logging.py +0 -0
  40. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/__init__.py +0 -0
  41. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/prefs.json +0 -0
  42. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/prefs.plist +0 -0
  43. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/recipe_list.json +0 -0
  44. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/recipe_list.txt +0 -0
  45. {autopkg_wrapper-2026.2.2 → autopkg_wrapper-2026.2.6}/tests/recipe_list.yaml +0 -0
.github/workflows/build-publish.yml
@@ -105,22 +105,20 @@ jobs:
  steps:
  - uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- - name: Setup UV
- uses: astral-sh/setup-uv@803947b9bd8e9f986429fa0c5a41c367cd732b41 # v7.2.1
+ - name: Setup mise
+ uses: jdx/mise-action@6d1e696aa24c1aa1bcc1adea0212707c71ab78a8 # v3.6.1
  with:
- activate-environment: true
- enable-cache: true
- cache-dependency-glob: uv.lock
+ install: true
+ cache: true

- - name: Setup Python
- uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
- with:
- python-version-file: pyproject.toml
+ - name: Run tests
+ run: mise run test
+
+ - name: Build package
+ env:
+ RELEASE_VERSION: ${{ needs.release.outputs.version }}
+ run: mise run build

- - name: Build Package with UV
- run: |
- uv version ${{ needs.release.outputs.version }}
- uv build
  - name: Upload Package Artifacts
  uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
  with:
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: autopkg-wrapper
- Version: 2026.2.2
+ Version: 2026.2.6
  Summary: A package used to execute some autopkg functions, primarily within the context of a GitHub Actions runner.
  Project-URL: Repository, https://github.com/smithjw/autopkg-wrapper
  Author-email: James Smith <james@smithjw.me>
@@ -33,6 +33,8 @@ pip install autopkg-wrapper
  -h, --help Show this help message and exit
  --recipe-file RECIPE_FILE Path to a list of recipes to run (cannot be run with --recipes)
  --recipes [RECIPES ...] Recipes to run with autopkg (cannot be run with --recipe-file)
+ --recipe-processing-order [RECIPE_PROCESSING_ORDER ...]
+ Optional processing order for recipe "types" (suffix segments after the first '.'); supports partial tokens like upload/auto_update; env var AW_RECIPE_PROCESSING_ORDER expects comma-separated values
  --debug Enable debug logging when running script
  --disable-recipe-trust-check If this option is used, recipe trust verification will not be run prior to a recipe run.
  --github-token GITHUB_TOKEN A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
README.md
@@ -14,6 +14,8 @@ pip install autopkg-wrapper
  -h, --help Show this help message and exit
  --recipe-file RECIPE_FILE Path to a list of recipes to run (cannot be run with --recipes)
  --recipes [RECIPES ...] Recipes to run with autopkg (cannot be run with --recipe-file)
+ --recipe-processing-order [RECIPE_PROCESSING_ORDER ...]
+ Optional processing order for recipe "types" (suffix segments after the first '.'); supports partial tokens like upload/auto_update; env var AW_RECIPE_PROCESSING_ORDER expects comma-separated values
  --debug Enable debug logging when running script
  --disable-recipe-trust-check If this option is used, recipe trust verification will not be run prior to a recipe run.
  --github-token GITHUB_TOKEN A token used to publish a PR to your GitHub repo if overrides require their trust to be updated
autopkg_wrapper/autopkg_wrapper.py
@@ -2,154 +2,20 @@
  import json
  import logging
  import plistlib
- import subprocess
  import sys
  from concurrent.futures import ThreadPoolExecutor, as_completed
- from datetime import datetime
- from itertools import chain
  from pathlib import Path

  import autopkg_wrapper.utils.git_functions as git
+ from autopkg_wrapper.models.recipe import Recipe
  from autopkg_wrapper.notifier import slack
  from autopkg_wrapper.utils.args import setup_args
  from autopkg_wrapper.utils.logging import setup_logger
+ from autopkg_wrapper.utils.recipe_batching import build_recipe_batches, recipe_type_for
+ from autopkg_wrapper.utils.recipe_ordering import order_recipe_list
  from autopkg_wrapper.utils.report_processor import process_reports


- class Recipe(object):
- def __init__(self, name: str, post_processors: list = None):
- self.filename = name
- self.error = False
- self.results = {}
- self.updated = False
- self.verified = None
- self.pr_url = None
- self.post_processors = post_processors
-
- self._keys = None
- self._has_run = False
-
- @property
- def name(self):
- name = self.filename.split(".")[0]
-
- return name
-
- def verify_trust_info(self, args):
- verbose_output = ["-vvvv"] if args.debug else None
- prefs_file = (
- ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else None
- )
- cmd = ["/usr/local/bin/autopkg", "verify-trust-info", self.filename]
- cmd = cmd + verbose_output if verbose_output else cmd
- cmd = cmd + prefs_file if prefs_file else cmd
- cmd = " ".join(cmd)
- logging.debug(f"cmd: {str(cmd)}")
-
- p = subprocess.Popen(
- cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
- )
- (output, err) = p.communicate()
- p_status = p.wait()
- if p_status == 0:
- self.verified = True
- else:
- err = err.decode()
- self.results["message"] = err
- self.verified = False
- return self.verified
-
- def update_trust_info(self, args):
- prefs_file = (
- ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else None
- )
- cmd = ["/usr/local/bin/autopkg", "update-trust-info", self.filename]
- cmd = cmd + prefs_file if prefs_file else cmd
- cmd = " ".join(cmd)
- logging.debug(f"cmd: {str(cmd)}")
-
- # Fail loudly if this exits 0
- try:
- subprocess.check_call(cmd, shell=True)
- except subprocess.CalledProcessError as e:
- logging.error(e.stderr)
- raise e
-
- def _parse_report(self, report):
- with open(report, "rb") as f:
- report_data = plistlib.load(f)
-
- failed_items = report_data.get("failures", [])
- imported_items = []
- if report_data["summary_results"]:
- # This means something happened
- munki_results = report_data["summary_results"].get(
- "munki_importer_summary_result", {}
- )
- imported_items.extend(munki_results.get("data_rows", []))
-
- return {"imported": imported_items, "failed": failed_items}
-
- def run(self, args):
- if self.verified is False:
- self.error = True
- self.results["failed"] = True
- self.results["imported"] = ""
- else:
- report_dir = Path("/private/tmp/autopkg")
- report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
- report_name = Path(f"{self.name}-{report_time}.plist")
-
- report_dir.mkdir(parents=True, exist_ok=True)
- report = report_dir / report_name
- report.touch(exist_ok=True)
-
- try:
- prefs_file = (
- ["--prefs", args.autopkg_prefs.as_posix()]
- if args.autopkg_prefs
- else None
- )
- verbose_output = ["-vvvv"] if args.debug else None
- post_processor_cmd = (
- list(
- chain.from_iterable(
- [
- ("--post", processor)
- for processor in self.post_processors
- ]
- )
- )
- if self.post_processors
- else None
- )
- cmd = [
- "/usr/local/bin/autopkg",
- "run",
- self.filename,
- "--report-plist",
- str(report),
- ]
- cmd = cmd + post_processor_cmd if post_processor_cmd else cmd
- cmd = cmd + verbose_output if verbose_output else cmd
- cmd = cmd + prefs_file if prefs_file else cmd
- cmd = " ".join(cmd)
-
- logging.debug(f"cmd: {str(cmd)}")
-
- subprocess.check_call(cmd, shell=True)
-
- except subprocess.CalledProcessError:
- self.error = True
-
- self._has_run = True
- self.results = self._parse_report(report)
- if not self.results["failed"] and not self.error:
- self.updated = True
-
- return self.results
-
-
  def get_override_repo_info(args):
  if args.overrides_repo_path:
  recipe_override_dirs = args.overrides_repo_path
@@ -246,7 +112,12 @@ def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):


  def parse_recipe_list(recipes, recipe_file, post_processors, args):
- """Parsing list of recipes into a common format"""
+ """Parse recipe inputs into a common list of recipe names.
+
+ The arguments assume that `recipes` and `recipe_file` are mutually exclusive.
+ If `args.recipe_processing_order` is provided, the list is re-ordered before
+ creating `Recipe` objects.
+ """
  recipe_list = None

  logging.info(f"Recipes: {recipes}") if recipes else None
@@ -256,6 +127,12 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
  if recipe_file.suffix == ".json":
  with open(recipe_file, "r") as f:
  recipe_list = json.load(f)
+ elif recipe_file.suffix in {".yaml", ".yml"}:
+ from ruamel.yaml import YAML
+
+ yaml = YAML(typ="safe")
+ with open(recipe_file, "r", encoding="utf-8") as f:
+ recipe_list = yaml.load(f)
  elif recipe_file.suffix == ".txt":
  with open(recipe_file, "r") as f:
  recipe_list = f.read().splitlines()
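The new branch above loads YAML recipe lists with ruamel.yaml's safe loader. As a minimal standalone sketch of the same load, assuming the hypothetical file contents shown in the comment (the package's own test fixtures are not shown in this diff):

from pathlib import Path

from ruamel.yaml import YAML

# Hypothetical recipe_list.yaml contents:
#   - Firefox.munki
#   - GoogleChrome.munki
recipe_file = Path("recipe_list.yaml")
yaml = YAML(typ="safe")  # same safe loader as the elif branch above
with open(recipe_file, "r", encoding="utf-8") as f:
    recipe_list = yaml.load(f)

print(recipe_list)  # e.g. ['Firefox.munki', 'GoogleChrome.munki']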
@@ -279,11 +156,16 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
  """Please provide recipes to run via the following methods:
  --recipes recipe_one.download recipe_two.download
  --recipe-file path/to/recipe_list.json
- Comma separated list in the AUTOPKG_RECIPES env variable"""
+ Comma separated list in the AW_RECIPES env variable"""
  )
  sys.exit(1)

- logging.info(f"Processing the following recipes: {recipe_list}")
+ if args.recipe_processing_order:
+ recipe_list = order_recipe_list(
+ recipe_list=recipe_list, order=args.recipe_processing_order
+ )
+
+ logging.info(f"Processing {len(recipe_list)} recipes.")
  recipe_map = [Recipe(name, post_processors=post_processors) for name in recipe_list]

  return recipe_map
@@ -374,12 +256,29 @@ def main():
  # Git updates and notifications are applied serially after all recipes finish
  return r

- with ThreadPoolExecutor(max_workers=max_workers) as executor:
- futures = [executor.submit(run_one, r) for r in recipe_list]
- for fut in as_completed(futures):
- r = fut.result()
- if r.error or r.results.get("failed"):
- failed_recipes.append(r)
+ if args.recipe_processing_order:
+ batches = build_recipe_batches(
+ recipe_list=recipe_list,
+ recipe_processing_order=args.recipe_processing_order,
+ )
+ for batch in batches:
+ batch_type = recipe_type_for(batch[0]) if batch else ""
+ logging.info(
+ f"Running {len(batch)} recipes for type={batch_type or 'unknown'}"
+ )
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ futures = [executor.submit(run_one, r) for r in batch]
+ for fut in as_completed(futures):
+ r = fut.result()
+ if r.error or r.results.get("failed"):
+ failed_recipes.append(r)
+ else:
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ futures = [executor.submit(run_one, r) for r in recipe_list]
+ for fut in as_completed(futures):
+ r = fut.result()
+ if r.error or r.results.get("failed"):
+ failed_recipes.append(r)

  # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
  for r in recipe_list:
autopkg_wrapper/models/recipe.py (new file)
@@ -0,0 +1,139 @@
+ from __future__ import annotations
+
+ import logging
+ import plistlib
+ import subprocess
+ from datetime import datetime
+ from itertools import chain
+ from pathlib import Path
+
+
+ class Recipe(object):
+ def __init__(self, name: str, post_processors: list = None):
+ self.filename = name
+ self.error = False
+ self.results = {}
+ self.updated = False
+ self.verified = None
+ self.pr_url = None
+ self.post_processors = post_processors
+
+ self._keys = None
+ self._has_run = False
+
+ @property
+ def name(self):
+ name = self.filename.split(".")[0]
+
+ return name
+
+ def verify_trust_info(self, args):
+ verbose_output = ["-vvvv"] if args.debug else []
+ prefs_file = (
+ ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
+ )
+ autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+ cmd = (
+ [autopkg_bin, "verify-trust-info", self.filename]
+ + verbose_output
+ + prefs_file
+ )
+ logging.debug(f"cmd: {cmd}")
+
+ result = subprocess.run(cmd, capture_output=True, text=True)
+ if result.returncode == 0:
+ self.verified = True
+ else:
+ self.results["message"] = (result.stderr or "").strip()
+ self.verified = False
+ return self.verified
+
+ def update_trust_info(self, args):
+ prefs_file = (
+ ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
+ )
+ autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+ cmd = [autopkg_bin, "update-trust-info", self.filename] + prefs_file
+ logging.debug(f"cmd: {cmd}")
+
+ # Fail loudly if this exits 0
+ try:
+ subprocess.check_call(cmd)
+ except subprocess.CalledProcessError as e:
+ logging.error(str(e))
+ raise e
+
+ def _parse_report(self, report):
+ with open(report, "rb") as f:
+ report_data = plistlib.load(f)
+
+ failed_items = report_data.get("failures", [])
+ imported_items = []
+ if report_data["summary_results"]:
+ # This means something happened
+ munki_results = report_data["summary_results"].get(
+ "munki_importer_summary_result", {}
+ )
+ imported_items.extend(munki_results.get("data_rows", []))
+
+ return {"imported": imported_items, "failed": failed_items}
+
+ def run(self, args):
+ if self.verified is False:
+ self.error = True
+ self.results["failed"] = True
+ self.results["imported"] = ""
+ else:
+ report_dir = Path("/private/tmp/autopkg")
+ report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
+ report_name = Path(f"{self.name}-{report_time}.plist")
+
+ report_dir.mkdir(parents=True, exist_ok=True)
+ report = report_dir / report_name
+ report.touch(exist_ok=True)
+
+ try:
+ prefs_file = (
+ ["--prefs", args.autopkg_prefs.as_posix()]
+ if args.autopkg_prefs
+ else []
+ )
+ verbose_output = ["-vvvv"] if args.debug else []
+ post_processor_cmd = (
+ list(
+ chain.from_iterable(
+ [
+ ("--post", processor)
+ for processor in self.post_processors
+ ]
+ )
+ )
+ if self.post_processors
+ else []
+ )
+ autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+ cmd = (
+ [autopkg_bin, "run", self.filename, "--report-plist", report]
+ + verbose_output
+ + prefs_file
+ + post_processor_cmd
+ )
+ logging.debug(f"cmd: {cmd}")
+
+ result = subprocess.run(cmd, capture_output=True, text=True)
+ if result.returncode == 0:
+ report_info = self._parse_report(report)
+ self.results = report_info
+ else:
+ self.error = True
+ self.results["failed"] = True
+ self.results["message"] = (result.stderr or "").strip()
+ self.results["imported"] = ""
+ except Exception as e: # pylint: disable=broad-exception-caught
+ logging.error(f"Recipe run failed: {e}")
+ self.error = True
+ self.results["failed"] = True
+ self.results["message"] = (result.stderr or "").strip()
+ self.results["imported"] = ""
+
+ return self
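A minimal sketch of how this relocated Recipe class is driven; the Namespace stands in for the parsed CLI arguments, the recipe name and prefs path are illustrative, and a local autopkg install is assumed:

from argparse import Namespace
from pathlib import Path

from autopkg_wrapper.models.recipe import Recipe

# Illustrative values only; autopkg_prefs may also be None.
args = Namespace(
    debug=False,
    autopkg_prefs=Path("~/Library/Preferences/com.github.autopkg.plist").expanduser(),
    autopkg_bin="/usr/local/bin/autopkg",
)

recipe = Recipe("Firefox.munki", post_processors=None)
if recipe.verify_trust_info(args):
    recipe.run(args)  # fills recipe.results from the report plist on success

print(recipe.error, recipe.results.get("failed"), recipe.results.get("imported"))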
autopkg_wrapper/notifier/slack.py
@@ -5,7 +5,7 @@ import requests


  def send_notification(recipe, token):
- logging.debug("Skipping Slack notification as DEBUG is enabled!")
+ logging.debug("Preparing Slack notification")

  if token is None:
  logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
autopkg_wrapper/utils/args.py
@@ -68,6 +68,33 @@ def setup_args():
  `autopkg-wrapper`
  """,
  )
+ parser.add_argument(
+ "--recipe-processing-order",
+ nargs="*",
+ default=os.getenv("AW_RECIPE_PROCESSING_ORDER", None),
+ help="""
+ This option comes in handy if you include additional recipe type names in your overrides and wish them to be processed in a specific order.
+ We'll specifically look for these recipe types after the first period (.) in the recipe name.
+ Order items can be either a full type suffix (e.g. "upload.jamf") or a partial token (e.g. "upload", "auto_update").
+ Partial tokens are matched against the dot-separated segments after the first '.' so recipes like "Foo.epz.auto_update.jamf" will match "auto_update".
+ This can also be provided via the 'AW_RECIPE_PROCESSING_ORDER' environment variable as a comma-separated list (e.g. "upload,self_service,auto_update").
+ For example, if you have the following recipes to be processed:
+ ExampleApp.auto_install.jamf
+ ExampleApp.upload.jamf
+ ExampleApp.self_service.jamf
+ And you want to ensure that the .upload recipes are always processed first, followed by .auto_install, and finally .self_service, you would provide the following processing order:
+ `--recipe-processing-order upload.jamf auto_install.jamf self_service.jamf`
+ This would ensure that all .upload recipes are processed before any other recipe types.
+ Within each recipe type, the recipes will be ordered alphabetically.
+ We assume that no extensions are provided (but will strip them if needed - extensions that are stripped include .recipe or .recipe.yaml).
+ """,
+ )
+ parser.add_argument(
+ "--autopkg-bin",
+ default=os.getenv("AW_AUTOPKG_BIN", "/usr/local/bin/autopkg"),
+ help="Path to the autopkg binary (default: /usr/local/bin/autopkg). Can also be set via AW_AUTOPKG_BIN.",
+ )
+
  parser.add_argument(
  "--debug",
  default=validate_bool(os.getenv("AW_DEBUG", False)),
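The ordering itself lives in the new recipe_ordering.py, which this diff does not show, so the snippet below is only a standalone illustration of the behaviour the help text describes (rank by the first matching order token, alphabetical within each type), not the package's implementation:

def illustrative_order(recipe_names: list[str], order: list[str]) -> list[str]:
    # Rank each recipe by the first order token that matches its type suffix
    # (everything after the first "."); recipes with no match sort last.
    def rank(name: str) -> int:
        suffix = name.split(".", 1)[1] if "." in name else ""
        segments = suffix.split(".")
        for i, token in enumerate(order):
            if suffix == token or token in segments:
                return i
        return len(order)

    return sorted(recipe_names, key=lambda n: (rank(n), n))


recipes = [
    "ExampleApp.auto_install.jamf",
    "ExampleApp.upload.jamf",
    "ExampleApp.self_service.jamf",
]
print(illustrative_order(recipes, ["upload", "auto_install", "self_service"]))
# ['ExampleApp.upload.jamf', 'ExampleApp.auto_install.jamf', 'ExampleApp.self_service.jamf']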
autopkg_wrapper/utils/git_functions.py
@@ -1,4 +1,5 @@
  import logging
+ import re
  import subprocess
  from datetime import datetime

@@ -21,12 +22,26 @@ def git_run(*args):


  def get_repo_info(override_repo_git_git_dir):
- repo_url = (
- git_run(override_repo_git_git_dir, "config", "--get", "remote.origin.url")
- .stdout.strip()
- .split(".git")[0]
+ remote = git_run(
+ override_repo_git_git_dir, "config", "--get", "remote.origin.url"
+ ).stdout.strip()
+
+ # Supports:
+ # - https://github.com/<owner>/<repo>.git
+ # - git@github.com:<owner>/<repo>.git
+ # - ssh://git@github.com/<owner>/<repo>.git
+ m = re.search(
+ r"github\.com[:/](?P<owner>[^/]+)/(?P<repo>[^\s/]+?)(?:\.git)?$",
+ remote,
+ flags=re.IGNORECASE,
  )
- remote_repo_ref = repo_url.split("https://github.com/")[1]
+ if not m:
+ raise ValueError(f"Unsupported Git remote URL: {remote}")
+
+ owner = m.group("owner")
+ repo = m.group("repo")
+ remote_repo_ref = f"{owner}/{repo}"
+ repo_url = f"https://github.com/{remote_repo_ref}"

  logging.debug(f"Repo URL: {repo_url}")
  logging.debug(f"Remote Repo Ref: {remote_repo_ref}")
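The new pattern is easy to exercise on its own; a quick check against the three remote styles listed in the comments above (the repository name is taken from this package's own metadata):

import re

REMOTE_RE = re.compile(
    r"github\.com[:/](?P<owner>[^/]+)/(?P<repo>[^\s/]+?)(?:\.git)?$",
    flags=re.IGNORECASE,
)

for remote in (
    "https://github.com/smithjw/autopkg-wrapper.git",
    "git@github.com:smithjw/autopkg-wrapper.git",
    "ssh://git@github.com/smithjw/autopkg-wrapper",
):
    m = REMOTE_RE.search(remote)
    print(f"{m.group('owner')}/{m.group('repo')}")  # smithjw/autopkg-wrapper in each case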
autopkg_wrapper/utils/recipe_batching.py (new file)
@@ -0,0 +1,41 @@
+ from __future__ import annotations
+
+ from typing import Iterable, Protocol, TypeVar
+
+
+ class HasFilename(Protocol):
+ filename: str
+
+
+ T = TypeVar("T", bound=HasFilename)
+
+
+ def recipe_type_for(recipe: HasFilename) -> str:
+ parts = recipe.filename.split(".", 1)
+ return parts[1] if len(parts) == 2 else ""
+
+
+ def build_recipe_batches(
+ recipe_list: Iterable[T], recipe_processing_order
+ ) -> list[list[T]]:
+ recipe_list = list(recipe_list)
+ if not recipe_list:
+ return []
+ if not recipe_processing_order:
+ return [recipe_list]
+
+ batches: list[list[T]] = []
+ current_batch: list[T] = []
+ current_type = None
+ for recipe in recipe_list:
+ r_type = recipe_type_for(recipe)
+ if current_type is None or r_type == current_type:
+ current_batch.append(recipe)
+ current_type = r_type
+ continue
+ batches.append(current_batch)
+ current_batch = [recipe]
+ current_type = r_type
+ if current_batch:
+ batches.append(current_batch)
+ return batches
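A short usage sketch for the helpers above; the dataclass is a stand-in for Recipe, since build_recipe_batches only needs objects with a filename attribute, and the input list is assumed to already be sorted by order_recipe_list:

from dataclasses import dataclass

from autopkg_wrapper.utils.recipe_batching import build_recipe_batches


@dataclass
class FakeRecipe:  # stand-in for autopkg_wrapper.models.recipe.Recipe
    filename: str


ordered = [
    FakeRecipe("AppA.upload.jamf"),
    FakeRecipe("AppB.upload.jamf"),
    FakeRecipe("AppA.auto_install.jamf"),
]
batches = build_recipe_batches(
    recipe_list=ordered, recipe_processing_order=["upload", "auto_install"]
)
print([[r.filename for r in batch] for batch in batches])
# [['AppA.upload.jamf', 'AppB.upload.jamf'], ['AppA.auto_install.jamf']]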