autopkg-wrapper 2025.11.1__py3-none-any.whl → 2026.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autopkg_wrapper/autopkg_wrapper.py +103 -52
- autopkg_wrapper/notifier/slack.py +1 -1
- autopkg_wrapper/utils/args.py +70 -0
- autopkg_wrapper/utils/git_functions.py +20 -5
- autopkg_wrapper/utils/recipe_ordering.py +149 -0
- autopkg_wrapper/utils/report_processor.py +674 -0
- autopkg_wrapper-2026.2.5.dist-info/METADATA +107 -0
- autopkg_wrapper-2026.2.5.dist-info/RECORD +15 -0
- {autopkg_wrapper-2025.11.1.dist-info → autopkg_wrapper-2026.2.5.dist-info}/WHEEL +1 -1
- autopkg_wrapper-2025.11.1.dist-info/METADATA +0 -54
- autopkg_wrapper-2025.11.1.dist-info/RECORD +0 -13
- {autopkg_wrapper-2025.11.1.dist-info → autopkg_wrapper-2026.2.5.dist-info}/entry_points.txt +0 -0
- {autopkg_wrapper-2025.11.1.dist-info → autopkg_wrapper-2026.2.5.dist-info}/licenses/LICENSE +0 -0
autopkg_wrapper/autopkg_wrapper.py
CHANGED

@@ -4,6 +4,7 @@ import logging
 import plistlib
 import subprocess
 import sys
+from concurrent.futures import ThreadPoolExecutor, as_completed
 from datetime import datetime
 from itertools import chain
 from pathlib import Path
@@ -12,6 +13,8 @@ import autopkg_wrapper.utils.git_functions as git
 from autopkg_wrapper.notifier import slack
 from autopkg_wrapper.utils.args import setup_args
 from autopkg_wrapper.utils.logging import setup_logger
+from autopkg_wrapper.utils.recipe_ordering import order_recipe_list
+from autopkg_wrapper.utils.report_processor import process_reports


 class Recipe(object):
@@ -34,43 +37,39 @@ class Recipe(object):
         return name

     def verify_trust_info(self, args):
-        verbose_output = ["-vvvv"] if args.debug else
+        verbose_output = ["-vvvv"] if args.debug else []
         prefs_file = (
-            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else
+            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
         )
-
-        cmd =
-
-
-
-
-        p = subprocess.Popen(
-            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
+        autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+        cmd = (
+            [autopkg_bin, "verify-trust-info", self.filename]
+            + verbose_output
+            + prefs_file
         )
-        (
-
-
+        logging.debug(f"cmd: {cmd}")
+
+        result = subprocess.run(cmd, capture_output=True, text=True)
+        if result.returncode == 0:
             self.verified = True
         else:
-
-            self.results["message"] = err
+            self.results["message"] = (result.stderr or "").strip()
             self.verified = False
         return self.verified

     def update_trust_info(self, args):
         prefs_file = (
-            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else
+            ["--prefs", args.autopkg_prefs.as_posix()] if args.autopkg_prefs else []
         )
-
-        cmd =
-        cmd
-        logging.debug(f"cmd: {str(cmd)}")
+        autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
+        cmd = [autopkg_bin, "update-trust-info", self.filename] + prefs_file
+        logging.debug(f"cmd: {cmd}")

         # Fail loudly if this exits 0
         try:
-            subprocess.check_call(cmd
+            subprocess.check_call(cmd)
         except subprocess.CalledProcessError as e:
-            logging.error(e
+            logging.error(str(e))
             raise e

     def _parse_report(self, report):
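
The pattern in this hunk recurs throughout the diff: shell-string `subprocess.Popen(..., shell=True)` calls are replaced by list-argv `subprocess.run(...)` calls. A minimal sketch of the new pattern (the recipe filename here is illustrative, not from the package):

import subprocess

# With a list argv no shell is involved, so a recipe filename containing
# spaces or shell metacharacters cannot be reinterpreted by /bin/sh.
cmd = ["/usr/local/bin/autopkg", "verify-trust-info", "ExampleApp.jamf.recipe"]
result = subprocess.run(cmd, capture_output=True, text=True)
if result.returncode != 0:
    print(result.stderr.strip())
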
@@ -94,7 +93,7 @@ class Recipe(object):
             self.results["failed"] = True
             self.results["imported"] = ""
         else:
-            report_dir = Path("/tmp/autopkg")
+            report_dir = Path("/private/tmp/autopkg")
             report_time = datetime.now().strftime("%Y-%m-%dT%H-%M-%S")
             report_name = Path(f"{self.name}-{report_time}.plist")
@@ -106,9 +105,9 @@ class Recipe(object):
         prefs_file = (
             ["--prefs", args.autopkg_prefs.as_posix()]
             if args.autopkg_prefs
-            else
+            else []
         )
-        verbose_output = ["-vvvv"] if args.debug else
+        verbose_output = ["-vvvv"] if args.debug else []
         post_processor_cmd = (
             list(
                 chain.from_iterable(
@@ -119,25 +118,25 @@ class Recipe(object):
                 )
             )
             if self.post_processors
-            else
+            else []
         )
+        autopkg_bin = getattr(args, "autopkg_bin", "/usr/local/bin/autopkg")
         cmd = [
-
+            autopkg_bin,
             "run",
             self.filename,
             "--report-plist",
             str(report),
         ]
-        cmd = cmd + post_processor_cmd
-        cmd = cmd + verbose_output if verbose_output else cmd
-        cmd = cmd + prefs_file if prefs_file else cmd
-        cmd = " ".join(cmd)
+        cmd = cmd + post_processor_cmd + verbose_output + prefs_file

-        logging.debug(f"cmd: {
+        logging.debug(f"cmd: {cmd}")

-        subprocess.
+        result = subprocess.run(cmd, capture_output=True, text=True)
+        if result.returncode != 0:
+            self.error = True

-        except
+        except Exception:
             self.error = True

         self._has_run = True
@@ -244,7 +243,12 @@ def update_recipe_repo(recipe, git_info, disable_recipe_trust_check, args):


 def parse_recipe_list(recipes, recipe_file, post_processors, args):
-    """
+    """Parse recipe inputs into a common list of recipe names.
+
+    The arguments assume that `recipes` and `recipe_file` are mutually exclusive.
+    If `args.recipe_processing_order` is provided, the list is re-ordered before
+    creating `Recipe` objects.
+    """
     recipe_list = None

     logging.info(f"Recipes: {recipes}") if recipes else None
@@ -254,6 +258,12 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
         if recipe_file.suffix == ".json":
             with open(recipe_file, "r") as f:
                 recipe_list = json.load(f)
+        elif recipe_file.suffix in {".yaml", ".yml"}:
+            from ruamel.yaml import YAML
+
+            yaml = YAML(typ="safe")
+            with open(recipe_file, "r", encoding="utf-8") as f:
+                recipe_list = yaml.load(f)
         elif recipe_file.suffix == ".txt":
             with open(recipe_file, "r") as f:
                 recipe_list = f.read().splitlines()
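
The new branch accepts YAML recipe lists alongside JSON and plain text. A quick sketch of what it expects, using ruamel.yaml's safe loader as in the diff (the filename is illustrative):

from ruamel.yaml import YAML

# A file containing a plain YAML sequence, e.g.
#   - ExampleApp.upload.jamf
#   - OtherApp.download
# loads as an ordinary Python list of strings.
yaml = YAML(typ="safe")
with open("recipe_list.yaml", encoding="utf-8") as f:
    recipe_list = yaml.load(f)
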
@@ -277,11 +287,16 @@ def parse_recipe_list(recipes, recipe_file, post_processors, args):
             """Please provide recipes to run via the following methods:
             --recipes recipe_one.download recipe_two.download
             --recipe-file path/to/recipe_list.json
-            Comma separated list in the
+            Comma separated list in the AW_RECIPES env variable"""
         )
         sys.exit(1)

-
+    if args.recipe_processing_order:
+        recipe_list = order_recipe_list(
+            recipe_list=recipe_list, order=args.recipe_processing_order
+        )
+
+    logging.info(f"Processing {len(recipe_list)} recipes.")
     recipe_map = [Recipe(name, post_processors=post_processors) for name in recipe_list]

     return recipe_map
@@ -358,31 +373,50 @@ def main():

     failed_recipes = []

-
-
+    # Run recipes concurrently using a thread pool to parallelize subprocess calls
+    max_workers = max(1, int(getattr(args, "concurrency", 1)))
+    logging.info(f"Running recipes with concurrency={max_workers}")
+
+    def run_one(r: Recipe):
+        logging.info(f"Processing Recipe: {r.name}")
         process_recipe(
-            recipe=
+            recipe=r,
             disable_recipe_trust_check=args.disable_recipe_trust_check,
             args=args,
         )
+        # Git updates and notifications are applied serially after all recipes finish
+        return r
+
+    with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        futures = [executor.submit(run_one, r) for r in recipe_list]
+        for fut in as_completed(futures):
+            r = fut.result()
+            if r.error or r.results.get("failed"):
+                failed_recipes.append(r)
+
+    # Apply git updates serially to avoid branch/commit conflicts when concurrency > 1
+    for r in recipe_list:
         update_recipe_repo(
             git_info=override_repo_info,
-            recipe=
+            recipe=r,
             disable_recipe_trust_check=args.disable_recipe_trust_check,
             args=args,
         )
-        slack.send_notification(
-            recipe=recipe, token=args.slack_token
-        ) if args.slack_token else None

-
-
-
-
-
-
-
-
+    # Send notifications serially to simplify rate limiting and ordering
+    if args.slack_token:
+        for r in recipe_list:
+            slack.send_notification(recipe=r, token=args.slack_token)
+
+    # Optionally open a PR for updated trust information
+    if args.create_pr and recipe_list:
+        # Choose a representative recipe for the PR title/body
+        rep_recipe = next(
+            (r for r in recipe_list if r.updated is True or r.verified is False),
+            recipe_list[0],
+        )
+        pr_url = git.create_pull_request(git_info=override_repo_info, recipe=rep_recipe)
+        logging.info(f"Created Pull Request for trust info updates: {pr_url}")

     # Create GitHub issue for failed recipes
     if args.create_issues and failed_recipes and args.github_token:
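
The concurrency change is the standard fan-out/fan-in thread-pool shape: submit every recipe, collect results as they complete, then run the git, Slack, and PR steps serially. A self-contained sketch of the same shape (names are illustrative):

from concurrent.futures import ThreadPoolExecutor, as_completed

def run_one(name: str) -> str:
    # Stand-in for process_recipe(); threads fit here because the heavy
    # lifting happens in an autopkg subprocess, not in Python bytecode.
    return name.upper()

names = ["a.upload", "b.upload", "c.download"]
with ThreadPoolExecutor(max_workers=2) as executor:
    futures = [executor.submit(run_one, n) for n in names]
    finished = [f.result() for f in as_completed(futures)]
# Serial follow-up work (git pushes, notifications) happens after the pool drains.
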
@@ -390,3 +424,20 @@ def main():
             git_info=override_repo_info, failed_recipes=failed_recipes
         )
         logging.info(f"Created GitHub issue for failed recipes: {issue_url}")
+
+    # Optionally process reports after running recipes
+    if getattr(args, "process_reports", False):
+        rc = process_reports(
+            zip_file=getattr(args, "reports_zip", None),
+            extract_dir=getattr(
+                args, "reports_extract_dir", "autopkg_reports_summary/reports"
+            ),
+            reports_dir=(getattr(args, "reports_dir", None) or "/private/tmp/autopkg"),
+            environment="",
+            run_date=getattr(args, "reports_run_date", ""),
+            out_dir=getattr(args, "reports_out_dir", "autopkg_reports_summary/summary"),
+            debug=bool(getattr(args, "debug", False)),
+            strict=bool(getattr(args, "reports_strict", False)),
+        )
+        if rc:
+            sys.exit(rc)
autopkg_wrapper/notifier/slack.py
CHANGED

@@ -5,7 +5,7 @@ import requests


 def send_notification(recipe, token):
-    logging.debug("
+    logging.debug("Preparing Slack notification")

     if token is None:
         logging.error("Skipping Slack Notification as no SLACK_WEBHOOK_TOKEN defined!")
autopkg_wrapper/utils/args.py
CHANGED

@@ -68,6 +68,33 @@ def setup_args():
         `autopkg-wrapper`
         """,
     )
+    parser.add_argument(
+        "--recipe-processing-order",
+        nargs="*",
+        default=os.getenv("AW_RECIPE_PROCESSING_ORDER", None),
+        help="""
+        This option comes in handy if you include additional recipe type names in your overrides and wish them to be processed in a specific order.
+        We'll specifically look for these recipe types after the first period (.) in the recipe name.
+        Order items can be either a full type suffix (e.g. "upload.jamf") or a partial token (e.g. "upload", "auto_update").
+        Partial tokens are matched against the dot-separated segments after the first '.' so recipes like "Foo.epz.auto_update.jamf" will match "auto_update".
+        This can also be provided via the 'AW_RECIPE_PROCESSING_ORDER' environment variable as a comma-separated list (e.g. "upload,self_service,auto_update").
+        For example, if you have the following recipes to be processed:
+        ExampleApp.auto_install.jamf
+        ExampleApp.upload.jamf
+        ExampleApp.self_service.jamf
+        And you want to ensure that the .upload recipes are always processed first, followed by .auto_install, and finally .self_service, you would provide the following processing order:
+        `--recipe-processing-order upload.jamf auto_install.jamf self_service.jamf`
+        This would ensure that all .upload recipes are processed before any other recipe types.
+        Within each recipe type, the recipes will be ordered alphabetically.
+        We assume that no extensions are provided (but will strip them if needed - extensions that are stripped include .recipe or .recipe.yaml).
+        """,
+    )
+    parser.add_argument(
+        "--autopkg-bin",
+        default=os.getenv("AW_AUTOPKG_BIN", "/usr/local/bin/autopkg"),
+        help="Path to the autopkg binary (default: /usr/local/bin/autopkg). Can also be set via AW_AUTOPKG_BIN.",
+    )
+
     parser.add_argument(
         "--debug",
         default=validate_bool(os.getenv("AW_DEBUG", False)),
@@ -90,6 +117,12 @@ def setup_args():
         If this option is used, git commands won't be run
         """,
     )
+    parser.add_argument(
+        "--concurrency",
+        type=int,
+        default=int(os.getenv("AW_CONCURRENCY", "1")),
+        help="Number of recipes to run in parallel (default: 1)",
+    )
     parser.add_argument(
         "--slack-token",
         default=os.getenv("SLACK_WEBHOOK_TOKEN", None),
@@ -144,4 +177,41 @@ def setup_args():
         """,
     )

+    # Report processing options
+    parser.add_argument(
+        "--process-reports",
+        action="store_true",
+        help="Process autopkg report directories or zip and emit markdown summaries",
+    )
+    parser.add_argument(
+        "--reports-zip",
+        default=os.getenv("AW_REPORTS_ZIP", None),
+        help="Path to an autopkg_report-*.zip to extract and process",
+    )
+    parser.add_argument(
+        "--reports-extract-dir",
+        default=os.getenv("AW_REPORTS_EXTRACT_DIR", "autopkg_reports_summary/reports"),
+        help="Directory to extract the zip into (default: autopkg_reports_summary/reports)",
+    )
+    parser.add_argument(
+        "--reports-dir",
+        default=os.getenv("AW_REPORTS_DIR", None),
+        help="Directory of reports to process (if no zip provided)",
+    )
+    parser.add_argument(
+        "--reports-out-dir",
+        default=os.getenv("AW_REPORTS_OUT_DIR", "autopkg_reports_summary/summary"),
+        help="Directory to write markdown outputs (default: autopkg_reports_summary/summary)",
+    )
+    parser.add_argument(
+        "--reports-run-date",
+        default=os.getenv("AW_REPORTS_RUN_DATE", ""),
+        help="Run date string to include in the summary",
+    )
+    parser.add_argument(
+        "--reports-strict",
+        action="store_true",
+        help="Exit non-zero if any errors are detected in processed reports",
+    )
+
     return parser.parse_args()
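
Taken together, the new flags compose along these lines (a hypothetical invocation; the `autopkg-wrapper` entry point is the one referenced in the help text above, and every flag shown is defined in this file):

autopkg-wrapper \
  --recipe-file recipes/recipe_list.yaml \
  --recipe-processing-order upload auto_install self_service \
  --concurrency 4 \
  --process-reports --reports-dir /private/tmp/autopkg --reports-strict
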
autopkg_wrapper/utils/git_functions.py
CHANGED

@@ -1,4 +1,5 @@
 import logging
+import re
 import subprocess
 from datetime import datetime
@@ -21,12 +22,26 @@ def git_run(*args):


 def get_repo_info(override_repo_git_git_dir):
-
-
-
-
+    remote = git_run(
+        override_repo_git_git_dir, "config", "--get", "remote.origin.url"
+    ).stdout.strip()
+
+    # Supports:
+    # - https://github.com/<owner>/<repo>.git
+    # - git@github.com:<owner>/<repo>.git
+    # - ssh://git@github.com/<owner>/<repo>.git
+    m = re.search(
+        r"github\.com[:/](?P<owner>[^/]+)/(?P<repo>[^\s/]+?)(?:\.git)?$",
+        remote,
+        flags=re.IGNORECASE,
     )
-
+    if not m:
+        raise ValueError(f"Unsupported Git remote URL: {remote}")
+
+    owner = m.group("owner")
+    repo = m.group("repo")
+    remote_repo_ref = f"{owner}/{repo}"
+    repo_url = f"https://github.com/{remote_repo_ref}"

     logging.debug(f"Repo URL: {repo_url}")
     logging.debug(f"Remote Repo Ref: {remote_repo_ref}")
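
The new regex normalises all three supported remote shapes to the same owner/repo pair. A quick check of the behaviour (the owner and repo names are illustrative):

import re

pattern = r"github\.com[:/](?P<owner>[^/]+)/(?P<repo>[^\s/]+?)(?:\.git)?$"
for remote in (
    "https://github.com/acme/overrides.git",
    "git@github.com:acme/overrides.git",
    "ssh://git@github.com/acme/overrides",
):
    m = re.search(pattern, remote, flags=re.IGNORECASE)
    # The optional non-capturing (?:\.git)? group keeps ".git" out of the repo name.
    assert m and (m.group("owner"), m.group("repo")) == ("acme", "overrides")
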
autopkg_wrapper/utils/recipe_ordering.py
ADDED

@@ -0,0 +1,149 @@
+import logging
+
+
+def order_recipe_list(recipe_list, order):
+    # This option comes in handy if you include additional recipe type names in your overrides and wish them to be processed in a specific order.
+    # We'll specifically look for these recipe types after the first period (.) in the recipe name.
+    # For example, if you have the following recipes to be processed:
+    # ExampleApp.auto_install.jamf
+    # ExampleApp.upload.jamf
+    # ExampleApp.self_service.jamf
+    # And you want to ensure that the .upload recipes are always processed first, followed by .auto_install, and finally .self_service, you would provide the following processing order:
+    # `--recipe-processing-order upload.jamf auto_install.jamf self_service.jamf`
+    # This would ensure that all .upload recipes are processed before any other recipe types.
+    # Within each recipe type, the recipes will be ordered alphabetically.
+    # We assume that no extensions are provided (but will strip them if needed - extensions that are stripped include .recipe or .recipe.yaml).
+
+    def strip_known_extensions(value: str) -> str:
+        value = value.strip()
+        if value.endswith(".recipe.yaml"):
+            return value[: -len(".recipe.yaml")]
+        if value.endswith(".recipe"):
+            return value[: -len(".recipe")]
+        return value
+
+    def normalise_processing_order(value):
+        if not value:
+            return []
+
+        items: list[str] = []
+        if isinstance(value, str):
+            raw = value.strip()
+            if not raw:
+                return []
+            # String values generally come from env var defaults; treat as comma-separated.
+            items = [v.strip() for v in raw.split(",")]
+        else:
+            # argparse typically provides a list here, but env var defaults can leak through.
+            for v in value:
+                if v is None:
+                    continue
+                v = str(v).strip()
+                if not v:
+                    continue
+                if "," in v:
+                    items.extend([p.strip() for p in v.split(",")])
+                else:
+                    items.append(v)
+
+        normalised: list[str] = []
+        seen: set[str] = set()
+        for item in items:
+            if not item:
+                continue
+            item = item.lstrip(".")
+            item = strip_known_extensions(item)
+            if not item or item in seen:
+                continue
+            seen.add(item)
+            normalised.append(item)
+        return normalised
+
+    def recipe_type(recipe_name: str) -> str:
+        # Type is everything after the first '.' (e.g. Example.upload.jamf -> upload.jamf)
+        parts = recipe_name.split(".", 1)
+        return parts[1] if len(parts) == 2 else ""
+
+    def recipe_segments_after_first_dot(recipe_name: str) -> list[str]:
+        after_first = recipe_type(recipe_name)
+        return [p for p in after_first.split(".") if p] if after_first else []
+
+    def pattern_matches_segments(pattern: str, segments: list[str]) -> bool:
+        # Pattern can be a single token ("auto_update") or a dot-separated sequence
+        # ("upload.jamf", "auto_update.jamf", etc.).
+        if not pattern:
+            return False
+        pattern_parts = [p for p in pattern.split(".") if p]
+        if not pattern_parts:
+            return False
+
+        # Case-insensitive matching.
+        segments_norm = [s.casefold() for s in segments]
+        pattern_parts_norm = [p.casefold() for p in pattern_parts]
+
+        if len(pattern_parts_norm) == 1:
+            return pattern_parts_norm[0] in segments_norm
+
+        # Contiguous subsequence match.
+        for start in range(0, len(segments_norm) - len(pattern_parts_norm) + 1):
+            if (
+                segments_norm[start : start + len(pattern_parts_norm)]
+                == pattern_parts_norm
+            ):
+                return True
+        return False
+
+    if not recipe_list:
+        return recipe_list
+
+    normalised_order = normalise_processing_order(order)
+
+    # If the provided order contains no usable tokens, do not re-order.
+    # (We still strip known extensions, which is order-preserving.)
+    if not normalised_order:
+        return [
+            strip_known_extensions(str(r).strip()) for r in recipe_list if r is not None
+        ]
+
+    # First, normalise recipe names by stripping known extensions.
+    normalised_recipes: list[str] = []
+    for r in recipe_list:
+        if r is None:
+            continue
+        normalised_recipes.append(strip_known_extensions(str(r).strip()))
+
+    # If a processing order is supplied, match each recipe to the *first* pattern it satisfies.
+    # This supports both direct matches ("upload.jamf") and partial matches ("upload",
+    # "auto_update") against dot-separated segments after the first '.' in the recipe name.
+    pattern_groups: dict[str, list[str]] = {p: [] for p in normalised_order}
+    unmatched: list[str] = []
+
+    for r in normalised_recipes:
+        segments = recipe_segments_after_first_dot(r)
+        matched = False
+        for p in normalised_order:
+            if pattern_matches_segments(p, segments):
+                pattern_groups[p].append(r)
+                matched = True
+                break
+        if not matched:
+            unmatched.append(r)
+
+    ordered: list[str] = []
+    for p in normalised_order:
+        ordered.extend(sorted(pattern_groups[p], key=str.casefold))
+
+    # Remaining recipes: group by their full type string and order groups alphabetically,
+    # with empty-type last.
+    groups: dict[str, list[str]] = {}
+    for r in unmatched:
+        t = recipe_type(r)
+        groups.setdefault(t, []).append(r)
+
+    for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
+        ordered.extend(sorted(groups[t], key=str.casefold))
+
+    logging.debug(f"Recipe processing order: {normalised_order}")
+    logging.debug(f"Ordered recipes: {ordered}")
+
+    return ordered
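
A worked example of the ordering rules above, with hypothetical recipe names: recipes matching the first pattern come first (alphabetically within each group), unmatched recipes are grouped by their full type and appended alphabetically by type, and known extensions are stripped up front.

from autopkg_wrapper.utils.recipe_ordering import order_recipe_list

recipes = [
    "ExampleApp.self_service.jamf",
    "ExampleApp.upload.jamf.recipe",  # ".recipe" is stripped
    "ExampleApp.auto_install.jamf",
    "OtherApp.download",
]
print(order_recipe_list(recipe_list=recipes, order=["upload", "auto_install"]))
# ['ExampleApp.upload.jamf', 'ExampleApp.auto_install.jamf',
#  'OtherApp.download', 'ExampleApp.self_service.jamf']
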