autopkg-wrapper 2024.2.4__py3-none-any.whl → 2026.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,6 +14,7 @@ def validate_file(arg):
         message = f"Error! This is not valid file: {arg}"
         raise argparse.ArgumentTypeError(message)

+
 def validate_directory(arg):
     dir_path = Path(arg).resolve()
     dir_exists = dir_path.is_dir()
@@ -25,38 +26,115 @@ def validate_directory(arg):
         raise argparse.ArgumentTypeError(message)


+def validate_bool(arg):
+    if isinstance(arg, bool):
+        return arg
+    elif isinstance(arg, str) and arg.lower() in ["0", "false", "no", "f"]:
+        return False
+    elif isinstance(arg, str) and arg.lower() in ["1", "true", "yes", "t"]:
+        return True
+
+
+def find_github_token():
+    if os.getenv("GITHUB_TOKEN", None):
+        return os.getenv("GITHUB_TOKEN")
+    elif os.getenv("GH_TOKEN", None):
+        return os.getenv("GH_TOKEN")
+    else:
+        return None
+
+
 def setup_args():
     parser = argparse.ArgumentParser(description="Run autopkg recipes")
     recipe_arguments = parser.add_mutually_exclusive_group()
     recipe_arguments.add_argument(
         "--recipe-file",
         type=validate_file,
-        default=None,
-        help="Path to a list of recipes to run",
+        default=os.getenv("AW_RECIPE_FILE", None),
+        help="Provide the list of recipes to run via a JSON file for easier management.",
     )
     recipe_arguments.add_argument(
         "--recipes",
-        "--recipe",
         nargs="*",
-        default=os.getenv("AUTOPKG_RECIPES", None),
-        help="Recipes to run with autopkg",
+        default=os.getenv("AW_RECIPES", None),
+        help="""
+        Recipes to run via CLI flag or environment variable. If the '--recipes' flag is used, simply
+        provide a space-separated list on the command line:
+        `autopkg-wrapper --recipes recipe_one.download recipe_two.download`
+        Alternatively, you can provide a space/comma-separated list in the 'AW_RECIPES' environment
+        variable:
+        `export AW_RECIPES="recipe_one.download recipe_two.download"`
+        `export AW_RECIPES="recipe_one.pkg,recipe_two.pkg"`
+        `autopkg-wrapper`
+        """,
     )
+    parser.add_argument(
+        "--recipe-processing-order",
+        nargs="*",
+        default=os.getenv("AW_RECIPE_PROCESSING_ORDER", None),
+        help="""
+        This option comes in handy if you include additional recipe type names in your overrides and wish them to be processed in a specific order.
+        We'll specifically look for these recipe types after the first period (.) in the recipe name.
+        Order items can be either a full type suffix (e.g. "upload.jamf") or a partial token (e.g. "upload", "auto_update").
+        Partial tokens are matched against the dot-separated segments after the first '.' so recipes like "Foo.epz.auto_update.jamf" will match "auto_update".
+        This can also be provided via the 'AW_RECIPE_PROCESSING_ORDER' environment variable as a comma-separated list (e.g. "upload,self_service,auto_update").
+        For example, if you have the following recipes to be processed:
+        ExampleApp.auto_install.jamf
+        ExampleApp.upload.jamf
+        ExampleApp.self_service.jamf
+        And you want to ensure that the .upload recipes are always processed first, followed by .auto_install, and finally .self_service, you would provide the following processing order:
+        `--recipe-processing-order upload.jamf auto_install.jamf self_service.jamf`
+        This would ensure that all .upload recipes are processed before any other recipe types.
+        Within each recipe type, the recipes will be ordered alphabetically.
+        We assume that no extensions are provided (but will strip them if needed - extensions that are stripped include .recipe or .recipe.yaml).
+        """,
+    )
+    parser.add_argument(
+        "--autopkg-bin",
+        default=os.getenv("AW_AUTOPKG_BIN", "/usr/local/bin/autopkg"),
+        help="Path to the autopkg binary (default: /usr/local/bin/autopkg). Can also be set via AW_AUTOPKG_BIN.",
+    )
+
     parser.add_argument(
         "--debug",
-        default=os.getenv("DEBUG", False),
+        default=validate_bool(os.getenv("AW_DEBUG", False)),
         action="store_true",
         help="Enable debug logging when running script",
     )
     parser.add_argument(
-        "--override-trust",
-        action="store_false",
-        help="If set recipe override trust verification will be disabled. (Default: True)",
+        "--disable-recipe-trust-check",
+        action="store_true",
+        help="""
+        If this option is used, recipe trust verification will not be run prior to a recipe run.
+        This does not set FAIL_RECIPES_WITHOUT_TRUST_INFO to No. You will need to set that outside
+        of this application.
+        """,
+    )
+    parser.add_argument(
+        "--disable-git-commands",
+        action="store_true",
+        help="""
+        If this option is used, git commands won't be run
+        """,
+    )
+    parser.add_argument(
+        "--concurrency",
+        type=int,
+        default=int(os.getenv("AW_CONCURRENCY", "1")),
+        help="Number of recipes to run in parallel (default: 1)",
     )
-    parser.add_argument("--slack-token", default=os.getenv("SLACK_WEBHOOK_TOKEN", None), help=argparse.SUPPRESS)
-    parser.add_argument("--github-token", default=os.getenv("GITHUB_TOKEN", None))
+    parser.add_argument(
+        "--slack-token",
+        default=os.getenv("SLACK_WEBHOOK_TOKEN", None),
+        help=argparse.SUPPRESS,
+    )
+    parser.add_argument("--github-token", default=find_github_token())
     parser.add_argument(
         "--branch-name",
-        default=os.getenv("AUTOPKG_TRUST_BRANCH", f"fix/update_trust_information/{datetime.now().strftime("%Y-%m-%dT%H-%M-%S")}"),
+        default=os.getenv(
+            "AW_TRUST_BRANCH",
+            f"fix/update_trust_information/{datetime.now().strftime('%Y-%m-%dT%H-%M-%S')}",
+        ),
         help="""
         Branch name to be used recipe overrides have failed their trust verification and need to be updated.
         By default, this will be in the format of \"fix/update_trust_information/YYYY-MM-DDTHH-MM-SS\"
@@ -64,18 +142,76 @@ def setup_args():
     )
     parser.add_argument(
         "--create-pr",
-        default=os.getenv("CREATE_PR", False),
+        default=os.getenv("AW_CREATE_PR", False),
         action="store_true",
         help="If enabled, autopkg_wrapper will open a PR for updated trust information",
     )
     parser.add_argument(
-        "--autopkg-overrides-repo-path",
-        default=os.getenv("AUTOPKG_OVERRIDES_REPO_PATH", None),
+        "--create-issues",
+        action="store_true",
+        help="Create a GitHub issue for recipes that fail during processing",
+    )
+    parser.add_argument(
+        "--overrides-repo-path",
+        default=os.getenv("AW_OVERRIDES_REPO_PATH", None),
         type=validate_directory,
         help="""
         The path on disk to the git repository containing the autopkg overrides directory.
         If none is provided, we will try to determine it for you.
         """,
     )
+    parser.add_argument(
+        "--post-processors",
+        default=os.getenv("AW_POST_PROCESSORS", None),
+        nargs="*",
+        help="""
+        One or more autopkg post processors to run after each recipe execution
+        """,
+    )
+    parser.add_argument(
+        "--autopkg-prefs",
+        default=os.getenv("AW_AUTOPKG_PREFS_FILE", None),
+        type=validate_file,
+        help="""
+        Path to the autopkg preferences you'd like to use
+        """,
+    )
+
+    # Report processing options
+    parser.add_argument(
+        "--process-reports",
+        action="store_true",
+        help="Process autopkg report directories or zip and emit markdown summaries",
+    )
+    parser.add_argument(
+        "--reports-zip",
+        default=os.getenv("AW_REPORTS_ZIP", None),
+        help="Path to an autopkg_report-*.zip to extract and process",
+    )
+    parser.add_argument(
+        "--reports-extract-dir",
+        default=os.getenv("AW_REPORTS_EXTRACT_DIR", "autopkg_reports_summary/reports"),
+        help="Directory to extract the zip into (default: autopkg_reports_summary/reports)",
+    )
+    parser.add_argument(
+        "--reports-dir",
+        default=os.getenv("AW_REPORTS_DIR", None),
+        help="Directory of reports to process (if no zip provided)",
+    )
+    parser.add_argument(
+        "--reports-out-dir",
+        default=os.getenv("AW_REPORTS_OUT_DIR", "autopkg_reports_summary/summary"),
+        help="Directory to write markdown outputs (default: autopkg_reports_summary/summary)",
+    )
+    parser.add_argument(
+        "--reports-run-date",
+        default=os.getenv("AW_REPORTS_RUN_DATE", ""),
+        help="Run date string to include in the summary",
+    )
+    parser.add_argument(
+        "--reports-strict",
+        action="store_true",
+        help="Exit non-zero if any errors are detected in processed reports",
+    )

     return parser.parse_args()
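
As a quick illustration of the two new helpers introduced above, here is a minimal sketch of their behaviour, re-using the definitions from this diff (the token value is a placeholder, and running this assumes validate_bool and find_github_token are in scope):

import os

# validate_bool() accepts real booleans or common truthy/falsy strings.
assert validate_bool(True) is True
assert validate_bool("no") is False      # "0", "false", "no", "f" -> False
assert validate_bool("1") is True        # "1", "true", "yes", "t" -> True

# find_github_token() prefers GITHUB_TOKEN and falls back to GH_TOKEN.
os.environ.pop("GITHUB_TOKEN", None)
os.environ["GH_TOKEN"] = "ghp_example_token"  # placeholder value
assert find_github_token() == "ghp_example_token"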
@@ -1,5 +1,7 @@
 import logging
+import re
 import subprocess
+from datetime import datetime

 from github import Github

@@ -20,12 +22,26 @@ def git_run(*args):


 def get_repo_info(override_repo_git_git_dir):
-    repo_url = (
-        git_run(override_repo_git_git_dir, "config", "--get", "remote.origin.url")
-        .stdout.strip()
-        .split(".git")[0]
+    remote = git_run(
+        override_repo_git_git_dir, "config", "--get", "remote.origin.url"
+    ).stdout.strip()
+
+    # Supports:
+    # - https://github.com/<owner>/<repo>.git
+    # - git@github.com:<owner>/<repo>.git
+    # - ssh://git@github.com/<owner>/<repo>.git
+    m = re.search(
+        r"github\.com[:/](?P<owner>[^/]+)/(?P<repo>[^\s/]+?)(?:\.git)?$",
+        remote,
+        flags=re.IGNORECASE,
     )
-    remote_repo_ref = repo_url.split("https://github.com/")[1]
+    if not m:
+        raise ValueError(f"Unsupported Git remote URL: {remote}")
+
+    owner = m.group("owner")
+    repo = m.group("repo")
+    remote_repo_ref = f"{owner}/{repo}"
+    repo_url = f"https://github.com/{remote_repo_ref}"

     logging.debug(f"Repo URL: {repo_url}")
     logging.debug(f"Remote Repo Ref: {remote_repo_ref}")
@@ -102,8 +118,58 @@ Please review and merge the updated trust information for this override.

     g = Github(git_info["github_token"])
     repo = g.get_repo(git_info["override_repo_remote_ref"])
-    pr = repo.create_pull(title=title, body=body, head=git_info["override_trust_branch"], base="main")
-    pr_url = f"{git_info["override_repo_url"]}/pull/{pr.number}"
+    pr = repo.create_pull(
+        title=title, body=body, head=git_info["override_trust_branch"], base="main"
+    )
+    pr_url = f"{git_info['override_repo_url']}/pull/{pr.number}"

     logging.debug(f"PR URL: {pr_url}")
     return pr_url
+
+
+def create_issue_for_failed_recipes(git_info, failed_recipes):
+    """
+    Creates a GitHub issue listing all recipes that failed during the run.
+
+    Args:
+        git_info (dict): Dictionary containing Git repository information
+        failed_recipes (list): List of Recipe objects that failed during processing
+
+    Returns:
+        str: URL of the created GitHub issue, or None if no issue was created
+    """
+
+    if not failed_recipes:
+        logging.debug("No failed recipes to report")
+        return None
+
+    g = Github(git_info["github_token"])
+    repo = g.get_repo(git_info["override_repo_remote_ref"])
+
+    # Create issue title and body
+    current_date = datetime.now().strftime("%Y-%m-%d")
+    title = f"AutoPkg Recipe Failures - {current_date}"
+
+    body = "## Recipe Failure Details:\n\n"
+    for recipe in failed_recipes:
+        body += f"#### {recipe.name}\n"
+
+        if recipe.results.get("failed"):
+            for failure in recipe.results.get("failed", []):
+                body += f"- {failure.get('message', 'Unknown error')}\n"
+
+        body += "\n"
+
+    body += "\nThis issue was automatically generated by autopkg-wrapper."
+
+    # Create the issue
+    issue = repo.create_issue(
+        title=title,
+        body=body,
+        labels=["autopkg-failure"],
+    )
+
+    issue_url = f"{git_info['override_repo_url']}/issues/{issue.number}"
+    logging.debug(f"Issue URL: {issue_url}")
+
+    return issue_url
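
A rough sketch of how create_issue_for_failed_recipes could be driven. The git_info keys mirror the ones the function reads, but building the dict by hand and using SimpleNamespace as a stand-in for the package's Recipe objects are illustrative assumptions, and the call would reach the GitHub API:

from types import SimpleNamespace

failed = [
    SimpleNamespace(
        name="Firefox.download",
        results={"failed": [{"message": "404 fetching download URL"}]},
    ),
]

git_info = {
    "github_token": "<github token>",                                # placeholder
    "override_repo_remote_ref": "acme/autopkg-overrides",            # placeholder owner/repo
    "override_repo_url": "https://github.com/acme/autopkg-overrides",
}

issue_url = create_issue_for_failed_recipes(git_info, failed)
print(issue_url)  # e.g. https://github.com/acme/autopkg-overrides/issues/<n>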
@@ -0,0 +1,41 @@
+from __future__ import annotations
+
+from typing import Iterable, Protocol, TypeVar
+
+
+class HasFilename(Protocol):
+    filename: str
+
+
+T = TypeVar("T", bound=HasFilename)
+
+
+def recipe_type_for(recipe: HasFilename) -> str:
+    parts = recipe.filename.split(".", 1)
+    return parts[1] if len(parts) == 2 else ""
+
+
+def build_recipe_batches(
+    recipe_list: Iterable[T], recipe_processing_order
+) -> list[list[T]]:
+    recipe_list = list(recipe_list)
+    if not recipe_list:
+        return []
+    if not recipe_processing_order:
+        return [recipe_list]
+
+    batches: list[list[T]] = []
+    current_batch: list[T] = []
+    current_type = None
+    for recipe in recipe_list:
+        r_type = recipe_type_for(recipe)
+        if current_type is None or r_type == current_type:
+            current_batch.append(recipe)
+            current_type = r_type
+            continue
+        batches.append(current_batch)
+        current_batch = [recipe]
+        current_type = r_type
+    if current_batch:
+        batches.append(current_batch)
+    return batches
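
To illustrate the batching behaviour above: when any processing order is in effect, consecutive recipes sharing the same type (everything after the first '.') are grouped into one batch, while a falsy order yields a single batch. The FakeRecipe class below is only a stand-in for any object exposing a filename attribute:

from dataclasses import dataclass

@dataclass
class FakeRecipe:
    filename: str

recipes = [
    FakeRecipe("Firefox.upload.jamf"),
    FakeRecipe("GoogleChrome.upload.jamf"),
    FakeRecipe("Firefox.auto_install.jamf"),
]

batches = build_recipe_batches(recipes, recipe_processing_order=["upload", "auto_install"])
print([[r.filename for r in batch] for batch in batches])
# [['Firefox.upload.jamf', 'GoogleChrome.upload.jamf'], ['Firefox.auto_install.jamf']]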
@@ -0,0 +1,149 @@
+import logging
+
+
+def order_recipe_list(recipe_list, order):
+    # This option comes in handy if you include additional recipe type names in your overrides and wish them to be processed in a specific order.
+    # We'll specifically look for these recipe types after the first period (.) in the recipe name.
+    # For example, if you have the following recipes to be processed:
+    # ExampleApp.auto_install.jamf
+    # ExampleApp.upload.jamf
+    # ExampleApp.self_service.jamf
+    # And you want to ensure that the .upload recipes are always processed first, followed by .auto_install, and finally .self_service, you would provide the following processing order:
+    # `--recipe-processing-order upload.jamf auto_install.jamf self_service.jamf`
+    # This would ensure that all .upload recipes are processed before any other recipe types.
+    # Within each recipe type, the recipes will be ordered alphabetically.
+    # We assume that no extensions are provided (but will strip them if needed - extensions that are stripped include .recipe or .recipe.yaml).
+
+    def strip_known_extensions(value: str) -> str:
+        value = value.strip()
+        if value.endswith(".recipe.yaml"):
+            return value[: -len(".recipe.yaml")]
+        if value.endswith(".recipe"):
+            return value[: -len(".recipe")]
+        return value
+
+    def normalise_processing_order(value):
+        if not value:
+            return []
+
+        items: list[str] = []
+        if isinstance(value, str):
+            raw = value.strip()
+            if not raw:
+                return []
+            # String values generally come from env var defaults; treat as comma-separated.
+            items = [v.strip() for v in raw.split(",")]
+        else:
+            # argparse typically provides a list here, but env var defaults can leak through.
+            for v in value:
+                if v is None:
+                    continue
+                v = str(v).strip()
+                if not v:
+                    continue
+                if "," in v:
+                    items.extend([p.strip() for p in v.split(",")])
+                else:
+                    items.append(v)
+
+        normalised: list[str] = []
+        seen: set[str] = set()
+        for item in items:
+            if not item:
+                continue
+            item = item.lstrip(".")
+            item = strip_known_extensions(item)
+            if not item or item in seen:
+                continue
+            seen.add(item)
+            normalised.append(item)
+        return normalised
+
+    def recipe_type(recipe_name: str) -> str:
+        # Type is everything after the first '.' (e.g. Example.upload.jamf -> upload.jamf)
+        parts = recipe_name.split(".", 1)
+        return parts[1] if len(parts) == 2 else ""
+
+    def recipe_segments_after_first_dot(recipe_name: str) -> list[str]:
+        after_first = recipe_type(recipe_name)
+        return [p for p in after_first.split(".") if p] if after_first else []
+
+    def pattern_matches_segments(pattern: str, segments: list[str]) -> bool:
+        # Pattern can be a single token ("auto_update") or a dot-separated sequence
+        # ("upload.jamf", "auto_update.jamf", etc.).
+        if not pattern:
+            return False
+        pattern_parts = [p for p in pattern.split(".") if p]
+        if not pattern_parts:
+            return False
+
+        # Case-insensitive matching.
+        segments_norm = [s.casefold() for s in segments]
+        pattern_parts_norm = [p.casefold() for p in pattern_parts]
+
+        if len(pattern_parts_norm) == 1:
+            return pattern_parts_norm[0] in segments_norm
+
+        # Contiguous subsequence match.
+        for start in range(0, len(segments_norm) - len(pattern_parts_norm) + 1):
+            if (
+                segments_norm[start : start + len(pattern_parts_norm)]
+                == pattern_parts_norm
+            ):
+                return True
+        return False
+
+    if not recipe_list:
+        return recipe_list
+
+    normalised_order = normalise_processing_order(order)
+
+    # If the provided order contains no usable tokens, do not re-order.
+    # (We still strip known extensions, which is order-preserving.)
+    if not normalised_order:
+        return [
+            strip_known_extensions(str(r).strip()) for r in recipe_list if r is not None
+        ]
+
+    # First, normalise recipe names by stripping known extensions.
+    normalised_recipes: list[str] = []
+    for r in recipe_list:
+        if r is None:
+            continue
+        normalised_recipes.append(strip_known_extensions(str(r).strip()))
+
+    # If a processing order is supplied, match each recipe to the *first* pattern it satisfies.
+    # This supports both direct matches ("upload.jamf") and partial matches ("upload",
+    # "auto_update") against dot-separated segments after the first '.' in the recipe name.
+    pattern_groups: dict[str, list[str]] = {p: [] for p in normalised_order}
+    unmatched: list[str] = []
+
+    for r in normalised_recipes:
+        segments = recipe_segments_after_first_dot(r)
+        matched = False
+        for p in normalised_order:
+            if pattern_matches_segments(p, segments):
+                pattern_groups[p].append(r)
+                matched = True
+                break
+        if not matched:
+            unmatched.append(r)
+
+    ordered: list[str] = []
+    for p in normalised_order:
+        ordered.extend(sorted(pattern_groups[p], key=str.casefold))
+
+    # Remaining recipes: group by their full type string and order groups alphabetically,
+    # with empty-type last.
+    groups: dict[str, list[str]] = {}
+    for r in unmatched:
+        t = recipe_type(r)
+        groups.setdefault(t, []).append(r)
+
+    for t in sorted(groups.keys(), key=lambda x: (x == "", x.casefold())):
+        ordered.extend(sorted(groups[t], key=str.casefold))
+
+    logging.debug(f"Recipe processing order: {normalised_order}")
+    logging.debug(f"Ordered recipes: {ordered}")
+
+    return ordered
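
A worked example of the ordering logic above, reusing the recipe names from the --recipe-processing-order help text plus one extra app (all names are illustrative, and the call assumes order_recipe_list is in scope):

recipes = [
    "ExampleApp.self_service.jamf",
    "ExampleApp.upload.jamf.recipe.yaml",   # known extension is stripped before matching
    "ExampleApp.auto_install.jamf",
    "OtherApp.upload.jamf",
]

ordered = order_recipe_list(recipes, order="upload,auto_install,self_service")
print(ordered)
# ['ExampleApp.upload.jamf', 'OtherApp.upload.jamf',
#  'ExampleApp.auto_install.jamf', 'ExampleApp.self_service.jamf']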