pytest-allure-host 0.1.2__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pytest_allure_host/__init__.py CHANGED
@@ -1,6 +1,14 @@
+from importlib import metadata as _md
+
 from .utils import PublishConfig, default_run_id  # re-export key types
 
+try:  # runtime version (works inside installed env)
+    __version__ = _md.version("pytest-allure-host")
+except Exception:  # pragma: no cover
+    __version__ = "0.0.0+unknown"
+
 __all__ = [
     "PublishConfig",
     "default_run_id",
+    "__version__",
 ]
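
The practical effect of this change is a queryable runtime version. A minimal consumer-side sketch, assuming pytest-allure-host 2.x is installed in the active environment:

    import pytest_allure_host

    # Resolved through importlib.metadata at import time; the package falls
    # back to "0.0.0+unknown" when distribution metadata is unavailable.
    print(pytest_allure_host.__version__)
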
pytest_allure_host/cli.py CHANGED
@@ -2,7 +2,9 @@ from __future__ import annotations
 
 import argparse
 import os
+from pathlib import Path
 
+from . import __version__
 from .config import load_effective_config
 from .publisher import plan_dry_run, preflight, publish
 from .utils import PublishConfig, default_run_id
@@ -10,6 +12,11 @@ from .utils import PublishConfig, default_run_id
 
 def parse_args() -> argparse.Namespace:
     p = argparse.ArgumentParser("publish-allure")
+    p.add_argument(
+        "--version",
+        action="store_true",
+        help="Print version and exit",
+    )
     p.add_argument("--config", help="Path to YAML config (optional)")
     p.add_argument("--bucket")
     p.add_argument("--prefix", default=None)
@@ -20,8 +27,18 @@ def parse_args() -> argparse.Namespace:
         default=os.getenv("ALLURE_RUN_ID", default_run_id()),
     )
     p.add_argument("--cloudfront", default=os.getenv("ALLURE_CLOUDFRONT"))
-    p.add_argument("--results", default="allure-results")
-    p.add_argument("--report", default="allure-report")
+    p.add_argument(
+        "--results",
+        "--results-dir",
+        dest="results",
+        default=os.getenv("ALLURE_RESULTS_DIR", "allure-results"),
+        help="Path to allure-results directory (alias: --results-dir)",
+    )
+    p.add_argument(
+        "--report",
+        default=os.getenv("ALLURE_REPORT_DIR", "allure-report"),
+        help="Output directory for generated Allure static report",
+    )
     p.add_argument("--ttl-days", type=int, default=None)
     p.add_argument("--max-keep-runs", type=int, default=None)
     p.add_argument(
@@ -37,7 +54,7 @@ def parse_args() -> argparse.Namespace:
     p.add_argument(
         "--s3-endpoint",
         default=os.getenv("ALLURE_S3_ENDPOINT"),
-        help=("Custom S3 endpoint URL (e.g. http://localhost:4566 for LocalStack)"),
+        help=("Custom S3 endpoint URL (e.g. http://localhost:4566)"),
     )
     p.add_argument("--summary-json", default=None)
     p.add_argument(
@@ -51,8 +68,9 @@ def parse_args() -> argparse.Namespace:
         default=[],
         metavar="KEY=VAL",
         help=(
-            "Attach arbitrary metadata (repeatable). Example: --meta jira=PROJ-123 "
-            "--meta env=staging. Adds columns to runs index & manifest fields."
+            "Attach arbitrary metadata (repeatable). Example: --meta "
+            "jira=PROJ-123 --meta env=staging. Adds dynamic columns to "
+            "runs index & manifest."
         ),
     )
     p.add_argument("--dry-run", action="store_true", help="Plan only")
@@ -61,6 +79,42 @@ def parse_args() -> argparse.Namespace:
         action="store_true",
         help="Run preflight checks (AWS, allure, inputs)",
     )
+    p.add_argument(
+        "--verbose-summary",
+        action="store_true",
+        help="Print extended summary (CDN prefixes, manifest path, metadata)",
+    )
+    p.add_argument(
+        "--allow-duplicate-prefix-project",
+        action="store_true",
+        help=(
+            "Bypass guard preventing prefix==project duplication. "
+            "Only use if you intentionally want that folder layout."
+        ),
+    )
+    p.add_argument(
+        "--upload-workers",
+        type=int,
+        default=None,
+        help="Parallel upload worker threads (auto if unset)",
+    )
+    p.add_argument(
+        "--copy-workers",
+        type=int,
+        default=None,
+        help="Parallel copy worker threads for latest promotion",
+    )
+    p.add_argument(
+        "--archive-run",
+        action="store_true",
+        help="Also produce a compressed archive of the run (tar.gz)",
+    )
+    p.add_argument(
+        "--archive-format",
+        choices=["tar.gz", "zip"],
+        default="tar.gz",
+        help="Archive format when --archive-run is set",
+    )
     return p.parse_args()
 
 
@@ -82,9 +136,8 @@ def _parse_metadata(pairs: list[str]) -> dict | None:
     return meta or None
 
 
-def main() -> int:
-    args = parse_args()
-    cli_overrides = {
+def _build_cli_overrides(args: argparse.Namespace) -> dict:
+    return {
         "bucket": args.bucket,
         "prefix": args.prefix,
         "project": args.project,
@@ -98,14 +151,19 @@ def main() -> int:
         "sse": args.sse,
         "sse_kms_key_id": args.sse_kms_key_id,
     }
-    effective = load_effective_config(cli_overrides, args.config)
+
+
+def _effective_config(args: argparse.Namespace) -> tuple[dict, PublishConfig]:
+    overrides = _build_cli_overrides(args)
+    effective = load_effective_config(overrides, args.config)
     cfg_source = effective.get("_config_file")
     if cfg_source:
         print(f"[config] loaded settings from {cfg_source}")
     missing = [k for k in ("bucket", "project") if not effective.get(k)]
     if missing:
+        missing_list = ", ".join(missing)
         raise SystemExit(
-            f"Missing required config values: {', '.join(missing)}. Provide via CLI, env, or YAML."  # noqa: E501
+            f"Missing required config values: {missing_list}. Provide via CLI, env, or YAML."
         )
     cfg = PublishConfig(
         bucket=effective["bucket"],
@@ -121,28 +179,111 @@ def main() -> int:
         sse=effective.get("sse"),
         sse_kms_key_id=effective.get("sse_kms_key_id"),
         metadata=_parse_metadata(args.meta),
+        upload_workers=args.upload_workers,
+        copy_workers=args.copy_workers,
+        archive_run=args.archive_run,
+        archive_format=args.archive_format if args.archive_run else None,
     )
+    # Guard against accidental duplication like prefix==project producing
+    # 'reports/reports/<branch>/...' paths. This is usually unintentional
+    # and makes report URLs longer / redundant. Fail fast so users can
+    # correct config explicitly (they can still deliberately choose this
+    # by changing either value slightly, e.g. prefix='reports',
+    # project='team-reports').
+    if cfg.prefix == cfg.project and not getattr(args, "allow_duplicate_prefix_project", False):
+        parts = [
+            "Invalid config: prefix and project are identical (",
+            f"'{cfg.project}'). ",
+            "This yields duplicated S3 paths (",
+            f"{cfg.prefix}/{cfg.project}/<branch>/...). ",
+            "Set distinct values (e.g. prefix='reports', project='payments').",
+        ]
+        raise SystemExit("".join(parts))
+    return effective, cfg
+
+
+def _write_json(path: str, payload: dict) -> None:
+    import json
+
+    with open(path, "w", encoding="utf-8") as f:
+        json.dump(payload, f, indent=2)
+
+
+def _print_publish_summary(
+    cfg: PublishConfig,
+    out: dict,
+    verbose: bool = False,
+) -> None:
+    print("Publish complete")
+    if out.get("run_url"):
+        print(f"Run URL: {out['run_url']}")
+    if out.get("latest_url"):
+        print(f"Latest URL: {out['latest_url']}")
+    # Main aggregated runs index (HTML) at branch root if CDN configured
+    if cfg.cloudfront_domain:
+        branch_root = f"{cfg.prefix}/{cfg.project}/{cfg.branch}"
+        cdn_root = cfg.cloudfront_domain.rstrip("/")
+        runs_index_url = f"{cdn_root}/{branch_root}/runs/index.html"
+        print(f"Runs Index URL: {runs_index_url}")
+    run_prefix = out.get("run_prefix") or cfg.s3_run_prefix
+    latest_prefix = out.get("latest_prefix") or cfg.s3_latest_prefix
+    print(f"S3 run prefix: s3://{cfg.bucket}/{run_prefix}")
+    print(f"S3 latest prefix: s3://{cfg.bucket}/{latest_prefix}")
+    print(
+        "Report files: "
+        f"{out.get('report_files', '?')} Size: "
+        f"{out.get('report_size_bytes', '?')} bytes"
+    )
+    if verbose and cfg.cloudfront_domain:
+        # Duplicate earlier lines but clarify this is the CDN-root mapping
+        print("CDN run prefix (index root):", cfg.url_run())
+        print("CDN latest prefix (index root):", cfg.url_latest())
+    if verbose:
+        # Manifest stored at branch root under runs/index.json
+        branch_root = f"{cfg.prefix}/{cfg.project}/{cfg.branch}"
+        manifest_key = f"{branch_root}/runs/index.json"
+        print("Manifest object:", f"s3://{cfg.bucket}/{manifest_key}")
+        if cfg.metadata:
+            print("Metadata keys:", ", ".join(sorted(cfg.metadata.keys())))
+        if cfg.sse:
+            print("Encryption:", cfg.sse, cfg.sse_kms_key_id or "")
+
+
+def main() -> int:  # noqa: C901 (reduced but keep guard just in case)
+    args = parse_args()
+    if args.version:
+        print(__version__)
+        return 0
+    effective, cfg = _effective_config(args)
+    # Construct explicit Paths honoring custom results/report dirs
+    paths = None
+    try:
+        mod = __import__("pytest_allure_host.publisher", fromlist=["Paths"])
+        paths = mod.publisher.Paths(
+            results=Path(args.results),
+            report=Path(args.report),
+        )
+    except Exception:  # pragma: no cover - defensive fallback
+        from .publisher import Paths  # type: ignore
+
+        paths = Paths(results=Path(args.results), report=Path(args.report))
+
     if args.check:
-        checks = preflight(cfg)
+        checks = preflight(cfg, paths=paths)
         print(checks)
         if not all(checks.values()):
             return 2
     if args.dry_run:
-        plan = plan_dry_run(cfg)
+        plan = plan_dry_run(cfg, paths=paths)
         print(plan)
         if args.summary_json:
-            import json
-
-            with open(args.summary_json, "w", encoding="utf-8") as f:
-                json.dump(plan, f, indent=2)
+            _write_json(args.summary_json, plan)
         return 0
-    out = publish(cfg)
-    print(out)
+    out = publish(cfg, paths=paths)
+    print(out)  # raw dict for backward compatibility
+    _print_publish_summary(cfg, out, verbose=args.verbose_summary)
     if args.summary_json:
-        import json
-
-        with open(args.summary_json, "w", encoding="utf-8") as f:
-            json.dump(out, f, indent=2)
+        _write_json(args.summary_json, out)
     return 0
 
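
Taken together, the CLI changes above can be exercised end to end. A minimal sketch, assuming the console script is exposed under the publish-allure name set in parse_args; the bucket, project, and directory values below are placeholders:

    import subprocess

    # --results-dir is the new alias for --results; --dry-run prints the
    # upload plan without publishing. ALLURE_RESULTS_DIR / ALLURE_REPORT_DIR
    # env vars provide the same defaults when the flags are omitted.
    subprocess.run(
        [
            "publish-allure",
            "--dry-run",
            "--bucket", "my-bucket",
            "--project", "payments",
            "--results-dir", "build/allure-results",
            "--report", "build/allure-report",
        ],
        check=True,
    )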