pytest-allure-host 0.1.1__tar.gz → 0.1.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/PKG-INFO +12 -3
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/README.md +9 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pyproject.toml +6 -4
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/cli.py +46 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/config.py +29 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/plugin.py +3 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/publisher.py +206 -38
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/utils.py +5 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/LICENSE +0 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/__init__.py +0 -0
- {pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/__main__.py +0 -0
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytest-allure-host
-Version: 0.1.1
+Version: 0.1.2
 Summary: Publish Allure static reports to private S3 behind CloudFront with history preservation
 License-Expression: MIT
 License-File: LICENSE
@@ -21,8 +21,8 @@ Classifier: Operating System :: OS Independent
 Requires-Dist: PyYAML (>=6,<7)
 Requires-Dist: boto3 (>=1.28,<2.0)
 Project-URL: Bug Tracker, https://github.com/darrenrabbs/allurehosting/issues
-Project-URL: Changelog, https://github.
-Project-URL: Documentation, https://github.
+Project-URL: Changelog, https://darrenrabbs.github.io/allurehosting/changelog/
+Project-URL: Documentation, https://darrenrabbs.github.io/allurehosting/
 Project-URL: Homepage, https://github.com/darrenrabbs/allurehosting
 Project-URL: Repository, https://github.com/darrenrabbs/allurehosting
 Description-Content-Type: text/markdown
@@ -33,11 +33,20 @@ Description-Content-Type: text/markdown
 [badge]
 [badge]
 [badge]
+[](https://darrenrabbs.github.io/allurehosting/)
 
 Publish Allure static reports to private S3 behind CloudFront with history preservation and SPA-friendly routing.
 
 See `docs/architecture.md` and `.github/copilot-instructions.md` for architecture and design constraints.
 
+## Documentation
+
+Full documentation (quickstart, AWS setup, IAM least-privilege, CLI usage, changelog) is published at:
+
+https://darrenrabbs.github.io/allurehosting/
+
+The README intentionally stays lean—refer to the site for detailed guidance.
+
 ## Features
 
 - Generate Allure static report from `allure-results`
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/README.md

@@ -4,11 +4,20 @@
 [badge]
 [badge]
 [badge]
+[](https://darrenrabbs.github.io/allurehosting/)
 
 Publish Allure static reports to private S3 behind CloudFront with history preservation and SPA-friendly routing.
 
 See `docs/architecture.md` and `.github/copilot-instructions.md` for architecture and design constraints.
 
+## Documentation
+
+Full documentation (quickstart, AWS setup, IAM least-privilege, CLI usage, changelog) is published at:
+
+https://darrenrabbs.github.io/allurehosting/
+
+The README intentionally stays lean—refer to the site for detailed guidance.
+
 ## Features
 
 - Generate Allure static report from `allure-results`
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pyproject.toml

@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [project]
 name = "pytest-allure-host"
-version = "0.1.1"
+version = "0.1.2"
 description = "Publish Allure static reports to private S3 behind CloudFront with history preservation"
 readme = "README.md"
 license = "MIT"
@@ -44,7 +44,9 @@ allure-pytest = ">=2,<3"
 pytest-cov = ">=4,<5"
 ruff = ">=0.5,<1.0"
 pip-audit = ">=2.7,<3.0"
-black = ">=24,<
+black = ">=24,<26"
+mkdocs = ">=1.5,<2.0"
+mkdocs-material = ">=9.5,<10.0"
 
 [project.scripts]
 publish-allure = "pytest_allure_host.cli:main"
@@ -55,9 +57,9 @@ pytest_allure_host = "pytest_allure_host.plugin"
 [project.urls]
 Homepage = "https://github.com/darrenrabbs/allurehosting"
 Repository = "https://github.com/darrenrabbs/allurehosting"
-Documentation = "https://github.
+Documentation = "https://darrenrabbs.github.io/allurehosting/"
 "Bug Tracker" = "https://github.com/darrenrabbs/allurehosting/issues"
-Changelog = "https://github.
+Changelog = "https://darrenrabbs.github.io/allurehosting/changelog/"
 
 # Package include (PEP 621 doesn't specify this; still handled by Poetry configuration)
 [tool.poetry]
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/cli.py

@@ -24,6 +24,16 @@ def parse_args() -> argparse.Namespace:
     p.add_argument("--report", default="allure-report")
     p.add_argument("--ttl-days", type=int, default=None)
     p.add_argument("--max-keep-runs", type=int, default=None)
+    p.add_argument(
+        "--sse",
+        default=os.getenv("ALLURE_S3_SSE"),
+        help="Server-side encryption algorithm (AES256 or aws:kms)",
+    )
+    p.add_argument(
+        "--sse-kms-key-id",
+        default=os.getenv("ALLURE_S3_SSE_KMS_KEY_ID"),
+        help="KMS Key ID / ARN when --sse=aws:kms",
+    )
     p.add_argument(
         "--s3-endpoint",
         default=os.getenv("ALLURE_S3_ENDPOINT"),
@@ -35,6 +45,16 @@ def parse_args() -> argparse.Namespace:
         default=os.getenv("ALLURE_CONTEXT_URL"),
         help="Optional hyperlink giving change context (e.g. Jira ticket)",
     )
+    p.add_argument(
+        "--meta",
+        action="append",
+        default=[],
+        metavar="KEY=VAL",
+        help=(
+            "Attach arbitrary metadata (repeatable). Example: --meta jira=PROJ-123 "
+            "--meta env=staging. Adds columns to runs index & manifest fields."
+        ),
+    )
     p.add_argument("--dry-run", action="store_true", help="Plan only")
     p.add_argument(
         "--check",
@@ -44,6 +64,24 @@ def parse_args() -> argparse.Namespace:
     return p.parse_args()
 
 
+def _parse_metadata(pairs: list[str]) -> dict | None:
+    if not pairs:
+        return None
+    meta: dict[str, str] = {}
+    for raw in pairs:
+        if "=" not in raw:
+            continue
+        k, v = raw.split("=", 1)
+        k = k.strip()
+        v = v.strip()
+        if not k:
+            continue
+        safe_k = k.lower().replace("-", "_")
+        if safe_k and v:
+            meta[safe_k] = v
+    return meta or None
+
+
 def main() -> int:
     args = parse_args()
     cli_overrides = {
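For orientation, here is a small sketch of what the repeatable `--meta` flag yields once `_parse_metadata` has normalised it. The invocation and the sample values are invented for illustration; the normalisation rules (lower-cased keys, dashes mapped to underscores, malformed pairs dropped) are the ones shown in the hunk above.

```python
# Hypothetical CLI call: publish-allure --meta jira=PROJ-123 --meta env=staging --meta Build-Number=42
# argparse's action="append" collects the raw KEY=VAL strings; _parse_metadata cleans them up.
from pytest_allure_host.cli import _parse_metadata

raw_pairs = ["jira=PROJ-123", "env=staging", "Build-Number=42", "not-a-pair"]
print(_parse_metadata(raw_pairs))
# {'jira': 'PROJ-123', 'env': 'staging', 'build_number': '42'}
```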
@@ -57,8 +95,13 @@ def main() -> int:
         "max_keep_runs": args.max_keep_runs,
         "s3_endpoint": args.s3_endpoint,
         "context_url": args.context_url,
+        "sse": args.sse,
+        "sse_kms_key_id": args.sse_kms_key_id,
     }
     effective = load_effective_config(cli_overrides, args.config)
+    cfg_source = effective.get("_config_file")
+    if cfg_source:
+        print(f"[config] loaded settings from {cfg_source}")
     missing = [k for k in ("bucket", "project") if not effective.get(k)]
     if missing:
         raise SystemExit(
@@ -75,6 +118,9 @@ def main() -> int:
         max_keep_runs=effective.get("max_keep_runs"),
         s3_endpoint=effective.get("s3_endpoint"),
         context_url=effective.get("context_url"),
+        sse=effective.get("sse"),
+        sse_kms_key_id=effective.get("sse_kms_key_id"),
+        metadata=_parse_metadata(args.meta),
     )
     if args.check:
         checks = preflight(cfg)
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/config.py

@@ -38,10 +38,14 @@ from typing import Any
 import yaml
 
 CONFIG_FILENAMES = [
+    # YAML (legacy / original)
    "allure-host.yml",
    "allure-host.yaml",
    ".allure-host.yml",
    ".allure-host.yaml",
+    # TOML (new preferred simple format)
+    "allurehost.toml",
+    ".allurehost.toml",
     # Additional generic app config names people often use:
    "application.yml",
    "application.yaml",
@@ -81,9 +85,32 @@ def _read_yaml(path: Path) -> dict[str, Any]:
     return {}
 
 
+def _read_toml(path: Path) -> dict[str, Any]:
+    try:
+        import sys
+
+        if sys.version_info >= (3, 11):  # stdlib tomllib
+            import tomllib  # type: ignore
+        else:  # fallback to optional dependency
+            import tomli as tomllib  # type: ignore
+    except Exception:  # pragma: no cover - toml not available
+        return {}
+    try:
+        with path.open("rb") as f:
+            data = tomllib.load(f)
+        return data if isinstance(data, dict) else {}
+    except Exception:  # pragma: no cover - malformed
+        return {}
+
+
 def discover_yaml_config(explicit: str | None = None) -> LoadedConfig:
     if explicit:
         p = Path(explicit)
+        if p.suffix.lower() == ".toml":
+            return LoadedConfig(
+                source_file=p if p.exists() else None,
+                data=_read_toml(p),
+            )
         return LoadedConfig(
             source_file=p if p.exists() else None,
             data=_read_yaml(p),
@@ -91,6 +118,8 @@ def discover_yaml_config(explicit: str | None = None) -> LoadedConfig:
     for name in CONFIG_FILENAMES:
         p = Path(name)
         if p.exists():
+            if p.suffix.lower() == ".toml":
+                return LoadedConfig(source_file=p, data=_read_toml(p))
             return LoadedConfig(source_file=p, data=_read_yaml(p))
     return LoadedConfig(source_file=None, data={})
 
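To make the new TOML path concrete, here is a minimal sketch of dropping an `allurehost.toml` next to the test suite and letting discovery pick it up. `discover_yaml_config` and `LoadedConfig` come from the hunk itself; the flat `bucket`/`project`/`sse` key names are assumed from the CLI options above, since the loader simply returns whatever mapping tomllib parses.

```python
from pathlib import Path

from pytest_allure_host.config import discover_yaml_config

# Assumed flat key layout; values are placeholders.
Path("allurehost.toml").write_text(
    'bucket = "my-allure-reports"\n'
    'project = "checkout-service"\n'
    'sse = "AES256"\n'
)

loaded = discover_yaml_config()   # walks CONFIG_FILENAMES in order, .toml now included
print(loaded.source_file)         # allurehost.toml
print(loaded.data["bucket"])      # my-allure-reports
```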
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/plugin.py

@@ -79,6 +79,9 @@ def pytest_terminal_summary(  # noqa: C901 - central orchestration, readable
         "context_url": context_url,
     }
     effective = load_effective_config(cli_overrides, config.getoption("allure_config"))
+    cfg_source = effective.get("_config_file")
+    if cfg_source:
+        terminalreporter.write_line(f"[allure-host] config file: {cfg_source}")
     # Minimal required
     if not effective.get("bucket") or not effective.get("project"):
         return
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/publisher.py

@@ -5,6 +5,7 @@ Responsible for:
 * Uploading run report to S3 (run prefix) + atomic promotion to latest/
 * Writing manifest (runs/index.json) + human HTML index + trend viewer
 * Retention (max_keep_runs) + directory placeholder objects
+* Extracting metadata keys from runs
 
 The trend viewer (runs/trend.html) is a small dependency‑free canvas page
 visualising passed / failed / broken counts across historical runs using
@@ -57,9 +58,35 @@ class Paths:
         self.report = self.base / "allure-report"
 
 
-
-
-
+def _discover_meta_keys(runs: list[dict]) -> list[str]:
+    """Return sorted list of dynamic metadata keys across runs."""
+    core_cols = {
+        "run_id",
+        "time",
+        "size",
+        "project",
+        "branch",
+        "passed",
+        "failed",
+        "broken",
+        "context_url",
+    }
+    keys: list[str] = []
+    for r in runs:
+        for k in r.keys():
+            if k in core_cols or k.endswith("_url"):
+                continue
+            if k not in keys:
+                keys.append(k)
+    keys.sort()
+    return keys
+
+
+def _format_meta_cell(val) -> str:
+    if val is None:
+        return "<td>-</td>"
+    esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
+    return f"<td>{esc}</td>"
 
 
 def _s3(cfg: PublishConfig):  # allow custom endpoint (tests / local)
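A quick illustration of the two helpers with made-up manifest entries. The run dicts are fabricated; the filtering of core columns and `*_url` keys, and the HTML escaping, are as defined in the hunk above.

```python
from pytest_allure_host.publisher import _discover_meta_keys, _format_meta_cell

# Fabricated manifest entries: only 'env' and 'jira' survive as dynamic keys,
# because core columns and anything ending in '_url' are skipped.
runs = [
    {"run_id": "r1", "time": 1700000000, "passed": 10, "jira": "PROJ-1", "ctx_url": "x"},
    {"run_id": "r2", "time": 1700001000, "failed": 1, "env": "staging"},
]
print(_discover_meta_keys(runs))   # ['env', 'jira']
print(_format_meta_cell("a<b"))    # <td>a&lt;b</td>
print(_format_meta_cell(None))     # <td>-</td>
```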
@@ -69,7 +96,11 @@ def _s3(cfg: PublishConfig):  # allow custom endpoint (tests / local)
     return boto3.client("s3")
 
 
-def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> Iterable[str]:
+def list_keys(
+    bucket: str,
+    prefix: str,
+    endpoint: str | None = None,
+) -> Iterable[str]:
     s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
     paginator = s3.get_paginator("list_objects_v2")
     for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
@@ -79,13 +110,18 @@ def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> Iterable
             yield key
 
 
-def delete_prefix(
+def delete_prefix(
+    bucket: str,
+    prefix: str,
+    endpoint: str | None = None,
+) -> None:
     keys = list(list_keys(bucket, prefix, endpoint))
     if not keys:
         return
     s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
     # Batch delete 1000 at a time
     for i in range(0, len(keys), 1000):
+        # Ruff style: remove spaces inside slice
         batch = keys[i : i + 1000]
         if not batch:
             continue
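The hunk stops just before the delete call itself, so for context here is a hedged sketch of how a batch of up to 1000 keys is typically removed with boto3's `delete_objects`. This mirrors the standard S3 API (which is why the caller slices `keys[i : i + 1000]`), not necessarily the exact call the package makes.

```python
import boto3

def _delete_batch(bucket: str, batch: list[str], endpoint: str | None = None) -> None:
    # Illustrative helper, not from the package: DeleteObjects accepts at most
    # 1000 keys per request, matching the 1000-key slicing in delete_prefix.
    s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
    s3.delete_objects(
        Bucket=bucket,
        Delete={"Objects": [{"Key": k} for k in batch], "Quiet": True},
    )
```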
@@ -143,7 +179,9 @@ def generate_report(paths: Paths) -> None:
     # Validate discovered binary path before executing (Bandit B603 mitigation)
     exec_path = Path(allure_path).resolve()
     if not exec_path.is_file() or exec_path.name != "allure":  # pragma: no cover
-        raise RuntimeError(
+        raise RuntimeError(
+            f"Unexpected allure exec: {exec_path}"  # shorter for line length
+        )
     # Safety: allure_path validated above; args are static & derived from
     # controlled paths (no user-provided injection surface).
     cmd = [
@@ -195,6 +233,10 @@ def upload_dir(cfg: PublishConfig, root_dir: Path, key_prefix: str) -> None:
             extra["ContentType"] = ctype
         if cfg.ttl_days is not None:
             extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
+        if cfg.sse:
+            extra["ServerSideEncryption"] = cfg.sse
+        if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
+            extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
         s3.upload_file(str(p), cfg.bucket, key, ExtraArgs=extra)
 
 
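As a point of reference, the `ExtraArgs` mapping that ends up on `upload_file` when KMS encryption is configured would look roughly like this. The bucket name, object key, local path and key ARN are placeholders; `ServerSideEncryption` and `SSEKMSKeyId` are standard boto3 upload arguments.

```python
import boto3

s3 = boto3.client("s3")
extra = {
    "ContentType": "text/html",
    "Tagging": "ttl-days=30",
    # Added in 0.1.2: server-side encryption headers, only when configured.
    "ServerSideEncryption": "aws:kms",
    "SSEKMSKeyId": "arn:aws:kms:eu-west-1:111122223333:key/example-key-id",  # placeholder
}
s3.upload_file(
    "allure-report/index.html",                 # local file (placeholder)
    "my-allure-reports",                        # bucket (placeholder)
    "proj/main/latest/index.html",              # object key (placeholder)
    ExtraArgs=extra,
)
```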
@@ -231,7 +273,11 @@ def two_phase_update_latest(cfg: PublishConfig, report_dir: Path) -> None:
     delete_prefix(cfg.bucket, tmp_prefix, getattr(cfg, "s3_endpoint", None))
 
 
-def _validate_and_repair_latest(
+def _validate_and_repair_latest(
+    cfg: PublishConfig,
+    report_dir: Path,
+    latest_prefix: str,
+) -> None:
     s3 = _s3(cfg)
     try:
         s3.head_object(Bucket=cfg.bucket, Key=f"{latest_prefix}index.html")
@@ -305,6 +351,10 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
     }
     if getattr(cfg, "context_url", None):
         entry["context_url"] = cfg.context_url
+    if cfg.metadata:
+        for mk, mv in cfg.metadata.items():
+            if mk not in entry:
+                entry[mk] = mv
     counts = _extract_summary_counts(paths.report)
     if counts:
         entry.update(counts)
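The net effect on the manifest: CLI metadata is merged into each run entry as extra top-level fields, never overwriting core keys. A fabricated example of what one `runs/index.json` entry might look like after publishing with `--meta jira=PROJ-123 --meta env=staging` (all field values invented):

```python
# Fabricated runs/index.json entry; core fields come from the publisher,
# 'jira' and 'env' are merged in from cfg.metadata by the loop above.
entry = {
    "run_id": "2024-05-21T14-03-12",   # invented run id format
    "time": 1716300192,
    "size": 1048576,
    "passed": 120,
    "failed": 2,
    "broken": 1,
    "context_url": "https://jira.example.com/browse/PROJ-123",
    "jira": "PROJ-123",
    "env": "staging",
}
```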
@@ -380,34 +430,47 @@ def _build_runs_index_html(
     row_cap: int = 500,
 ) -> bytes:
     runs_list = manifest.get("runs", [])
-    runs_sorted = sorted(
+    runs_sorted = sorted(
+        runs_list,
+        key=lambda r: r.get("time", 0),
+        reverse=True,
+    )
+    # discover dynamic metadata keys (excluding core + *_url)
+    meta_keys = _discover_meta_keys(runs_sorted)
     rows: list[str] = []
     for rinfo in runs_sorted[:row_cap]:
         rid = rinfo.get("run_id", "?")
-        size = rinfo.get("size") or 0
-        t = rinfo.get("time") or 0
-
-
-
-
-
-
-
-        else:
-            summary = f"{passed or 0}/{failed or 0}/{broken or 0}"
+        size = int(rinfo.get("size") or 0)
+        t = int(rinfo.get("time") or 0)
+        passed, failed, broken = (
+            rinfo.get("passed"),
+            rinfo.get("failed"),
+            rinfo.get("broken"),
+        )
+        has_counts = any(v is not None for v in (passed, failed, broken))
+        summary = f"{passed or 0}/{failed or 0}/{broken or 0}" if has_counts else "-"
         ctx_url = rinfo.get("context_url")
-
-
-
-
-
-
-            f"
-            f"
+        ctx_cell = (
+            f"<a href='{ctx_url}' target='_blank' rel='noopener'>link</a>" if ctx_url else "-"
+        )
+        meta_cells = "".join(_format_meta_cell(rinfo.get(mk)) for mk in meta_keys)
+        row_html = (
+            "\n<tr"
+            f" data-passed='{passed or 0}'"
+            f" data-failed='{failed or 0}'"
+            f" data-broken='{broken or 0}'><td><code>"
+            f"{rid}</code></td><td>{t}</td><td>{_format_epoch_utc(t)}</td>"
+            f"<td title='{size}'>{_format_bytes(size)}</td><td>{summary}</td>"
+            f"<td>{ctx_cell}</td>{meta_cells}"
             f"<td><a href='../{rid}/'>run</a></td>"
             "<td><a href='../latest/'>latest</a></td></tr>"
         )
-
+        rows.append(row_html)
+    # colspan accounts for base 8 columns + dynamic metadata count
+    empty_cols = 8 + len(meta_keys)
+    table_rows = (
+        "\n".join(rows) if rows else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
+    )
     title = f"Allure Runs: {cfg.project} / {cfg.branch}"
     nav = (
         "<nav class='quick-links'><strong>Latest:</strong> "
@@ -418,6 +481,7 @@ def _build_runs_index_html(
         "<a href='trend.html'>trend-view</a>"
         "</nav>"
     )
+    meta_header = "".join(f"<th class='sortable' data-col='meta:{k}'>{k}</th>" for k in meta_keys)
     parts: list[str] = [
         "<!doctype html><html><head><meta charset='utf-8'>",
         f"<title>{title}</title>",
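Since the dynamic columns are the visible part of the metadata feature, here is the `meta_header` expression from the line above evaluated on a sample key list (the keys are invented; the expression is the one in the hunk):

```python
meta_keys = ["env", "jira"]  # sample output of _discover_meta_keys
meta_header = "".join(f"<th class='sortable' data-col='meta:{k}'>{k}</th>" for k in meta_keys)
print(meta_header)
# <th class='sortable' data-col='meta:env'>env</th><th class='sortable' data-col='meta:jira'>jira</th>
```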
@@ -444,16 +508,79 @@ def _build_runs_index_html(
         "nav.quick-links a{margin-right:.65rem;}",
         "</style></head><body>",
         f"<h1>{title}</h1>",
+        (
+            "<div id='controls' style='margin:.5rem 0 1rem;display:flex;"  # noqa: E501
+            "gap:1rem;flex-wrap:wrap'>"  # noqa: E501
+            "<label style='font-size:14px'>Search: <input id='run-filter'"  # noqa: E501
+            " type='text' placeholder='substring (id, context, meta)'"  # noqa: E501
+            " style='padding:4px 6px;font-size:14px;border:1px solid #ccc;"  # noqa: E501
+            "border-radius:4px'></label>"  # noqa: E501
+            "<label style='font-size:14px'>"  # noqa: E501
+            "<input type='checkbox' id='only-failing' style='margin-right:4px'>"  # noqa: E501
+            "Only failing</label>"  # noqa: E501
+            "<span id='stats' style='font-size:12px;color:#666'></span></div>"  # noqa: E501
+        ),
         nav,
-        "<table><thead><tr>",
-        (
-
+        "<table id='runs-table'><thead><tr>",
+        (
+            "<th class='sortable' data-col='run_id'>Run ID</th>"
+            "<th class='sortable' data-col='epoch'>Epoch</th>"
+            "<th class='sortable' data-col='utc'>UTC Time</th>"
+            "<th class='sortable' data-col='size'>Size</th>"
+        ),
+        (
+            "<th class='sortable' data-col='pfb'>P/F/B</th>"
+            "<th class='sortable' data-col='context'>Context</th>"
+            f"{meta_header}<th>Run</th><th>Latest</th></tr></thead><tbody>"
+        ),
         table_rows,
         "</tbody></table>",
         (
             f"<footer>Updated {latest_payload.get('run_id', '?')} • "
             f"{cfg.project}/{cfg.branch}</footer>"
         ),
+        (
+            "<script>"
+            "(function(){"  # IIFE wrapper
+            "const tbl=document.getElementById('runs-table');"
+            "const filter=document.getElementById('run-filter');"
+            "const stats=document.getElementById('stats');"
+            "const onlyFail=document.getElementById('only-failing');"
+            "function updateStats(){const total=tbl.tBodies[0].rows.length;"
+            "const visible=[...tbl.tBodies[0].rows]"  # next line filters
+            ".filter(r=>r.style.display!=='none').length;"
+            "stats.textContent=visible+' / '+total+' shown';}"
+            "function applyFilter(){const q=filter.value.toLowerCase();"
+            "const onlyF=onlyFail.checked;"
+            "[...tbl.tBodies[0].rows].forEach(r=>{"
+            "const txt=r.textContent.toLowerCase();"
+            "const hasTxt=!q||txt.indexOf(q)>-1;"
+            "const failing=Number(r.getAttribute('data-failed')||'0')>0;"
+            "r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';});"
+            "updateStats();}"
+            "filter.addEventListener('input',applyFilter);"
+            "onlyFail.addEventListener('change',applyFilter);"
+            "let sortState=null;"
+            "function extract(r,col){switch(col){"
+            "case 'epoch':return r.cells[1].textContent;"
+            "case 'size':return r.cells[3].getAttribute('title');"
+            "case 'pfb':return r.cells[4].textContent;"
+            "default:return r.textContent;}}"
+            "function sortBy(col){const tbody=tbl.tBodies[0];"
+            "const rows=[...tbody.rows];let dir=1;"
+            "if(sortState&&sortState.col===col){dir=-sortState.dir;}"
+            "sortState={col,dir};"
+            "const numeric=(col==='epoch'||col==='size');"
+            "rows.sort((r1,r2)=>{const a=extract(r1,col);"
+            "const b=extract(r2,col);if(numeric){return (("
+            "(Number(a)||0)-(Number(b)||0))*dir;}"
+            "return a.localeCompare(b)*dir;});"
+            "rows.forEach(r=>tbody.appendChild(r));}"
+            "tbl.tHead.querySelectorAll('th.sortable')"  # split chain
+            ".forEach(th=>{th.addEventListener('click',()=>sortBy(th.dataset.col));});"  # noqa: E501
+            "updateStats();})();"
+            "</script>"
+        ),
         "</body></html>",
     ]
     return "".join(parts).encode("utf-8")
@@ -565,7 +692,11 @@ def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
     # list immediate children (run prefixes)
     paginator = s3.get_paginator("list_objects_v2")
     run_prefixes: list[str] = []
-    for page in paginator.paginate(
+    for page in paginator.paginate(
+        Bucket=cfg.bucket,
+        Prefix=f"{root}/",
+        Delimiter="/",
+    ):
         for cp in page.get("CommonPrefixes", []) or []:
             pfx = cp.get("Prefix")
             if not pfx:
@@ -581,7 +712,11 @@ def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
         delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
 
 
-def _ensure_directory_placeholder(
+def _ensure_directory_placeholder(
+    cfg: PublishConfig,
+    index_file: Path,
+    dir_prefix: str,
+) -> None:
     if not index_file.exists() or not dir_prefix.endswith("/"):
         return
     body = index_file.read_bytes()
@@ -632,13 +767,30 @@ def preflight(
     except OSError:
         results["allure_results"] = False
 
+    region_mismatch = False
+    bucket_region = None
     try:
         s3 = _s3(cfg)
-        s3.head_bucket(Bucket=cfg.bucket)
-
+        head = s3.head_bucket(Bucket=cfg.bucket)
+        # region detection (defensive: some stubs may return None)
+        if head:
+            bucket_region = (
+                head.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
+            )
+        # Attempt a small list to confirm permissions
+        s3.list_objects_v2(
+            Bucket=cfg.bucket,
+            Prefix=cfg.s3_latest_prefix,
+            MaxKeys=1,
+        )
         results["s3_bucket"] = True
-    except ClientError:
+    except ClientError as e:
+        code = getattr(e, "response", {}).get("Error", {}).get("Code")
+        if code == "301":  # permanent redirect / region mismatch
+            region_mismatch = True
         results["s3_bucket"] = False
+    results["bucket_region"] = bucket_region
+    results["region_mismatch"] = region_mismatch
     return results
 
 
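For readers wiring `--check` into CI, a hedged sketch of interpreting the two new preflight fields. The `checks` dict below is a fabricated return value of `preflight(cfg)`; only the key names come from the hunk, and the printed messages are illustrative.

```python
# Fabricated preflight result; 'bucket_region' and 'region_mismatch' were added in 0.1.2.
checks = {"s3_bucket": False, "bucket_region": "eu-west-1", "region_mismatch": True}

if checks["region_mismatch"]:
    # head_bucket raised a ClientError with code "301": the client is talking to the wrong region.
    print(f"Bucket lives in {checks['bucket_region']!r}; point the S3 client at that region.")
elif not checks["s3_bucket"]:
    print("Bucket unreachable or listing the latest/ prefix was denied; check IAM permissions.")
```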
@@ -662,14 +814,30 @@ def plan_dry_run(cfg: PublishConfig, paths: Paths | None = None) -> dict:
     else:
         samples.append({"note": "Report missing; would run allure generate."})
     root = branch_root(cfg.prefix, cfg.project, cfg.branch)
-
+    latest_tmp = f"{root}/latest_tmp/"
+    mapping = {
         "bucket": cfg.bucket,
+        "prefix": cfg.prefix,
+        "project": cfg.project,
+        "branch": cfg.branch,
+        "run_id": cfg.run_id,
         "run_prefix": cfg.s3_run_prefix,
-
+        # Backwards compat: historical key name pointed to temp swap area
+        "latest_prefix": latest_tmp,
+        "latest_tmp_prefix": latest_tmp,
+        "latest_final_prefix": cfg.s3_latest_prefix,
+    }
+    return {
+        **mapping,
         "run_url": cfg.url_run(),
         "latest_url": cfg.url_latest(),
         "context_url": getattr(cfg, "context_url", None),
+        "metadata": cfg.metadata or {},
         "samples": samples,
+        "encryption": {
+            "sse": cfg.sse,
+            "sse_kms_key_id": cfg.sse_kms_key_id,
+        },
     }
 
 
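A fabricated example of the enriched dry-run plan after this change, showing the backwards-compatible prefix keys plus the new `metadata` and `encryption` sections. The key names follow the hunk; every value, including the prefix layout and URLs, is invented.

```python
# Invented plan_dry_run() output; only the key names are taken from the diff.
plan = {
    "bucket": "my-allure-reports",
    "prefix": "allure",
    "project": "checkout-service",
    "branch": "main",
    "run_id": "r42",
    "run_prefix": "allure/checkout-service/main/r42/",           # assumed layout
    "latest_prefix": "allure/checkout-service/main/latest_tmp/",  # historical name, kept for compatibility
    "latest_tmp_prefix": "allure/checkout-service/main/latest_tmp/",
    "latest_final_prefix": "allure/checkout-service/main/latest/",
    "run_url": "https://reports.example.com/allure/checkout-service/main/r42/",
    "latest_url": "https://reports.example.com/allure/checkout-service/main/latest/",
    "context_url": None,
    "metadata": {"jira": "PROJ-123"},
    "samples": [{"note": "Report missing; would run allure generate."}],
    "encryption": {"sse": "aws:kms", "sse_kms_key_id": "arn:aws:kms:eu-west-1:111122223333:key/example"},
}
```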
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/utils.py

@@ -53,6 +53,11 @@ class PublishConfig:
     s3_endpoint: str | None = None  # custom S3 endpoint (e.g. LocalStack)
     # optional link to change context (e.g. Jira ticket / work item)
     context_url: str | None = None
+    # encryption parameters (optional)
+    sse: str | None = None  # e.g. 'AES256' or 'aws:kms'
+    sse_kms_key_id: str | None = None
+    # arbitrary metadata (jira ticket, environment, etc.)
+    metadata: dict | None = None
 
     @property
     def s3_run_prefix(self) -> str:
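Tying the new fields together, a hedged construction of `PublishConfig` along the lines of what `main()` does in cli.py. Only fields visible in this diff are shown; the assumption that `bucket` and `project` are the only required fields follows the CLI's missing-key check, and the module path for `PublishConfig` follows the utils.py hunk above.

```python
from pytest_allure_host.cli import _parse_metadata
from pytest_allure_host.utils import PublishConfig  # assumed location per this diff

cfg = PublishConfig(
    bucket="my-allure-reports",          # placeholder bucket
    project="checkout-service",          # placeholder project
    sse="aws:kms",                       # or "AES256"
    sse_kms_key_id="arn:aws:kms:eu-west-1:111122223333:key/example",  # placeholder ARN
    metadata=_parse_metadata(["jira=PROJ-123", "env=staging"]),
)
# upload_dir() will now add ServerSideEncryption/SSEKMSKeyId to every PUT,
# and write_manifest() will merge the metadata into each run entry.
```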
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/LICENSE: file without changes
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/__init__.py: file without changes
{pytest_allure_host-0.1.1 → pytest_allure_host-0.1.2}/pytest_allure_host/__main__.py: file without changes