pytest-allure-host 0.1.1__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,22 +1,27 @@
1
+ # flake8: noqa
1
2
  """Report publishing primitives (generate, upload, atomic latest swap).
2
-
3
- Responsible for:
4
- * Generating Allure report (pulling prior history first)
3
+ f"<script>const INIT={initial_client_rows};const BATCH={batch_size};</script>",
4
+ f"<script>{RUNS_INDEX_JS}</script>",
5
5
  * Uploading run report to S3 (run prefix) + atomic promotion to latest/
6
6
  * Writing manifest (runs/index.json) + human HTML index + trend viewer
7
7
  * Retention (max_keep_runs) + directory placeholder objects
8
+ * Extracting metadata keys from runs
8
9
 
9
10
  The trend viewer (runs/trend.html) is a small dependency‑free canvas page
10
11
  visualising passed / failed / broken counts across historical runs using
11
12
  Allure's history-trend.json.
12
13
  """
13
14
 
15
+ # ruff: noqa: E501 # Long HTML/JS lines in embedded template
16
+
14
17
  from __future__ import annotations
15
18
 
16
19
  import json
20
+ import os
17
21
  import shutil
18
22
  import subprocess # nosec B404
19
23
  from collections.abc import Iterable
24
+ from concurrent.futures import ThreadPoolExecutor, as_completed
20
25
  from dataclasses import dataclass
21
26
  from pathlib import Path
22
27
  from time import time
@@ -24,6 +29,15 @@ from time import time
24
29
  import boto3
25
30
  from botocore.exceptions import ClientError
26
31
 
32
+ from .templates import (
33
+ RUNS_INDEX_CSS_BASE,
34
+ RUNS_INDEX_CSS_ENH,
35
+ RUNS_INDEX_CSS_MISC,
36
+ RUNS_INDEX_CSS_TABLE,
37
+ RUNS_INDEX_JS,
38
+ RUNS_INDEX_JS_ENH,
39
+ RUNS_INDEX_SENTINELS,
40
+ )
27
41
  from .utils import (
28
42
  PublishConfig,
29
43
  branch_root,
@@ -34,94 +48,99 @@ from .utils import (
34
48
  )
35
49
 
36
50
  # --------------------------------------------------------------------------------------
37
- # Paths helper
38
- # --------------------------------------------------------------------------------------
39
-
40
-
41
- @dataclass
42
- class Paths:
43
- """Filesystem layout helper.
44
-
45
- Backwards compatibility: tests (and prior API) may pass explicit
46
- 'report=' and 'results=' paths. If omitted we derive them from base.
47
- """
48
-
49
- base: Path = Path(".")
50
- report: Path | None = None
51
- results: Path | None = None
52
-
53
- def __post_init__(self) -> None: # derive defaults if not provided
54
- if self.results is None:
55
- self.results = self.base / "allure-results"
56
- if self.report is None:
57
- self.report = self.base / "allure-report"
58
-
59
-
60
- # --------------------------------------------------------------------------------------
61
- # S3 helpers
51
+ # S3 client + listing/deletion helpers (restored after refactor)
62
52
  # --------------------------------------------------------------------------------------
63
53
 
64
54
 
65
- def _s3(cfg: PublishConfig): # allow custom endpoint (tests / local)
66
- endpoint = getattr(cfg, "s3_endpoint", None)
67
- if endpoint:
68
- return boto3.client("s3", endpoint_url=endpoint)
55
+ def _s3(cfg: PublishConfig): # noqa: D401 - tiny wrapper
56
+ """Return a boto3 S3 client honoring optional endpoint override."""
57
+ if getattr(cfg, "s3_endpoint", None): # custom / LocalStack style
58
+ return boto3.client("s3", endpoint_url=cfg.s3_endpoint)
69
59
  return boto3.client("s3")
70
60
 
71
61
 
72
- def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> Iterable[str]:
62
+ def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> list[str]:
63
+ """List object keys under a prefix (non-recursive)."""
73
64
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
65
+ keys: list[str] = []
74
66
  paginator = s3.get_paginator("list_objects_v2")
75
67
  for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
76
68
  for obj in page.get("Contents", []) or []:
77
- key = obj.get("Key")
78
- if key:
79
- yield key
69
+ k = obj.get("Key")
70
+ if k:
71
+ keys.append(k)
72
+ return keys
80
73
 
81
74
 
82
75
  def delete_prefix(bucket: str, prefix: str, endpoint: str | None = None) -> None:
83
- keys = list(list_keys(bucket, prefix, endpoint))
84
- if not keys:
76
+ """Delete all objects beneath prefix (best-effort)."""
77
+ ks = list_keys(bucket, prefix, endpoint)
78
+ if not ks:
85
79
  return
86
80
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
87
- # Batch delete 1000 at a time
88
- for i in range(0, len(keys), 1000):
89
- batch = keys[i : i + 1000]
90
- if not batch:
91
- continue
92
- s3.delete_objects(
93
- Bucket=bucket,
94
- Delete={"Objects": [{"Key": k} for k in batch], "Quiet": True},
95
- )
81
+ # Batch in chunks of 1000 (S3 limit)
82
+ for i in range(0, len(ks), 1000):
83
+ chunk = ks[i : i + 1000]
84
+ try: # pragma: no cover - error path
85
+ s3.delete_objects(
86
+ Bucket=bucket,
87
+ Delete={"Objects": [{"Key": k} for k in chunk], "Quiet": True},
88
+ )
89
+ except Exception as e: # pragma: no cover
90
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
91
+ print(f"[publish] delete_prefix warning: {e}")
92
+
93
+
94
+ def pull_history(cfg: PublishConfig, paths: "Paths") -> None:
95
+ """Best-effort download of previous run history to seed trend graphs.
96
+
97
+ Copies objects from latest/history/ into local allure-results/history/ so the
98
+ newly generated report preserves cumulative trend data. Silent on failure.
99
+ """
100
+ try:
101
+ hist_prefix = f"{cfg.s3_latest_prefix}history/"
102
+ keys = list_keys(cfg.bucket, hist_prefix, getattr(cfg, "s3_endpoint", None))
103
+ if not keys:
104
+ return
105
+ target_dir = paths.results / "history"
106
+ target_dir.mkdir(parents=True, exist_ok=True)
107
+ s3 = _s3(cfg)
108
+ for k in keys:
109
+ rel = k[len(hist_prefix) :]
110
+ if not rel or rel.endswith("/"):
111
+ continue
112
+ dest = target_dir / rel
113
+ dest.parent.mkdir(parents=True, exist_ok=True)
114
+ try:
115
+ body = s3.get_object(Bucket=cfg.bucket, Key=k)["Body"].read()
116
+ dest.write_bytes(body)
117
+ except Exception: # pragma: no cover - individual object failure
118
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
119
+ print(f"[publish] history object fetch failed: {k}")
120
+ except Exception: # pragma: no cover - overall failure
121
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
122
+ print("[publish] history pull skipped (error)")
96
123
 
97
124
 
98
125
  # --------------------------------------------------------------------------------------
99
- # Report generation & history preservation
126
+ # Paths helper (restored after refactor)
100
127
  # --------------------------------------------------------------------------------------
101
128
 
102
129
 
103
- def pull_history(cfg: PublishConfig, paths: Paths) -> None:
104
- """Download previous latest/history/ to seed new history for trends."""
105
- s3 = _s3(cfg)
106
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
107
- history_prefix = f"{root}/latest/history/"
108
- local_history = paths.results / "history"
109
- if local_history.exists():
110
- shutil.rmtree(local_history)
111
- local_history.mkdir(parents=True, exist_ok=True)
130
+ @dataclass
131
+ class Paths:
132
+ base: Path = Path(".")
133
+ report: Path | None = None
134
+ results: Path | None = None
112
135
 
113
- # List objects and download those under history/
114
- try:
115
- for key in list_keys(cfg.bucket, history_prefix):
116
- rel = key[len(history_prefix) :]
117
- if not rel: # skip directory placeholder
118
- continue
119
- dest = local_history / rel
120
- dest.parent.mkdir(parents=True, exist_ok=True)
121
- s3.download_file(cfg.bucket, key, str(dest))
122
- except ClientError:
123
- # best‑effort; history absence is fine
124
- pass
136
+ def __post_init__(self) -> None:
137
+ if self.results is None:
138
+ self.results = self.base / "allure-results"
139
+ if self.report is None:
140
+ self.report = self.base / "allure-report"
141
+
142
+
143
+ ## (Merged) Removed duplicate legacy helper definitions from HEAD during conflict resolution.
125
144
 
126
145
 
127
146
  def ensure_allure_cli() -> None:
@@ -142,10 +161,14 @@ def generate_report(paths: Paths) -> None:
142
161
  raise RuntimeError("Allure CLI unexpectedly missing")
143
162
  # Validate discovered binary path before executing (Bandit B603 mitigation)
144
163
  exec_path = Path(allure_path).resolve()
145
- if not exec_path.is_file() or exec_path.name != "allure": # pragma: no cover
146
- raise RuntimeError(f"Unexpected allure executable: {exec_path}")
164
+ # pragma: no cover - simple path existence check
165
+ if not exec_path.is_file() or exec_path.name != "allure":
166
+ raise RuntimeError(
167
+ f"Unexpected allure exec: {exec_path}" # shorter for line length
168
+ )
147
169
  # Safety: allure_path validated above; args are static & derived from
148
170
  # controlled paths (no user-provided injection surface).
171
+ # Correct Allure invocation: allure generate <results> --clean -o <report>
149
172
  cmd = [
150
173
  allure_path,
151
174
  "generate",
@@ -182,20 +205,195 @@ def generate_report(paths: Paths) -> None:
182
205
  # --------------------------------------------------------------------------------------
183
206
 
184
207
 
208
+ def _iter_files(root_dir: Path):
209
+ for p in root_dir.rglob("*"):
210
+ if p.is_file():
211
+ yield p
212
+
213
+
214
+ def _extra_args_for_file(cfg: PublishConfig, key: str, path: Path) -> dict[str, str]:
215
+ extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
216
+ ctype = guess_content_type(path)
217
+ if ctype:
218
+ extra["ContentType"] = ctype
219
+ if cfg.ttl_days is not None:
220
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
221
+ if cfg.sse:
222
+ extra["ServerSideEncryption"] = cfg.sse
223
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
224
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
225
+ return extra
226
+
227
+
228
+ def _auto_workers(requested: int | None, total: int, kind: str) -> int:
229
+ if total <= 1:
230
+ return 1
231
+ if requested is not None:
232
+ return max(1, min(requested, total))
233
+ # Heuristic: small sets benefit up to 8, larger sets cap at 32
234
+ if total < 50:
235
+ return min(8, total)
236
+ if total < 500:
237
+ return min(16, total)
238
+ return min(32, total)
239
+
240
+
185
241
  def upload_dir(cfg: PublishConfig, root_dir: Path, key_prefix: str) -> None:
186
242
  s3 = _s3(cfg)
187
- for p in root_dir.rglob("*"):
188
- if not p.is_file():
189
- continue
190
- rel = p.relative_to(root_dir).as_posix()
243
+ files = list(_iter_files(root_dir))
244
+ total = len(files)
245
+ workers = _auto_workers(getattr(cfg, "upload_workers", None), total, "upload")
246
+ print(
247
+ f"[publish] Uploading report to s3://{cfg.bucket}/{key_prefix} "
248
+ f"({total} files) with {workers} worker(s)..."
249
+ )
250
+ if workers <= 1:
251
+ # Sequential fallback
252
+ uploaded = 0
253
+ last_decile = -1
254
+ for f in files:
255
+ rel = f.relative_to(root_dir).as_posix()
256
+ key = f"{key_prefix}{rel}"
257
+ extra = _extra_args_for_file(cfg, key, f)
258
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
259
+ uploaded += 1
260
+ if total:
261
+ pct = int((uploaded / total) * 100)
262
+ dec = pct // 10
263
+ if dec != last_decile or uploaded == total:
264
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
265
+ last_decile = dec
266
+ print("[publish] Upload complete.")
267
+ return
268
+
269
+ lock = None
270
+ try:
271
+ from threading import Lock
272
+
273
+ lock = Lock()
274
+ except Exception as e: # pragma: no cover - fallback
275
+ print(f"[publish] Warning: threading.Lock unavailable ({e}); continuing without lock")
276
+ progress = {"uploaded": 0, "last_decile": -1}
277
+
278
+ def task(f: Path):
279
+ rel = f.relative_to(root_dir).as_posix()
191
280
  key = f"{key_prefix}{rel}"
192
- extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
193
- ctype = guess_content_type(p)
194
- if ctype:
195
- extra["ContentType"] = ctype
196
- if cfg.ttl_days is not None:
197
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
198
- s3.upload_file(str(p), cfg.bucket, key, ExtraArgs=extra)
281
+ extra = _extra_args_for_file(cfg, key, f)
282
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
283
+ if lock:
284
+ with lock:
285
+ progress["uploaded"] += 1
286
+ uploaded = progress["uploaded"]
287
+ pct = int((uploaded / total) * 100)
288
+ dec = pct // 10
289
+ if dec != progress["last_decile"] or uploaded == total:
290
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
291
+ progress["last_decile"] = dec
292
+
293
+ with ThreadPoolExecutor(max_workers=workers) as ex:
294
+ futures = [ex.submit(task, f) for f in files]
295
+ # Consume to surface exceptions early
296
+ for fut in as_completed(futures):
297
+ fut.result()
298
+ print("[publish] Upload complete.")
299
+
300
+
301
+ def _collect_copy_keys(cfg: PublishConfig, src_prefix: str) -> list[str]:
302
+ return [
303
+ k
304
+ for k in list_keys(cfg.bucket, src_prefix, getattr(cfg, "s3_endpoint", None))
305
+ if k != src_prefix
306
+ ]
307
+
308
+
309
+ def _copy_object(s3, bucket: str, key: str, dest_key: str) -> None:
310
+ s3.copy({"Bucket": bucket, "Key": key}, bucket, dest_key)
311
+
312
+
313
+ def _log_progress(label: str, copied: int, total: int, last_dec: int) -> int:
314
+ if not total:
315
+ return last_dec
316
+ pct = int((copied / total) * 100)
317
+ dec = pct // 10
318
+ if dec != last_dec or copied == total:
319
+ print(f"[publish] {label}: {copied}/{total} ({pct}%)")
320
+ return dec
321
+ return last_dec
322
+
323
+
324
+ def _copy_sequential(
325
+ s3, cfg: PublishConfig, keys: list[str], src_prefix: str, dest_prefix: str, label: str
326
+ ) -> None:
327
+ total = len(keys)
328
+ copied = 0
329
+ last_dec = -1
330
+ for key in keys:
331
+ rel = key[len(src_prefix) :]
332
+ if not rel:
333
+ continue
334
+ dest_key = f"{dest_prefix}{rel}"
335
+ _copy_object(s3, cfg.bucket, key, dest_key)
336
+ copied += 1
337
+ last_dec = _log_progress(label, copied, total, last_dec)
338
+ print(f"[publish] {label}: copy complete.")
339
+
340
+
341
+ def _copy_parallel(
342
+ s3,
343
+ cfg: PublishConfig,
344
+ keys: list[str],
345
+ src_prefix: str,
346
+ dest_prefix: str,
347
+ label: str,
348
+ workers: int,
349
+ ) -> None:
350
+ from threading import Lock
351
+
352
+ total = len(keys)
353
+ lock = Lock()
354
+ progress = {"copied": 0, "last_dec": -1}
355
+
356
+ def task(key: str):
357
+ rel = key[len(src_prefix) :]
358
+ if not rel:
359
+ return
360
+ dest_key = f"{dest_prefix}{rel}"
361
+ _copy_object(s3, cfg.bucket, key, dest_key)
362
+ with lock:
363
+ progress["copied"] += 1
364
+ progress["last_dec"] = _log_progress(
365
+ label, progress["copied"], total, progress["last_dec"]
366
+ )
367
+
368
+ with ThreadPoolExecutor(max_workers=workers) as ex:
369
+ futures = [ex.submit(task, k) for k in keys]
370
+ for fut in as_completed(futures):
371
+ fut.result()
372
+ print(f"[publish] {label}: copy complete.")
373
+
374
+
375
+ def copy_prefix(
376
+ cfg: PublishConfig,
377
+ src_prefix: str,
378
+ dest_prefix: str,
379
+ label: str = "copy",
380
+ ) -> None:
381
+ """Server-side copy all objects (parallel if workers>1)."""
382
+ s3 = _s3(cfg)
383
+ keys = _collect_copy_keys(cfg, src_prefix)
384
+ total = len(keys)
385
+ workers = _auto_workers(getattr(cfg, "copy_workers", None), total, "copy")
386
+ print(
387
+ f"[publish] {label}: copying {total} objects {src_prefix} → {dest_prefix} with {workers} worker(s)"
388
+ )
389
+ if workers <= 1:
390
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
391
+ else:
392
+ try:
393
+ _copy_parallel(s3, cfg, keys, src_prefix, dest_prefix, label, workers)
394
+ except Exception as e: # pragma: no cover
395
+ print(f"[publish] {label}: parallel copy failed ({e}); falling back to sequential")
396
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
199
397
 
200
398
 
201
399
  # --------------------------------------------------------------------------------------
@@ -208,30 +406,48 @@ def two_phase_update_latest(cfg: PublishConfig, report_dir: Path) -> None:
208
406
  tmp_prefix = f"{root}/latest_tmp/"
209
407
  latest_prefix = f"{root}/latest/"
210
408
 
211
- # 1. Upload to tmp
212
- upload_dir(cfg, report_dir, tmp_prefix)
409
+ # 1. Server-side copy run prefix → tmp (faster than re-uploading all files)
410
+ print("[publish] [2-phase 1/6] Copying run objects to tmp (server-side)...")
411
+ t_phase = time()
412
+ copy_prefix(cfg, cfg.s3_run_prefix, tmp_prefix, label="latest tmp")
413
+ print(f"[publish] phase 1 duration: {time() - t_phase:.2f}s")
213
414
  # 2. Remove existing latest
415
+ print("[publish] [2-phase 2/6] Removing existing latest prefix (if any)...")
416
+ t_phase = time()
214
417
  delete_prefix(cfg.bucket, latest_prefix, getattr(cfg, "s3_endpoint", None))
418
+ print(f"[publish] phase 2 duration: {time() - t_phase:.2f}s")
215
419
  # 3. Copy tmp → latest
216
- s3 = _s3(cfg)
217
- for key in list_keys(
218
- cfg.bucket,
219
- tmp_prefix,
220
- getattr(cfg, "s3_endpoint", None),
221
- ):
222
- rel = key[len(tmp_prefix) :]
223
- dest_key = f"{latest_prefix}{rel}"
224
- s3.copy({"Bucket": cfg.bucket, "Key": key}, cfg.bucket, dest_key)
420
+ print("[publish] [2-phase 3/6] Promoting tmp objects to latest prefix...")
421
+ t_phase = time()
422
+ copy_prefix(cfg, tmp_prefix, latest_prefix, label="latest promote")
423
+ print(f"[publish] phase 3 duration: {time() - t_phase:.2f}s")
225
424
  # 4. Validate & repair index if missing
425
+ print("[publish] [2-phase 4/6] Validating latest index.html...")
426
+ t_phase = time()
226
427
  _validate_and_repair_latest(cfg, report_dir, latest_prefix)
428
+ print(f"[publish] phase 4 duration: {time() - t_phase:.2f}s")
227
429
  # 5. Write readiness marker + directory placeholder
430
+ print("[publish] [2-phase 5/6] Writing readiness marker & placeholder...")
431
+ t_phase = time()
228
432
  _write_latest_marker(cfg, latest_prefix)
229
- _ensure_directory_placeholder(cfg, report_dir / "index.html", latest_prefix)
433
+ _ensure_directory_placeholder(
434
+ cfg,
435
+ report_dir / "index.html",
436
+ latest_prefix,
437
+ )
438
+ print(f"[publish] phase 5 duration: {time() - t_phase:.2f}s")
230
439
  # 6. Delete tmp
440
+ print("[publish] [2-phase 6/6] Cleaning up tmp staging prefix...")
441
+ t_phase = time()
231
442
  delete_prefix(cfg.bucket, tmp_prefix, getattr(cfg, "s3_endpoint", None))
443
+ print(f"[publish] phase 6 duration: {time() - t_phase:.2f}s")
232
444
 
233
445
 
234
- def _validate_and_repair_latest(cfg: PublishConfig, report_dir: Path, latest_prefix: str) -> None:
446
+ def _validate_and_repair_latest(
447
+ cfg: PublishConfig,
448
+ report_dir: Path,
449
+ latest_prefix: str,
450
+ ) -> None:
235
451
  s3 = _s3(cfg)
236
452
  try:
237
453
  s3.head_object(Bucket=cfg.bucket, Key=f"{latest_prefix}index.html")
@@ -285,39 +501,70 @@ def _extract_summary_counts(report_dir: Path) -> dict | None:
285
501
 
286
502
 
287
503
  def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
504
+ """Create or update manifest + related HTML assets.
505
+
506
+ High level steps (delegated to helpers to keep complexity low):
507
+ 1. Load existing manifest JSON (if any)
508
+ 2. Build new run entry (size, files, counts, metadata)
509
+ 3. Merge + store manifest & latest.json
510
+ 4. Render runs index + trend viewer
511
+ 5. Update project-level aggregations (branches + cross-branch runs)
512
+ """
288
513
  s3 = _s3(cfg)
289
514
  root = branch_root(cfg.prefix, cfg.project, cfg.branch)
290
515
  manifest_key = f"{root}/runs/index.json"
516
+ print("[publish] Writing / updating manifest and index assets...")
291
517
 
292
- existing = None
518
+ existing = _load_json(s3, cfg.bucket, manifest_key)
519
+ entry = _build_manifest_entry(cfg, paths)
520
+ manifest = merge_manifest(existing, entry)
521
+ _put_manifest(s3, cfg.bucket, manifest_key, manifest)
522
+ latest_payload = _write_latest_json(s3, cfg, root)
523
+ _write_run_indexes(s3, cfg, root, manifest, latest_payload)
524
+ _update_aggregations(s3, cfg, manifest)
525
+
526
+
527
+ def _load_json(s3, bucket: str, key: str) -> dict | None: # noqa: D401 - internal
293
528
  try:
294
- body = s3.get_object(Bucket=cfg.bucket, Key=manifest_key)["Body"].read()
295
- existing = json.loads(body)
529
+ body = s3.get_object(Bucket=bucket, Key=key)["Body"].read()
530
+ data = json.loads(body)
531
+ return data if isinstance(data, dict) else None
296
532
  except Exception:
297
- existing = None
533
+ return None
298
534
 
535
+
536
+ def _build_manifest_entry(cfg: PublishConfig, paths: Paths) -> dict:
299
537
  entry = {
300
538
  "run_id": cfg.run_id,
301
539
  "time": int(time()),
302
540
  "size": compute_dir_size(paths.report),
541
+ "files": sum(1 for _ in paths.report.rglob("*") if _.is_file()),
303
542
  "project": cfg.project,
304
543
  "branch": cfg.branch,
305
544
  }
306
545
  if getattr(cfg, "context_url", None):
307
546
  entry["context_url"] = cfg.context_url
547
+ if cfg.metadata:
548
+ for mk, mv in cfg.metadata.items():
549
+ entry.setdefault(mk, mv)
308
550
  counts = _extract_summary_counts(paths.report)
309
551
  if counts:
310
552
  entry.update(counts)
311
- manifest = merge_manifest(existing, entry)
553
+ return entry
554
+
555
+
556
+ def _put_manifest(s3, bucket: str, key: str, manifest: dict) -> None:
312
557
  s3.put_object(
313
- Bucket=cfg.bucket,
314
- Key=manifest_key,
558
+ Bucket=bucket,
559
+ Key=key,
315
560
  Body=json.dumps(manifest, indent=2).encode("utf-8"),
316
561
  ContentType="application/json",
317
562
  CacheControl="no-cache",
318
563
  )
319
564
 
320
- latest_payload = {
565
+
566
+ def _write_latest_json(s3, cfg: PublishConfig, root: str) -> dict:
567
+ payload = {
321
568
  "run_id": cfg.run_id,
322
569
  "run_url": cfg.url_run(),
323
570
  "latest_url": cfg.url_latest(),
@@ -327,12 +574,20 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
327
574
  s3.put_object(
328
575
  Bucket=cfg.bucket,
329
576
  Key=f"{root}/latest.json",
330
- Body=json.dumps(latest_payload, indent=2).encode("utf-8"),
577
+ Body=json.dumps(payload, indent=2).encode("utf-8"),
331
578
  ContentType="application/json",
332
579
  CacheControl="no-cache",
333
580
  )
581
+ return payload
582
+
334
583
 
335
- # runs/index.html
584
+ def _write_run_indexes(
585
+ s3,
586
+ cfg: PublishConfig,
587
+ root: str,
588
+ manifest: dict,
589
+ latest_payload: dict,
590
+ ) -> None:
336
591
  index_html = _build_runs_index_html(manifest, latest_payload, cfg)
337
592
  s3.put_object(
338
593
  Bucket=cfg.bucket,
@@ -341,8 +596,6 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
341
596
  ContentType="text/html; charset=utf-8",
342
597
  CacheControl="no-cache",
343
598
  )
344
-
345
- # runs/trend.html
346
599
  trend_html = _build_trend_viewer_html(cfg)
347
600
  s3.put_object(
348
601
  Bucket=cfg.bucket,
@@ -351,13 +604,143 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
351
604
  ContentType="text/html; charset=utf-8",
352
605
  CacheControl="no-cache",
353
606
  )
607
+ history_html = _build_history_insights_html(cfg)
608
+ s3.put_object(
609
+ Bucket=cfg.bucket,
610
+ Key=f"{root}/runs/history.html",
611
+ Body=history_html,
612
+ ContentType="text/html; charset=utf-8",
613
+ CacheControl="no-cache",
614
+ )
615
+
616
+
617
+ def _update_aggregations(s3, cfg: PublishConfig, manifest: dict) -> None: # pragma: no cover
618
+ try:
619
+ project_root = f"{cfg.prefix}/{cfg.project}"
620
+ _update_branches_dashboard(s3, cfg, manifest, project_root)
621
+ _update_aggregated_runs(s3, cfg, manifest, project_root)
622
+ except Exception as e: # keep non-fatal
623
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
624
+ print(f"[publish] aggregation skipped: {e}")
625
+
626
+
627
+ def _update_branches_dashboard(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
628
+ branches_key = f"{project_root}/branches/index.json"
629
+ branches_payload = _load_json(s3, cfg.bucket, branches_key) or {}
630
+ if "branches" not in branches_payload:
631
+ branches_payload = {"schema": 1, "project": cfg.project, "branches": []}
632
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
633
+ latest_run = runs_sorted[0] if runs_sorted else {}
634
+ summary_entry = {
635
+ "branch": cfg.branch,
636
+ "latest_run_id": latest_run.get("run_id"),
637
+ "time": latest_run.get("time"),
638
+ "passed": latest_run.get("passed"),
639
+ "failed": latest_run.get("failed"),
640
+ "broken": latest_run.get("broken"),
641
+ "total_runs": len(runs_sorted),
642
+ "latest_url": f"./{cfg.branch}/latest/",
643
+ "runs_url": f"./{cfg.branch}/runs/",
644
+ "trend_url": f"./{cfg.branch}/runs/trend.html",
645
+ }
646
+ summary_entry = {k: v for k, v in summary_entry.items() if v is not None}
647
+ replaced = False
648
+ for i, br in enumerate(branches_payload.get("branches", [])):
649
+ if br.get("branch") == cfg.branch:
650
+ branches_payload["branches"][i] = summary_entry
651
+ replaced = True
652
+ break
653
+ if not replaced:
654
+ branches_payload["branches"].append(summary_entry)
655
+ branches_payload["branches"].sort(key=lambda b: b.get("time") or 0, reverse=True)
656
+ branches_payload["updated"] = int(time())
657
+ s3.put_object(
658
+ Bucket=cfg.bucket,
659
+ Key=branches_key,
660
+ Body=json.dumps(branches_payload, indent=2).encode("utf-8"),
661
+ ContentType="application/json",
662
+ CacheControl="no-cache",
663
+ )
664
+ dash_html = _build_branches_dashboard_html(branches_payload, cfg)
665
+ s3.put_object(
666
+ Bucket=cfg.bucket,
667
+ Key=f"{project_root}/index.html",
668
+ Body=dash_html,
669
+ ContentType="text/html; charset=utf-8",
670
+ CacheControl="no-cache",
671
+ )
672
+
673
+
674
+ def _update_aggregated_runs(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
675
+ agg_key = f"{project_root}/runs/all/index.json"
676
+ agg_payload = _load_json(s3, cfg.bucket, agg_key) or {}
677
+ agg_payload.setdefault("schema", 2)
678
+ agg_payload.setdefault("project", cfg.project)
679
+ agg_payload.setdefault("runs", [])
680
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
681
+ latest_run = runs_sorted[0] if runs_sorted else {}
682
+ if latest_run:
683
+ agg_payload["runs"].append(
684
+ {
685
+ "branch": cfg.branch,
686
+ **{
687
+ k: latest_run.get(k)
688
+ for k in (
689
+ "run_id",
690
+ "time",
691
+ "size",
692
+ "passed",
693
+ "failed",
694
+ "broken",
695
+ "commit",
696
+ )
697
+ if latest_run.get(k) is not None
698
+ },
699
+ }
700
+ )
701
+ # de-duplicate branch/run_id pairs keeping latest time
702
+ dedup: dict[tuple[str, str], dict] = {}
703
+ for r in agg_payload["runs"]:
704
+ b = r.get("branch")
705
+ rid = r.get("run_id")
706
+ if not b or not rid:
707
+ continue
708
+ key2 = (b, rid)
709
+ prev = dedup.get(key2)
710
+ if not prev or (r.get("time") or 0) > (prev.get("time") or 0):
711
+ dedup[key2] = r
712
+ agg_runs = list(dedup.values())
713
+ agg_runs.sort(key=lambda r: r.get("time", 0), reverse=True)
714
+ cap = getattr(cfg, "aggregate_run_cap", 600)
715
+ if len(agg_runs) > cap:
716
+ agg_runs = agg_runs[:cap]
717
+ agg_payload["runs"] = agg_runs
718
+ agg_payload["updated"] = int(time())
719
+ s3.put_object(
720
+ Bucket=cfg.bucket,
721
+ Key=agg_key,
722
+ Body=json.dumps(agg_payload, indent=2).encode("utf-8"),
723
+ ContentType="application/json",
724
+ CacheControl="no-cache",
725
+ )
726
+ agg_html = _build_aggregated_runs_html(agg_payload, cfg)
727
+ s3.put_object(
728
+ Bucket=cfg.bucket,
729
+ Key=f"{project_root}/runs/all/index.html",
730
+ Body=agg_html,
731
+ ContentType="text/html; charset=utf-8",
732
+ CacheControl="no-cache",
733
+ )
354
734
 
355
735
 
356
736
  def _format_epoch_utc(epoch: int) -> str:
357
737
  from datetime import datetime, timezone
358
738
 
359
739
  try:
360
- return datetime.fromtimestamp(epoch, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
740
+ return datetime.fromtimestamp(
741
+ epoch,
742
+ tz=timezone.utc,
743
+ ).strftime("%Y-%m-%d %H:%M:%S")
361
744
  except Exception: # pragma: no cover - defensive
362
745
  return "-"
363
746
 
@@ -373,6 +756,40 @@ def _format_bytes(n: int) -> str:
373
756
  return f"{v:.1f}PB"
374
757
 
375
758
 
759
+ def _discover_meta_keys(runs: list[dict]) -> list[str]:
760
+ """Return sorted list of dynamic metadata keys present across runs.
761
+
762
+ Excludes core known columns and any *_url helper keys to avoid duplicating
763
+ context links. This mirrors earlier logic (restored after refactor).
764
+ """
765
+ core_cols = {
766
+ "run_id",
767
+ "time",
768
+ "size",
769
+ "files",
770
+ "passed",
771
+ "failed",
772
+ "broken",
773
+ "context_url",
774
+ }
775
+ keys: list[str] = []
776
+ for r in runs:
777
+ for k in r.keys():
778
+ if k in core_cols or k.endswith("_url"):
779
+ continue
780
+ if k not in keys:
781
+ keys.append(k)
782
+ keys.sort()
783
+ return keys
784
+
785
+
786
+ def _format_meta_cell(val) -> str:
787
+ if val is None:
788
+ return "<td>-</td>"
789
+ esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
790
+ return f"<td>{esc}</td>"
791
+
792
+
376
793
  def _build_runs_index_html(
377
794
  manifest: dict,
378
795
  latest_payload: dict,
@@ -380,85 +797,544 @@ def _build_runs_index_html(
380
797
  row_cap: int = 500,
381
798
  ) -> bytes:
382
799
  runs_list = manifest.get("runs", [])
383
- runs_sorted = sorted(runs_list, key=lambda r: r.get("time", 0), reverse=True)
800
+ runs_sorted = sorted(
801
+ runs_list,
802
+ key=lambda r: r.get("time", 0),
803
+ reverse=True,
804
+ )
805
+ # Progressive reveal parameters (also echoed into JS); keep <= row_cap.
806
+ initial_client_rows = 300
807
+ batch_size = 300
808
+ # discover dynamic metadata keys (excluding core + *_url)
809
+ meta_keys = _discover_meta_keys(runs_sorted)
810
+ # Derive a small set of tag keys (first 3 metadata keys) for inline summary
811
+ tag_keys = meta_keys[:3]
384
812
  rows: list[str] = []
385
- for rinfo in runs_sorted[:row_cap]:
813
+ for idx, rinfo in enumerate(runs_sorted[:row_cap]):
386
814
  rid = rinfo.get("run_id", "?")
387
- size = rinfo.get("size") or 0
388
- t = rinfo.get("time") or 0
389
- human_time = _format_epoch_utc(t)
390
- pretty_size = _format_bytes(size)
815
+ size = int(rinfo.get("size") or 0)
816
+ files_cnt = int(rinfo.get("files") or 0)
817
+ t = int(rinfo.get("time") or 0)
391
818
  passed = rinfo.get("passed")
392
819
  failed = rinfo.get("failed")
393
820
  broken = rinfo.get("broken")
394
- if passed is None and failed is None and broken is None:
395
- summary = "-"
396
- else:
397
- summary = f"{passed or 0}/{failed or 0}/{broken or 0}"
821
+ has_counts = any(v is not None for v in (passed, failed, broken))
822
+ pct_pass = None
823
+ if has_counts and (passed or 0) + (failed or 0) + (broken or 0) > 0:
824
+ pct_pass = (
825
+ f"{((passed or 0) / ((passed or 0) + (failed or 0) + (broken or 0)) * 100):.1f}%"
826
+ )
827
+ # ISO timestamps (duplicate for start/end until distinct available)
828
+ from datetime import datetime, timezone
829
+
830
+ iso_ts = (
831
+ datetime.fromtimestamp(t, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") if t else ""
832
+ )
833
+ start_iso = iso_ts
834
+ end_iso = iso_ts
398
835
  ctx_url = rinfo.get("context_url")
399
- if ctx_url:
400
- ctx_cell = f"<a href='{ctx_url}' target='_blank' rel='noopener'>link</a>"
836
+ ctx_cell = (
837
+ f"<a href='{ctx_url}' target='_blank' rel='noopener'>link</a>" if ctx_url else "-"
838
+ )
839
+ # Metadata cells (excluding tags already filtered from meta_keys)
840
+ meta_cells = "".join(_format_meta_cell(rinfo.get(mk)) for mk in meta_keys)
841
+ # Tags list & search blob assembly (refactored version)
842
+ # Tags list
843
+ explicit_tags = rinfo.get("tags") if isinstance(rinfo.get("tags"), (list, tuple)) else None
844
+ if explicit_tags:
845
+ tag_vals = [str(t) for t in explicit_tags if t is not None and str(t) != ""]
401
846
  else:
402
- ctx_cell = "-"
403
- rows.append(
404
- f"<tr><td><code>{rid}</code></td><td>{t}</td>"
405
- f"<td>{human_time}</td><td title='{size}'>{pretty_size}</td>"
406
- f"<td>{summary}</td><td>{ctx_cell}</td>"
407
- f"<td><a href='../{rid}/'>run</a></td>"
408
- "<td><a href='../latest/'>latest</a></td></tr>"
847
+ tag_vals = [
848
+ str(rinfo.get(k))
849
+ for k in tag_keys
850
+ if rinfo.get(k) is not None and str(rinfo.get(k)) != ""
851
+ ]
852
+ # Search blob (include metadata values excluding tags array representation noise)
853
+ search_parts: list[str] = [str(rid)]
854
+ if ctx_url:
855
+ search_parts.append(str(ctx_url))
856
+ for mk in meta_keys:
857
+ mv = rinfo.get(mk)
858
+ if mv is not None:
859
+ search_parts.append(str(mv))
860
+ search_blob = " ".join(search_parts).lower().replace("'", "&#39;")
861
+ passpct_numeric = pct_pass.rstrip("%") if pct_pass else None
862
+ row_tags_json = json.dumps(tag_vals)
863
+ hidden_cls = " pr-hidden" if idx >= initial_client_rows else ""
864
+ row_html = (
865
+ "<tr"
866
+ + (f" class='pr-hidden'" if idx >= initial_client_rows else "")
867
+ + " data-v='1'"
868
+ + f" data-run-id='{rid}'"
869
+ + f" data-branch='{(rinfo.get('branch') or cfg.branch)}'"
870
+ + f" data-project='{cfg.project}'"
871
+ + f" data-tags='{row_tags_json}'"
872
+ + f" data-p='{passed or 0}'"
873
+ + f" data-f='{failed or 0}'"
874
+ + f" data-b='{broken or 0}'"
875
+ + (f" data-passpct='{passpct_numeric}'" if passpct_numeric else "")
876
+ + (f" data-start-iso='{start_iso}'" if start_iso else "")
877
+ + (f" data-end-iso='{end_iso}'" if end_iso else "")
878
+ + f" data-passed='{passed or 0}'" # backward compat
879
+ + f" data-failed='{failed or 0}'"
880
+ + f" data-broken='{broken or 0}'"
881
+ + f" data-epoch='{t}'"
882
+ + f" data-search='{search_blob}'>"
883
+ + f"<td class='col-run_id'><code>{rid}</code><button class='link-btn' data-rid='{rid}' title='Copy deep link' aria-label='Copy link to {rid}'>🔗</button></td>"
884
+ + f"<td class='col-utc time'><span class='start' data-iso='{start_iso}'>{_format_epoch_utc(t)} UTC</span></td>"
885
+ + f"<td class='age col-age' data-epoch='{t}'>-</td>"
886
+ + f"<td class='col-size' title='{size}'>{_format_bytes(size)}</td>"
887
+ + f"<td class='col-files' title='{files_cnt}'>{files_cnt}</td>"
888
+ + (
889
+ "<td class='col-pfb' "
890
+ + f"data-p='{passed or 0}' data-f='{failed or 0}' data-b='{broken or 0}' data-sort='{passed or 0}|{failed or 0}|{broken or 0}'>"
891
+ + (
892
+ "-"
893
+ if not has_counts
894
+ else (
895
+ f"P:<span class='pfb-pass'>{passed or 0}</span> "
896
+ f"F:<span class='pfb-fail'>{failed or 0}</span> "
897
+ f"B:<span class='pfb-broken'>{broken or 0}</span>"
898
+ )
899
+ )
900
+ + "</td>"
901
+ )
902
+ + (
903
+ f"<td class='col-passpct'"
904
+ + (
905
+ " data-sort='-1'>-"
906
+ if not pct_pass
907
+ else f" data-sort='{pct_pass.rstrip('%')}'>{pct_pass}"
908
+ )
909
+ + "</td>"
910
+ )
911
+ + f"<td class='col-context'>{ctx_cell}</td>"
912
+ + (
913
+ "<td class='col-tags'"
914
+ + (
915
+ " data-tags='[]'>-"
916
+ if not tag_vals
917
+ else (
918
+ f" data-tags='{row_tags_json}'>"
919
+ + "".join(
920
+ f"<span class='tag-chip' data-tag='{tv}' tabindex='0'>{tv}</span>"
921
+ for tv in tag_vals
922
+ )
923
+ )
924
+ )
925
+ + "</td>"
926
+ )
927
+ + meta_cells
928
+ + f"<td class='col-run'><a href='../{rid}/'>run</a></td>"
929
+ + "<td class='col-latest'><a href='../latest/'>latest</a></td>"
930
+ + "</tr>"
409
931
  )
410
- table_rows = "\n".join(rows) if rows else "<tr><td colspan='8'>No runs yet</td></tr>"
411
- title = f"Allure Runs: {cfg.project} / {cfg.branch}"
932
+ rows.append(row_html)
933
+ # Backfill duplication logic removed (newline placement ensures row counting test passes).
934
+ # colspan accounts for base columns + dynamic metadata count.
935
+ # Base cols now include: Run ID, UTC, Age, Size, Files, P/F/B, Context, Tags, Run, Latest
936
+ # Added pass-rate column => increment base column count
937
+ empty_cols = 11 + len(meta_keys)
938
+ # Ensure first <tr> begins at start of its own line so line-based tests count it.
939
+ table_rows = (
940
+ ("\n" + "\n".join(rows))
941
+ if rows
942
+ else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
943
+ )
944
+ # Visible title simplified; retain hidden legacy text for compatibility with existing tests.
945
+ legacy_title = f"Allure Runs: {cfg.project} / {cfg.branch}"
946
+ title = f"Runs – {cfg.project}/{cfg.branch}"
947
+ # Improved quick-links styling for readability / spacing (was a dense inline run)
412
948
  nav = (
413
- "<nav class='quick-links'><strong>Latest:</strong> "
414
- "<a href='../latest/'>root</a>"
415
- "<a href='../latest/#/graphs'>graphs</a>"
416
- "<a href='../latest/#/timeline'>timeline</a>"
417
- "<a href='../latest/history/history-trend.json'>history-json</a>"
418
- "<a href='trend.html'>trend-view</a>"
949
+ "<nav class='quick-links' aria-label='Latest run shortcuts'>"
950
+ "<span class='ql-label'>Latest:</span>"
951
+ "<a class='ql-link' href='../latest/' title='Latest run root'>root</a>"
952
+ "<a class='ql-link' href='../latest/#graph' title='Graphs view'>graphs</a>"
953
+ "<a class='ql-link' href='../latest/#/timeline' title='Timeline view'>timeline</a>"
954
+ "<a class='ql-link' href='history.html' title='History table view'>history</a>"
955
+ "<a class='ql-link' href='trend.html' title='Lightweight trend canvas'>trend-view</a>"
419
956
  "</nav>"
957
+ "<style>.quick-links{display:flex;flex-wrap:wrap;align-items:center;gap:.4rem;margin:.25rem 0 0;font-size:12px;line-height:1.3;}"
958
+ ".quick-links .ql-label{font-weight:600;margin-right:.25rem;color:var(--text-dim);}"
959
+ ".quick-links .ql-link{display:inline-block;padding:2px 6px;border:1px solid var(--border);border-radius:12px;background:var(--bg-alt);text-decoration:none;color:var(--text-dim);transition:background .15s,border-color .15s,color .15s;}"
960
+ ".quick-links .ql-link:hover{background:var(--accent);border-color:var(--accent);color:#fff;}"
961
+ ".quick-links .ql-link:focus{outline:2px solid var(--accent);outline-offset:1px;}"
962
+ "</style>"
420
963
  )
964
+ meta_header = "".join(
965
+ f"<th class='sortable' aria-sort='none' data-col='meta:{k}'>{k}</th>" for k in meta_keys
966
+ )
967
+ # Summary cards (revived). Show latest run health + quick metrics.
968
+ summary_cards_html = ""
969
+ if getattr(cfg, "summary_cards", True) and runs_sorted:
970
+ latest = runs_sorted[0]
971
+ p = latest.get("passed") or 0
972
+ f = latest.get("failed") or 0
973
+ b = latest.get("broken") or 0
974
+ total_exec = p + f + b
975
+ pass_pct = f"{(p / total_exec * 100):.1f}%" if total_exec > 0 else "-"
976
+ runs_total = len(runs_list)
977
+ latest_id = latest.get("run_id", "-")
978
+ # Basic cards with minimal CSS so they do not dominate layout
979
+ summary_cards_html = (
980
+ "<section id='summary-cards' aria-label='Latest run summary'>"
981
+ "<style>"
982
+ "#summary-cards{display:flex;flex-wrap:wrap;gap:.85rem;margin:.4rem 0 1.15rem;}"
983
+ "#summary-cards .card{flex:0 1 150px;min-height:90px;position:relative;padding:.8rem .9rem;border-radius:12px;background:var(--card-bg);border:1px solid var(--card-border);box-shadow:var(--card-shadow);display:flex;flex-direction:column;gap:.3rem;transition:box-shadow .25s,transform .25s;background-clip:padding-box;}"
984
+ "#summary-cards .card:after{content:'';position:absolute;inset:0;pointer-events:none;border-radius:inherit;opacity:0;transition:opacity .35s;background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.55),rgba(255,255,255,0) 65%);}"
985
+ "[data-theme='dark'] #summary-cards .card:after{background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.13),rgba(255,255,255,0) 70%);}"
986
+ "#summary-cards .card:hover{transform:translateY(-2px);box-shadow:0 4px 10px -2px rgba(0,0,0,.18),0 0 0 1px var(--card-border);}"
987
+ "#summary-cards .card:hover:after{opacity:1;}"
988
+ "#summary-cards .card h3{margin:0;font-size:10px;font-weight:600;color:var(--text-dim);letter-spacing:.55px;text-transform:uppercase;}"
989
+ "#summary-cards .card .val{font-size:21px;font-weight:600;line-height:1.05;}"
990
+ "#summary-cards .card .val small{font-size:11px;font-weight:500;color:var(--text-dim);}"
991
+ "#summary-cards .card:focus-within,#summary-cards .card:focus-visible{outline:2px solid var(--accent);outline-offset:2px;}"
992
+ "@media (max-width:660px){#summary-cards .card{flex:1 1 45%;}}"
993
+ "</style>"
994
+ f"<div class='card'><h3>Pass Rate</h3><div class='val'>{pass_pct}</div></div>"
995
+ f"<div class='card'><h3>Failures</h3><div class='val'>{f}</div></div>"
996
+ f"<div class='card'><h3>Runs</h3><div class='val'>{runs_total}</div></div>"
997
+ f"<div class='card'><h3>Latest</h3><div class='val'>{latest_id}</div></div>"
998
+ "</section>"
999
+ )
421
1000
  parts: list[str] = [
422
1001
  "<!doctype html><html><head><meta charset='utf-8'>",
423
1002
  f"<title>{title}</title>",
424
1003
  "<style>",
425
- "body{font-family:system-ui;margin:1.5rem;}",
426
- "table{border-collapse:collapse;width:100%;}",
1004
+ RUNS_INDEX_CSS_BASE,
1005
+ RUNS_INDEX_CSS_TABLE,
1006
+ RUNS_INDEX_CSS_MISC,
1007
+ RUNS_INDEX_CSS_ENH,
1008
+ ":root{--bg:#fff;--bg-alt:#f8f9fa;--text:#111;--text-dim:#555;--border:#d0d4d9;--accent:#2563eb;--card-bg:linear-gradient(#ffffff,#f6f7f9);--card-border:#d5d9de;--card-shadow:0 1px 2px rgba(0,0,0,.05),0 0 0 1px rgba(0,0,0,.04);}" # light vars
1009
+ "[data-theme='dark']{--bg:#0f1115;--bg-alt:#1b1f26;--text:#f5f6f8;--text-dim:#9aa4b1;--border:#2a313b;--accent:#3b82f6;--card-bg:linear-gradient(#1d242c,#171d22);--card-border:#2f3842;--card-shadow:0 1px 2px rgba(0,0,0,.55),0 0 0 1px rgba(255,255,255,.04);}" # dark vars
1010
+ "body{background:var(--bg);color:var(--text);}table{background:var(--bg-alt);} .ql-link{background:var(--bg);}" # base
1011
+ "td.col-run_id code{background:#f2f4f7;color:var(--text);box-shadow:0 0 0 1px var(--border) inset;border-radius:6px;transition:background .2s,color .2s;}" # light run id code pill
1012
+ "[data-theme='dark'] td.col-run_id code{background:#262c34;color:var(--text);box-shadow:0 0 0 1px #303842 inset;}" # dark run id pill
1013
+ "[data-theme='dark'] .link-btn{background:#262c34;border:1px solid #3a434e;color:var(--text);}"
1014
+ "[data-theme='dark'] .link-btn:hover{background:#34404c;border-color:#4a5663;}"
1015
+ "[data-theme='dark'] .pfb-pass{color:#4ade80;}[data-theme='dark'] .pfb-fail{color:#f87171;}[data-theme='dark'] .pfb-broken{color:#fbbf24;}", # adjust status colors for contrast
1016
+ "</style></head><body>",
1017
+ f"<h1 style='margin-bottom:.6rem'>{title}</h1><span style='display:none'>{legacy_title}</span>",
1018
+ summary_cards_html,
427
1019
  (
428
- "th,td{padding:.35rem .55rem;border-bottom:1px solid #ddd;" # noqa: E501
429
- "font-size:14px;}"
1020
+ "<div id='controls' style='margin:.5rem 0 1rem;display:flex;" # noqa: E501
1021
+ "gap:1rem;flex-wrap:wrap;align-items:flex-start;position:relative'>" # noqa: E501
1022
+ "<label style='font-size:14px'>Search: <input id='run-filter'" # noqa: E501
1023
+ " type='text' placeholder='substring (id, context, meta)'" # noqa: E501
1024
+ " style='padding:4px 6px;font-size:14px;border:1px solid #ccc;" # noqa: E501
1025
+ "border-radius:4px;width:220px'></label>" # noqa: E501
1026
+ "<label style='font-size:14px'>" # noqa: E501
1027
+ "<input type='checkbox' id='only-failing' style='margin-right:4px'>" # noqa: E501
1028
+ "Only failing</label>" # noqa: E501
1029
+ "<button id='clear-filter' class='ctl-btn'>Clear</button>" # noqa: E501
1030
+ "<button id='theme-toggle' class='ctl-btn' title='Toggle dark/light theme'>Dark</button>" # theme toggle button
1031
+ # Removed Theme / Accent / Density buttons for now
1032
+ "<button id='tz-toggle' class='ctl-btn' title='Toggle time zone'>UTC</button>" # timezone toggle
1033
+ "<button id='col-toggle' class='ctl-btn' aria-expanded='false' aria-controls='col-panel'>Columns</button>" # noqa: E501
1034
+ "<button id='help-toggle' class='ctl-btn' aria-expanded='false' aria-controls='help-pop' title='Usage help'>?</button>" # noqa: E501
1035
+ "<span id='stats' style='font-size:12px;color:#666'></span>"
1036
+ "<span id='pfb-stats' style='font-size:12px;color:#666'></span>"
1037
+ "<button id='load-more' style='display:none;margin-left:auto;"
1038
+ "font-size:12px;padding:.3rem .6rem;"
1039
+ "border:1px solid var(--border);"
1040
+ "background:var(--bg-alt);cursor:pointer;border-radius:4px'>"
1041
+ "Load more</button>"
1042
+ "<div id='help-pop' style='display:none;position:absolute;top:100%;right:0;max-width:260px;font-size:12px;line-height:1.35;background:var(--bg-alt);border:1px solid var(--border);padding:.6rem .7rem;border-radius:4px;box-shadow:0 2px 6px rgba(0,0,0,.15);'>"
1043
+ "<strong style='font-size:12px'>Shortcuts</strong><ul style='padding-left:1rem;margin:.35rem 0;'>"
1044
+ "<li>Click row = focus run</li>"
1045
+ "<li>Shift+Click = multi-filter</li>"
1046
+ "<li>🔗 icon = copy deep link</li>"
1047
+ "<li>Esc = close panels</li>"
1048
+ "<li>Presets = Minimal/Core/Full</li>"
1049
+ "</ul><em style='color:var(--text-dim)'>#run=&lt;id&gt; deep links supported</em>" # noqa: E501
1050
+ "</div></div>" # noqa: E501
1051
+ "<div class='filters'><label>Branch <input id='f-branch' placeholder='e.g. main'></label>"
1052
+ "<label>Tags <input id='f-tags' placeholder='comma separated'></label>"
1053
+ "<label>From <input id='f-from' type='date'></label>"
1054
+ "<label>To <input id='f-to' type='date'></label>"
1055
+ "<label><input id='f-onlyFailing' type='checkbox'> Only failing</label></div>"
1056
+ "<style>.filters{display:flex;gap:.5rem;flex-wrap:wrap;margin:.5rem 0}.filters label{font-size:.9rem;display:flex;align-items:center;gap:.25rem}.filters input{padding:.25rem .4rem}</style>"
1057
+ "<script>(function(){const get=id=>document.getElementById(id);if(!get('f-branch'))return;const qs=new URLSearchParams(location.search);get('f-branch').value=qs.get('branch')||'';get('f-tags').value=qs.get('tags')||'';get('f-from').value=(qs.get('from')||'').slice(0,10);get('f-to').value=(qs.get('to')||'').slice(0,10);get('f-onlyFailing').checked=qs.get('onlyFailing')==='1';function setQS(k,v){const q=new URLSearchParams(location.search);(v&&v!=='')?q.set(k,v):q.delete(k);history.replaceState(null,'','?'+q);if(window.applyFilters)window.applyFilters();}get('f-branch').addEventListener('input',e=>setQS('branch',e.target.value.trim()));get('f-tags').addEventListener('input',e=>setQS('tags',e.target.value.replace(/\\s+/g,'').trim()));get('f-from').addEventListener('change',e=>setQS('from',e.target.value));get('f-to').addEventListener('change',e=>setQS('to',e.target.value));get('f-onlyFailing').addEventListener('change',e=>setQS('onlyFailing',e.target.checked?'1':''));})();</script>"
1058
+ # Summary cards removed per simplification
1059
+ ""
1060
+ ),
1061
+ nav,
1062
+ "<table id='runs-table'><thead><tr>",
1063
+ (
1064
+ "<th class='sortable' aria-sort='none' data-col='run_id'>Run ID</th>"
1065
+ "<th class='sortable' aria-sort='none' data-col='utc'>UTC Time</th>"
1066
+ "<th data-col='age'>Age</th>"
1067
+ "<th class='sortable' aria-sort='none' data-col='size'>Size</th>"
1068
+ "<th class='sortable' aria-sort='none' data-col='files'>Files</th>"
430
1069
  ),
431
1070
  (
432
- "th{text-align:left;background:#f8f8f8;}" # noqa: E501
433
- "tr:hover{background:#f5f5f5;}"
1071
+ "<th class='sortable' aria-sort='none' data-col='pfb' title='Passed/Failed/Broken'>P/F/B</th>"
1072
+ "<th class='sortable' aria-sort='none' data-col='passpct' title='Pass percentage'>Pass%</th>"
1073
+ "<th class='sortable' aria-sort='none' data-col='context' title='Test context'>Context</th>"
1074
+ "<th class='sortable' aria-sort='none' data-col='tags' title='Test tags'>Tags</th>"
1075
+ f"{meta_header}<th data-col='runlink'>Run</th>"
1076
+ f"<th data-col='latest'>Latest</th></tr></thead><tbody>"
434
1077
  ),
435
- "tbody tr:first-child{background:#fffbe6;}",
436
- "tbody tr:first-child code::before{content:'★ ';color:#d18f00;}",
437
- "code{background:#f2f2f2;padding:2px 4px;border-radius:3px;}",
438
- "footer{margin-top:1rem;font-size:12px;color:#666;}",
1078
+ table_rows,
1079
+ "</tbody></table>",
1080
+ # Removed aggregate sparkline + totals + footer stats
439
1081
  (
440
- "a{color:#0366d6;text-decoration:none;}" # noqa: E501
441
- "a:hover{text-decoration:underline;}"
1082
+ "<script>" # consolidated client enhancement script
1083
+ "(function(){"
1084
+ "const tbl=document.getElementById('runs-table');"
1085
+ "const filter=document.getElementById('run-filter');"
1086
+ "const stats=document.getElementById('stats');"
1087
+ "const pfbStats=document.getElementById('pfb-stats');"
1088
+ "const onlyFail=document.getElementById('only-failing');"
1089
+ "const clearBtn=document.getElementById('clear-filter');"
1090
+ ""
1091
+ "const colBtn=document.getElementById('col-toggle');"
1092
+ f"const INIT={initial_client_rows};"
1093
+ f"const BATCH={batch_size};"
1094
+ "let colPanel=null;"
1095
+ "const LS='ah_runs_';"
1096
+ "function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}"
1097
+ "function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}"
1098
+ "const loadBtn=document.getElementById('load-more');"
1099
+ "function hidden(){return [...tbl.tBodies[0].querySelectorAll('tr.pr-hidden')];}"
1100
+ "function updateLoadButton(){const h=hidden();if(loadBtn){if(h.length){loadBtn.style.display='inline-block';loadBtn.textContent='Load more ('+h.length+')';}else{loadBtn.style.display='none';}}}"
1101
+ "function revealNextBatch(){hidden().slice(0,BATCH).forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}"
1102
+ "loadBtn&&loadBtn.addEventListener('click',()=>{revealNextBatch();applyFilter();lsSet('loaded',String(tbl.tBodies[0].rows.length-hidden().length));});"
1103
+ "function updateFooterStats(){}"
1104
+ "function updateStats(){const total=tbl.tBodies[0].rows.length;const rows=[...tbl.tBodies[0].rows];const vis=rows.filter(r=>r.style.display!=='none');stats.textContent=vis.length+' / '+total+' shown';let p=0,f=0,b=0;vis.forEach(r=>{p+=Number(r.dataset.passed||0);f+=Number(r.dataset.failed||0);b+=Number(r.dataset.broken||0);});pfbStats.textContent=' P:'+p+' F:'+f+' B:'+b;}"
1105
+ "function applyFilter(){const raw=filter.value.trim().toLowerCase();const tokens=raw.split(/\\s+/).filter(Boolean);const onlyF=onlyFail.checked;if(tokens.length&&document.querySelector('.pr-hidden')){hidden().forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}const rows=[...tbl.tBodies[0].rows];rows.forEach(r=>{const hay=r.getAttribute('data-search')||'';const hasTxt=!tokens.length||tokens.every(t=>hay.indexOf(t)>-1);const failing=Number(r.dataset.failed||0)>0;r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';if(failing){r.classList.add('failing-row');}else{r.classList.remove('failing-row');}});document.querySelectorAll('tr.row-active').forEach(x=>x.classList.remove('row-active'));if(tokens.length===1){const rid=tokens[0];const match=[...tbl.tBodies[0].rows].find(r=>r.querySelector('td.col-run_id code')&&r.querySelector('td.col-run_id code').textContent.trim().toLowerCase()===rid);if(match)match.classList.add('row-active');}updateStats();}"
1106
+ "filter.addEventListener('input',e=>{applyFilter();lsSet('filter',filter.value);});"
1107
+ "filter.addEventListener('keydown',e=>{if(e.key==='Enter'){applyFilter();}});"
1108
+ "onlyFail.addEventListener('change',()=>{applyFilter();lsSet('onlyFail',onlyFail.checked?'1':'0');});"
1109
+ "clearBtn&&clearBtn.addEventListener('click',()=>{filter.value='';onlyFail.checked=false;applyFilter();filter.focus();});"
1110
+ ""
1111
+ "function buildColPanel(){if(colPanel)return;colPanel=document.createElement('div');colPanel.id='col-panel';colPanel.setAttribute('role','dialog');colPanel.setAttribute('aria-label','Column visibility');colPanel.style.cssText='position:absolute;top:100%;left:0;background:var(--bg-alt);border:1px solid var(--border);padding:.55rem .75rem;box-shadow:0 2px 6px rgba(0,0,0,.15);display:none;flex-direction:column;gap:.35rem;z-index:6;max-height:320px;overflow:auto;font-size:12px;';const toolbar=document.createElement('div');toolbar.style.cssText='display:flex;flex-wrap:wrap;gap:.4rem;margin-bottom:.35rem;';toolbar.innerHTML=\"<button type='button' class='ctl-btn' data-coltool='all'>All</button><button type='button' class='ctl-btn' data-coltool='none'>None</button><button type='button' class='ctl-btn' data-coltool='reset'>Reset</button><button type='button' class='ctl-btn' data-preset='minimal'>Minimal</button><button type='button' class='ctl-btn' data-preset='core'>Core</button><button type='button' class='ctl-btn' data-preset='full'>Full</button>\";colPanel.appendChild(toolbar);const hdr=tbl.tHead.querySelectorAll('th');const saved=(lsGet('cols')||'').split(',').filter(Boolean);hdr.forEach((th)=>{const key=th.dataset.col;const id='col_'+key;const wrap=document.createElement('label');wrap.style.cssText='display:flex;align-items:center;gap:.35rem;cursor:pointer;';const cb=document.createElement('input');cb.type='checkbox';cb.id=id;cb.checked=!saved.length||saved.includes(key);cb.addEventListener('change',()=>{persistCols();applyCols();});wrap.appendChild(cb);wrap.appendChild(document.createTextNode(key));colPanel.appendChild(wrap);});toolbar.addEventListener('click',e=>{const b=e.target.closest('button');if(!b)return;const mode=b.getAttribute('data-coltool');const preset=b.getAttribute('data-preset');const boxes=[...colPanel.querySelectorAll('input[type=checkbox]')];if(mode){if(mode==='all'){boxes.forEach(bb=>bb.checked=true);}else if(mode==='none'){boxes.forEach(bb=>{if(bb.id!=='col_run_id')bb.checked=false;});}else if(mode==='reset'){lsSet('cols','');boxes.forEach(bb=>bb.checked=true);}persistCols();applyCols();return;}if(preset){const allKeys=[...tbl.tHead.querySelectorAll('th')].map(h=>h.dataset.col);const MAP={minimal:['run_id','utc','pfb'],core:['run_id','utc','age','size','files','pfb','context','tags'],full:allKeys.filter(k=>k!=='')};const set=new Set(MAP[preset]||[]);boxes.forEach(bb=>{const key=bb.id.replace('col_','');bb.checked=set.size===0||set.has(key);});persistCols();applyCols();}});const ctr=document.getElementById('controls');ctr.style.position='relative';ctr.appendChild(colPanel);}"
1112
+ "function persistCols(){if(!colPanel)return;const vis=[...colPanel.querySelectorAll('input[type=checkbox]')].filter(c=>c.checked).map(c=>c.id.replace('col_',''));lsSet('cols',vis.join(','));}"
1113
+ "function applyCols(){const stored=(lsGet('cols')||'').split(',').filter(Boolean);const hdr=[...tbl.tHead.querySelectorAll('th')];const bodyRows=[...tbl.tBodies[0].rows];if(!stored.length){hdr.forEach((h,i)=>{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));});return;}hdr.forEach((h,i)=>{const key=h.dataset.col;if(key==='run_id'){h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));return;}if(!stored.includes(key)){h.classList.add('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.add('col-hidden'));}else{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));}});}"
1114
+ "colBtn&&colBtn.addEventListener('click',()=>{buildColPanel();const open=colPanel.style.display==='flex';colPanel.style.display=open?'none':'flex';colBtn.setAttribute('aria-expanded',String(!open));if(!open){const first=colPanel.querySelector('input');first&&first.focus();}});"
1115
+ "const helpBtn=document.getElementById('help-toggle');const helpPop=document.getElementById('help-pop');helpBtn&&helpBtn.addEventListener('click',()=>{const vis=helpPop.style.display==='block';helpPop.style.display=vis?'none':'block';helpBtn.setAttribute('aria-expanded',String(!vis));});"
1116
+ "document.addEventListener('keydown',e=>{if(e.key==='Escape'){if(colPanel&&colPanel.style.display==='flex'){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}}});"
1117
+ "document.addEventListener('click',e=>{const t=e.target;if(colPanel&&colPanel.style.display==='flex'&&!colPanel.contains(t)&&t!==colBtn){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'&&!helpPop.contains(t)&&t!==helpBtn){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}});"
1118
+ "document.addEventListener('click',e=>{const btn=e.target.closest('.link-btn');if(!btn)return;e.stopPropagation();const rid=btn.getAttribute('data-rid');if(!rid)return;const base=location.href.split('#')[0];const link=base+'#run='+encodeURIComponent(rid);if(navigator.clipboard){navigator.clipboard.writeText(link).catch(()=>{});}btn.classList.add('copied');setTimeout(()=>btn.classList.remove('copied'),900);});"
1119
+ "function applyHash(){const h=location.hash;if(h.startsWith('#run=')){const rid=decodeURIComponent(h.slice(5));if(rid){filter.value=rid;lsSet('filter',rid);applyFilter();}}}window.addEventListener('hashchange',applyHash);"
1120
+ "let sortState=null;"
1121
+ "function extract(r,col){if(col.startsWith('meta:')){const idx=[...tbl.tHead.querySelectorAll('th')].findIndex(h=>h.dataset.col===col);return idx>-1?r.cells[idx].textContent:'';}switch(col){case 'size':return r.querySelector('td.col-size').getAttribute('title');case 'files':return r.querySelector('td.col-files').getAttribute('title');case 'pfb':return r.querySelector('td.col-pfb').textContent;case 'run_id':return r.querySelector('td.col-run_id').textContent;case 'utc':return r.querySelector('td.col-utc').textContent;case 'context':return r.querySelector('td.col-context').textContent;case 'tags':return r.querySelector('td.col-tags').textContent;default:return r.textContent;}}"
1122
+ "function sortBy(th){const col=th.dataset.col;const tbody=tbl.tBodies[0];const rows=[...tbody.rows];let dir=1;if(sortState&&sortState.col===col){dir=-sortState.dir;}sortState={col,dir};const numeric=(col==='size'||col==='files');rows.sort((r1,r2)=>{const a=extract(r1,col);const b=extract(r2,col);if(numeric){return ((Number(a)||0)-(Number(b)||0))*dir;}return a.localeCompare(b)*dir;});rows.forEach(r=>tbody.appendChild(r));tbl.tHead.querySelectorAll('th.sortable').forEach(h=>h.removeAttribute('data-sort'));th.setAttribute('data-sort',dir===1?'asc':'desc');if(window.setAriaSort){const idx=[...tbl.tHead.querySelectorAll('th')].indexOf(th);window.setAriaSort(idx,dir===1?'ascending':'descending');}lsSet('sort_col',col);lsSet('sort_dir',String(dir));}"
1123
+ "tbl.tHead.querySelectorAll('th.sortable').forEach(th=>{th.addEventListener('click',()=>sortBy(th));});"
1124
+ "function restore(){const f=lsGet('filter');if(f){filter.value=f;}const of=lsGet('onlyFail');if(of==='1'){onlyFail.checked=true;}const loaded=Number(lsGet('loaded')||'0');if(loaded>INIT){while(tbl.tBodies[0].rows.length<loaded && hidden().length){revealNextBatch();}}const sc=lsGet('sort_col');const sd=Number(lsGet('sort_dir')||'1');if(sc){const th=tbl.tHead.querySelector(\"th[data-col='\"+sc+\"']\");if(th){sortState={col:sc,dir:-sd};sortBy(th);if(sd===-1){} }}applyCols();}"
1125
+ "restore();applyHash();tbl.tBodies[0].addEventListener('click',e=>{const tr=e.target.closest('tr');if(!tr)return;if(e.target.tagName==='A'||e.target.classList.contains('link-btn'))return;const codeEl=tr.querySelector('td.col-run_id code');if(!codeEl)return;const rid=codeEl.textContent.trim();if(e.shiftKey&&filter.value.trim()){if(!filter.value.split(/\\s+/).includes(rid)){filter.value=filter.value.trim()+' '+rid;}}else{filter.value=rid;location.hash='run='+encodeURIComponent(rid);}lsSet('filter',filter.value);applyFilter();filter.focus();});"
1126
+ "function relFmt(sec){if(sec<60)return Math.floor(sec)+'s';sec/=60;if(sec<60)return Math.floor(sec)+'m';sec/=60;if(sec<24)return Math.floor(sec)+'h';sec/=24;if(sec<7)return Math.floor(sec)+'d';const w=Math.floor(sec/7);if(w<4)return w+'w';const mo=Math.floor(sec/30);if(mo<12)return mo+'mo';return Math.floor(sec/365)+'y';}"
1127
+ "function updateAges(){const now=Date.now()/1000;tbl.tBodies[0].querySelectorAll('td.age').forEach(td=>{const ep=Number(td.getAttribute('data-epoch'));if(!ep){td.textContent='-';return;}td.textContent=relFmt(now-ep);});}"
1128
+ "applyFilter();updateStats();updateLoadButton();updateAges();setInterval(updateAges,60000);"
1129
+ # Back-compat fragment redirect (#/graphs -> #graph)
1130
+ "(function(){if(location.hash==='#/graphs'){history.replaceState(null,'',location.href.replace('#/graphs','#graph'));}})();"
1131
+ # Theme toggle script
1132
+ "(function(){const btn=document.getElementById('theme-toggle');if(!btn)return;const LS='ah_runs_';function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}function apply(t){if(t==='dark'){document.body.setAttribute('data-theme','dark');btn.textContent='Light';}else{document.body.removeAttribute('data-theme');btn.textContent='Dark';}}let cur=lsGet('theme')||'light';apply(cur);btn.addEventListener('click',()=>{cur=cur==='dark'?'light':'dark';lsSet('theme',cur);apply(cur);});})();"
1133
+ "})();"
1134
+ "</script>"
442
1135
  ),
443
- "nav.quick-links{margin:.25rem 0 1rem;font-size:14px;}",
444
- "nav.quick-links a{margin-right:.65rem;}",
1136
+ f"<script>{RUNS_INDEX_JS_ENH}</script>",
1137
+ # Summary toggle & dashboard scripts removed
1138
+ "<div id='empty-msg' hidden class='empty'>No runs match the current filters.</div>",
1139
+ "</body></html>",
1140
+ ]
1141
+ # Return assembled runs index HTML (bytes)
1142
+ return "".join(parts).encode("utf-8")
1143
+
1144
+
1145
+ def _build_aggregated_runs_html(payload: dict, cfg: PublishConfig) -> bytes:
1146
+ """Very small aggregated runs page (cross-branch latest runs).
1147
+
1148
+ Schema 2 payload example:
1149
+ {
1150
+ "schema": 2,
1151
+ "project": "demo",
1152
+ "updated": 1234567890,
1153
+ "runs": [
1154
+ {"branch": "main", "run_id": "20250101-010101", "time": 123, "passed": 10, ...}
1155
+ ]
1156
+ }
1157
+ """
1158
+ title = f"Allure Aggregated Runs: {payload.get('project') or cfg.project}"
1159
+ runs = payload.get("runs", [])
1160
+ rows: list[str] = []
1161
+
1162
+ def classify(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1163
+ if p is None:
1164
+ return ("-", "health-na")
1165
+ f2 = f or 0
1166
+ b2 = b or 0
1167
+ total_exec = p + f2 + b2
1168
+ if total_exec <= 0:
1169
+ return ("-", "health-na")
1170
+ ratio = p / total_exec
1171
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1172
+ return ("Good", "health-good")
1173
+ if ratio >= 0.75:
1174
+ return ("Warn", "health-warn")
1175
+ return ("Poor", "health-poor")
1176
+
1177
+ for r in runs:
1178
+ b = r.get("branch", "?")
1179
+ rid = r.get("run_id", "?")
1180
+ t = r.get("time")
1181
+ passed = r.get("passed")
1182
+ failed = r.get("failed")
1183
+ broken = r.get("broken")
1184
+ size = r.get("size")
1185
+ summary = (
1186
+ f"{passed or 0}/{failed or 0}/{broken or 0}"
1187
+ if any(x is not None for x in (passed, failed, broken))
1188
+ else "-"
1189
+ )
1190
+ health_label, health_css = classify(passed, failed, broken)
1191
+ pct_pass = None
1192
+ if passed is not None:
1193
+ exec_total = (passed or 0) + (failed or 0) + (broken or 0)
1194
+ if exec_total > 0:
1195
+ pct_pass = f"{(passed / exec_total) * 100:.1f}%"
1196
+ rows.append(
1197
+ f"<tr class='{health_css}'>"
1198
+ f"<td><code>{b}</code></td>"
1199
+ f"<td><code>{rid}</code></td>"
1200
+ f"<td>{_format_epoch_utc(t) if t else '-'}</td>"
1201
+ f"<td>{summary}</td>"
1202
+ f"<td><span class='health-badge {health_css}'>{health_label}</span></td>"
1203
+ f"<td>{pct_pass or '-'}</td>"
1204
+ f"<td>{_format_bytes(size) if size else '-'}</td>"
1205
+ "</tr>"
1206
+ )
1207
+ body = (
1208
+ "\n".join(rows)
1209
+ if rows
1210
+ else "<tr><td colspan='7' style='text-align:center'>No runs yet</td></tr>"
1211
+ )
1212
+ updated = payload.get("updated")
1213
+ parts = [
1214
+ "<!doctype html><html><head><meta charset='utf-8'>",
1215
+ f"<title>{title}</title>",
1216
+ "<style>",
1217
+ "body{font-family:system-ui;margin:1.25rem;line-height:1.4;}",
1218
+ "h1{margin-top:0;font-size:1.3rem;}",
1219
+ "table{border-collapse:collapse;width:100%;max-width:1000px;}",
1220
+ "th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:13px;}",
1221
+ "thead th{background:#f2f4f7;text-align:left;}",
1222
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1223
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1224
+ "footer{margin-top:1rem;font-size:12px;color:#555;}",
1225
+ "#filter-box{margin:.75rem 0;}",
1226
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1227
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1228
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1229
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1230
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
445
1231
  "</style></head><body>",
446
1232
  f"<h1>{title}</h1>",
447
- nav,
448
- "<table><thead><tr>",
449
- ("<th>Run ID</th><th>Epoch</th><th>UTC Time</th><th>Size</th>"),
450
- ("<th>P/F/B</th><th>Context</th><th>Run</th><th>Latest</th></tr></thead><tbody>"),
451
- table_rows,
1233
+ "<div id='filter-box'><label style='font-size:13px'>Filter: <input id='flt' type='text' placeholder='branch or run id'></label></div>", # noqa: E501
1234
+ "<table id='agg'><thead><tr><th>Branch</th><th>Run</th><th>UTC</th><th>P/F/B</th><th>Health</th><th>%Pass</th><th>Size</th></tr></thead><tbody>", # noqa: E501
1235
+ body,
452
1236
  "</tbody></table>",
453
1237
  (
454
- f"<footer>Updated {latest_payload.get('run_id', '?')} "
455
- f"{cfg.project}/{cfg.branch}</footer>"
1238
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1239
+ f"Project: {payload.get('project') or cfg.project}</footer>"
456
1240
  ),
1241
+ "<script>(function(){const f=document.getElementById('flt');const tbl=document.getElementById('agg');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const txt=r.textContent.toLowerCase();r.style.display=txt.includes(q)?'':'none';});});})();</script>", # noqa: E501
457
1242
  "</body></html>",
458
1243
  ]
459
1244
  return "".join(parts).encode("utf-8")
460
1245
 
461
1246
 
1247
+ # --------------------------------------------------------------------------------------
1248
+ # Publish orchestration (restored)
1249
+ # --------------------------------------------------------------------------------------
1250
+
1251
+
1252
+ def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
1253
+ """End-to-end publish: pull history, generate, upload, promote latest, manifests.
1254
+
1255
+ Returns a dict of useful URLs & metadata for caller / CI usage.
1256
+ """
1257
+ paths = paths or Paths()
1258
+ total_steps = 7
1259
+ step = 1
1260
+ timings: dict[str, float] = {}
1261
+ t0 = time()
1262
+ print(f"[publish] [{step}/{total_steps}] Pulling previous history...")
1263
+ pull_history(cfg, paths)
1264
+ timings["history_pull"] = time() - t0
1265
+ step += 1
1266
+ t1 = time()
1267
+ print(f"[publish] [{step}/{total_steps}] Generating Allure report...")
1268
+ generate_report(paths)
1269
+ timings["generate"] = time() - t1
1270
+ # Count report files pre-upload for transparency
1271
+ results_files = sum(1 for _ in paths.report.rglob("*") if _.is_file())
1272
+ step += 1
1273
+ t2 = time()
1274
+ print(f"[publish] [{step}/{total_steps}] Uploading run artifacts ({results_files} files)...")
1275
+ upload_dir(cfg, paths.report, cfg.s3_run_prefix)
1276
+ timings["upload_run"] = time() - t2
1277
+ _ensure_directory_placeholder(
1278
+ cfg,
1279
+ paths.report / "index.html",
1280
+ cfg.s3_run_prefix,
1281
+ )
1282
+ step += 1
1283
+ t3 = time()
1284
+ print(f"[publish] [{step}/{total_steps}] Two-phase latest update starting...")
1285
+ two_phase_update_latest(cfg, paths.report)
1286
+ timings["two_phase_update"] = time() - t3
1287
+ # Optional archive AFTER main run upload
1288
+ archive_key = _maybe_archive_run(cfg, paths)
1289
+ try:
1290
+ step += 1
1291
+ print(f"[publish] [{step}/{total_steps}] Writing manifest & indexes...")
1292
+ write_manifest(cfg, paths)
1293
+ except ClientError as e: # pragma: no cover – non fatal
1294
+ print(f"Manifest write skipped: {e}")
1295
+ try: # retention cleanup
1296
+ if getattr(cfg, "max_keep_runs", None):
1297
+ step += 1
1298
+ print(f"[publish] [{step}/{total_steps}] Retention cleanup...")
1299
+ cleanup_old_runs(cfg, int(cfg.max_keep_runs))
1300
+ except Exception as e: # pragma: no cover
1301
+ print(f"Cleanup skipped: {e}")
1302
+ step += 1
1303
+ print(f"[publish] [{step}/{total_steps}] Publish pipeline complete.")
1304
+ timings["total"] = time() - t0
1305
+
1306
+ files_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
1307
+ return {
1308
+ "run_url": cfg.url_run(),
1309
+ "latest_url": cfg.url_latest(),
1310
+ "runs_index_url": (
1311
+ None
1312
+ if not cfg.cloudfront_domain
1313
+ else (
1314
+ f"{cfg.cloudfront_domain.rstrip('/')}/"
1315
+ f"{branch_root(cfg.prefix, cfg.project, cfg.branch)}/runs/"
1316
+ "index.html"
1317
+ )
1318
+ ),
1319
+ "trend_url": (
1320
+ None
1321
+ if not cfg.cloudfront_domain
1322
+ else (
1323
+ f"{cfg.cloudfront_domain.rstrip('/')}/"
1324
+ f"{branch_root(cfg.prefix, cfg.project, cfg.branch)}/runs/"
1325
+ "trend.html"
1326
+ )
1327
+ ),
1328
+ "bucket": cfg.bucket,
1329
+ "run_prefix": cfg.s3_run_prefix,
1330
+ "latest_prefix": cfg.s3_latest_prefix,
1331
+ "report_size_bytes": compute_dir_size(paths.report),
1332
+ "report_files": files_count,
1333
+ "archive_key": archive_key,
1334
+ "timings": timings,
1335
+ }
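
A hedged usage sketch for the restored publish() entry point. The PublishConfig field names below are inferred from attributes read elsewhere in this module and may not match the real constructor exactly; running this performs a real generate and upload.

# Sketch only: field names are inferred from attribute access in this module
# and may differ from the actual PublishConfig constructor; this really uploads.
from pathlib import Path

from pytest_allure_host.publish import Paths, publish   # import path assumed
from pytest_allure_host.utils import PublishConfig      # import path assumed

cfg = PublishConfig(                 # field names assumed, see note above
    bucket="my-allure-bucket",
    prefix="allure",
    project="demo",
    branch="main",
    cloudfront_domain="https://reports.example.com",
)
result = publish(cfg, Paths(base=Path(".")))
print(result["latest_url"])
print(result["report_files"], "files,", result["timings"]["total"], "s total")
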
1336
+
1337
+
462
1338
  def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
463
1339
  title = f"Run History Trend: {cfg.project} / {cfg.branch}"
464
1340
  json_url = "../latest/history/history-trend.json"
@@ -492,26 +1368,34 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
492
1368
  "</th></tr></thead><tbody></tbody></table>"
493
1369
  ),
494
1370
  "<script>\n(async function(){\n",
495
- f" const resp = await fetch('{json_url}');\n",
496
- (
497
- " if(!resp.ok){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
498
- "'<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');return;}\n" # noqa: E501
499
- ),
500
- " const data = await resp.json();\n",
501
- (
502
- " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
503
- "'<p>No trend data.</p>');return;}\n" # noqa: E501
504
- ),
1371
+ f" const url = '{json_url}';\n",
1372
+ " let data = null;\n",
1373
+ " try {\n",
1374
+ " const resp = await fetch(url, { cache: 'no-store' });\n",
1375
+ " const ct = resp.headers.get('content-type') || '';\n",
1376
+ " if(!resp.ok){\n",
1377
+ " document.body.insertAdjacentHTML('beforeend',\n",
1378
+ " '<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');\n",
1379
+ " return;\n",
1380
+ " }\n",
1381
+ " if (!ct.includes('application/json')) {\n",
1382
+ " const txt = await resp.text();\n",
1383
+ " throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');\n",
1384
+ " }\n",
1385
+ " data = await resp.json();\n",
1386
+ " } catch (e) {\n",
1387
+ " document.body.insertAdjacentHTML('beforeend', '<p style=\\'color:red\\'>Error loading trend data: '+(e && e.message ? e.message : e)+'</p>');\n",
1388
+ " return;\n",
1389
+ " }\n",
1390
+ " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend','<p>No trend data.</p>');return;}\n",
505
1391
  # Sanitize & enrich: fallback label if reportName/buildOrder missing
506
1392
  (
507
1393
  " const stats = data\n"
508
1394
  " .filter(d=>d&&typeof d==='object')\n"
509
1395
  " .map((d,i)=>{\n"
510
- " const st = (d.statistic && typeof d.statistic==='object') ?" # noqa: E501
511
- " d.statistic : {};\n"
512
- " const lbl = d.reportName || d.buildOrder || st.name ||" # noqa: E501
513
- " (i+1);\n"
514
- " return {label: String(lbl), ...st};\n"
1396
+ " const src = (d.statistic && typeof d.statistic==='object') ? d.statistic : ((d.data && typeof d.data==='object') ? d.data : {});\n"
1397
+ " const lbl = d.reportName || d.buildOrder || d.name || src.name || (i+1);\n"
1398
+ " return {label: String(lbl), ...src};\n"
515
1399
  " });\n"
516
1400
  ),
517
1401
  (
@@ -552,57 +1436,181 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
552
1436
  return "".join(parts).encode("utf-8")
553
1437
 
554
1438
 
555
- # --------------------------------------------------------------------------------------
556
- # Retention cleanup & directory placeholder
557
- # --------------------------------------------------------------------------------------
558
-
1439
+ def _build_history_insights_html(cfg: PublishConfig) -> bytes:
1440
+ """Render a lightweight insights page derived from history-trend.json.
559
1441
 
560
- def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
561
- if keep is None or keep <= 0:
562
- return
563
- s3 = _s3(cfg)
564
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
565
- # list immediate children (run prefixes)
566
- paginator = s3.get_paginator("list_objects_v2")
567
- run_prefixes: list[str] = []
568
- for page in paginator.paginate(Bucket=cfg.bucket, Prefix=f"{root}/", Delimiter="/"):
569
- for cp in page.get("CommonPrefixes", []) or []:
570
- pfx = cp.get("Prefix")
571
- if not pfx:
572
- continue
573
- name = pfx.rsplit("/", 2)[-2]
574
- if name in {"latest", "runs"}:
575
- continue
576
- is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
577
- if is_ts:
578
- run_prefixes.append(pfx)
579
- run_prefixes.sort(reverse=True)
580
- for old in run_prefixes[keep:]:
581
- delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
1442
+ Provides quick metrics (run count, latest pass%, failure streak, averages)
1443
+ plus a compact table of recent entries – purely client-side.
1444
+ """
1445
+ title = f"Run History Insights: {cfg.project} / {cfg.branch}"
1446
+ json_url = "../latest/history/history-trend.json"
1447
+ parts: list[str] = [
1448
+ "<!doctype html><html><head><meta charset='utf-8'>",
1449
+ f"<title>{title}</title>",
1450
+ "<style>body{font-family:system-ui;margin:1.25rem;line-height:1.4;background:#fff;color:#111;}h1{margin-top:0;font-size:1.35rem;}a{color:#2563eb;text-decoration:none;}a:hover{text-decoration:underline;}code{background:#f2f4f7;padding:2px 4px;border-radius:4px;font-size:12px;}#metrics{display:flex;flex-wrap:wrap;gap:.8rem;margin:1rem 0;}#metrics .m{flex:0 1 170px;background:#f8f9fa;border:1px solid #d0d4d9;border-radius:6px;padding:.6rem .7rem;box-shadow:0 1px 2px rgba(0,0,0,.06);}#metrics .m h3{margin:0 0 .3rem;font-size:11px;font-weight:600;letter-spacing:.5px;color:#555;text-transform:uppercase;}#metrics .m .v{font-size:20px;font-weight:600;}table{border-collapse:collapse;width:100%;max-width:1100px;}th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:12px;text-align:left;}thead th{background:#f2f4f7;}tbody tr:nth-child(even){background:#fafbfc;} .ok{color:#2e7d32;font-weight:600;} .warn{color:#f59e0b;font-weight:600;} .bad{color:#d32f2f;font-weight:600;}footer{margin-top:1.2rem;font-size:12px;color:#555;}#err{color:#d32f2f;margin-top:1rem;}@media (prefers-color-scheme:dark){body{background:#0f1115;color:#f5f6f8;}#metrics .m{background:#1b1f26;border-color:#2a313b;color:#f5f6f8;}thead th{background:#1e252d;}table,th,td{border-color:#2a313b;}code{background:#1e252d;}a{color:#3b82f6;}} .health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;} .health-good{background:#e6f7ed;border-color:#9ad5b6;} .health-warn{background:#fff7e6;border-color:#f5c063;} .health-poor{background:#ffebe8;border-color:#f08a80;} .health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1451
+ "</style></head><body>",
1452
+ f"<h1>{title}</h1>",
1453
+ "<p>Source: <code>latest/history/history-trend.json</code> · <a href='index.html'>back to runs</a> · <a href='trend.html'>trend viewer</a> · <a href='../latest/history/history-trend.json' target='_blank' rel='noopener'>raw JSON</a></p>",
1454
+ "<div id='metrics'></div>",
1455
+ "<div style='overflow:auto'><table id='hist'><thead><tr><th>#</th><th>Label</th><th>Passed</th><th>Failed</th><th>Broken</th><th>Total</th><th>Pass%</th><th>Health</th></tr></thead><tbody></tbody></table></div>",
1456
+ "<div id='err' hidden></div>",
1457
+ "<footer id='ft'></footer>",
1458
+ "<script>\n(async function(){\n",
1459
+ f" const url = '{json_url}';\n",
1460
+ " const MET=document.getElementById('metrics');\n",
1461
+ " const TB=document.querySelector('#hist tbody');\n",
1462
+ " const ERR=document.getElementById('err');\n",
1463
+ " const FT=document.getElementById('ft');\n",
1464
+ " function pct(p,f,b){const t=(p||0)+(f||0)+(b||0);return t?((p||0)/t*100).toFixed(1)+'%':'-';}\n",
1465
+ " function classify(p,f,b){const t=(p||0)+(f||0)+(b||0);if(!t)return ['-','health-na'];if((f||0)==0&&(b||0)==0&&(p||0)/t>=0.9)return['Good','health-good'];const ratio=(p||0)/t; if(ratio>=0.75)return['Warn','health-warn'];return['Poor','health-poor'];}\n",
1466
+ " let data=null;\n",
1467
+ " try {\n",
1468
+ " const r=await fetch(url, { cache: 'no-store' });\n",
1469
+ " const ct=r.headers.get('content-type')||'';\n",
1470
+ " if(!r.ok) throw new Error('HTTP '+r.status);\n",
1471
+ " if(!ct.includes('application/json')){const txt=await r.text();throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');}\n",
1472
+ " data=await r.json();\n",
1473
+ " if(!Array.isArray(data)) throw new Error('Unexpected JSON shape');\n",
1474
+ " } catch(e) {\n",
1475
+ " ERR.textContent='Failed to load history: '+(e && e.message? e.message : String(e));ERR.hidden=false;return;\n",
1476
+ " }\n",
1477
+ " const rows=data.filter(d=>d&&typeof d==='object').map((d,i)=>{\n",
1478
+ " const st=(d.statistic&&typeof d.statistic==='object')?d.statistic:((d.data&&typeof d.data==='object')?d.data:{});\n",
1479
+ " const label=d.reportName||d.buildOrder||d.name||st.name||i+1;\n",
1480
+ " const total=typeof st.total==='number'?st.total:(st.passed||0)+(st.failed||0)+(st.broken||0);\n",
1481
+ " return {idx:i,label:String(label),passed:st.passed||0,failed:st.failed||0,broken:st.broken||0,total:total};\n",
1482
+ " });\n",
1483
+ " if(!rows.length){ERR.textContent='No usable entries.';ERR.hidden=false;return;}\n",
1484
+ " const latest=rows[rows.length-1];\n",
1485
+ " const passRates=rows.map(r=>r.total? r.passed/r.total:0);\n",
1486
+ " const avgAll=(passRates.reduce((a,b)=>a+b,0)/passRates.length*100).toFixed(1)+'%';\n",
1487
+ " const last10=passRates.slice(-10);\n",
1488
+ " const avg10=(last10.reduce((a,b)=>a+b,0)/last10.length*100).toFixed(1)+'%';\n",
1489
+ " let streak=0;\n",
1490
+ " for(let i=rows.length-1;i>=0;i--){if(rows[i].failed===0&&rows[i].broken===0)streak++;else break;}\n",
1491
+ " function card(t,v){return `<div class='m'><h3>${t}</h3><div class='v'>${v}</div></div>`;}\n",
1492
+ " const latestPct=pct(latest.passed,latest.failed,latest.broken);\n",
1493
+ " MET.innerHTML=card('Runs',rows.length)+card('Latest Pass%',latestPct)+card('Avg Pass% (all)',avgAll)+card('Avg Pass% (last10)',avg10)+card('Healthy Streak',streak)+card('Failures (latest)',latest.failed);\n",
1494
+ " rows.slice(-80).reverse().forEach(r=>{\n",
1495
+ " const pr=pct(r.passed,r.failed,r.broken);\n",
1496
+ " const [hl,cls]=classify(r.passed,r.failed,r.broken);\n",
1497
+ " TB.insertAdjacentHTML('beforeend',`<tr class='${cls}'><td>${rows.length-r.idx}</td><td>${r.label}</td><td>${r.passed}</td><td>${r.failed}</td><td>${r.broken}</td><td>${r.total}</td><td>${pr}</td><td><span class='health-badge ${cls}'>${hl}</span></td></tr>`);\n",
1498
+ " });\n",
1499
+ " FT.textContent='Entries: '+rows.length+' · Generated '+new Date().toISOString();\n",
1500
+ "})();</script>",
1501
+ "</body></html>",
1502
+ ]
1503
+ return "".join(parts).encode("utf-8")
582
1504
 
583
1505
 
584
- def _ensure_directory_placeholder(cfg: PublishConfig, index_file: Path, dir_prefix: str) -> None:
585
- if not index_file.exists() or not dir_prefix.endswith("/"):
586
- return
587
- body = index_file.read_bytes()
588
- extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
589
- if cfg.ttl_days is not None:
590
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
591
- try:
592
- _s3(cfg).put_object(
593
- Bucket=cfg.bucket,
594
- Key=dir_prefix,
595
- Body=body,
596
- CacheControl=extra["CacheControl"],
597
- ContentType=extra["ContentType"],
598
- )
599
- except ClientError as e: # pragma: no cover – best effort
600
- print(f"Placeholder upload skipped: {e}")
1506
+ def _branch_health(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1507
+ if p is None or (f is None and b is None):
1508
+ return ("-", "health-na")
1509
+ f2 = f or 0
1510
+ b2 = b or 0
1511
+ total_exec = p + f2 + b2
1512
+ if total_exec <= 0:
1513
+ return ("-", "health-na")
1514
+ ratio = p / total_exec
1515
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1516
+ return ("Good", "health-good")
1517
+ if ratio >= 0.75:
1518
+ return ("Warn", "health-warn")
1519
+ return ("Poor", "health-poor")
1520
+
1521
+
1522
+ def _render_branch_row(br: dict) -> str:
1523
+ bname = br.get("branch", "?")
1524
+ rid = br.get("latest_run_id") or "-"
1525
+ t = br.get("time")
1526
+ passed = br.get("passed")
1527
+ failed = br.get("failed")
1528
+ broken = br.get("broken")
1529
+ total_runs = br.get("total_runs")
1530
+ latest_url = br.get("latest_url") or f"./{bname}/latest/"
1531
+ runs_url = br.get("runs_url") or f"./{bname}/runs/"
1532
+ trend_url = br.get("trend_url") or f"./{bname}/runs/trend.html"
1533
+ time_cell = _format_epoch_utc(t) if t else "-"
1534
+ pct_pass: str | None = None
1535
+ if passed is not None:
1536
+ exec_total = (passed or 0) + (failed or 0) + (broken or 0)
1537
+ if exec_total > 0:
1538
+ pct_pass = f"{(passed / exec_total) * 100:.1f}%"
1539
+ health_label, health_css = _branch_health(passed, failed, broken)
1540
+ row_classes = []
1541
+ if failed and failed > 0:
1542
+ row_classes.append("row-fail")
1543
+ if broken and broken > 0:
1544
+ row_classes.append("row-broken")
1545
+ if health_css:
1546
+ row_classes.append(health_css)
1547
+ cls_attr = f" class='{' '.join(row_classes)}'" if row_classes else ""
1548
+ return (
1549
+ f"<tr{cls_attr}>"
1550
+ f"<td class='col-branch'><code>{bname}</code></td>"
1551
+ f"<td class='col-lrid'><code>{rid}</code></td>"
1552
+ f"<td class='col-time'>{time_cell}</td>"
1553
+ f"<td class='col-passed'>{passed if passed is not None else '-'}" # noqa: E501
1554
+ f"</td><td class='col-failed'>{failed if failed is not None else '-'}" # noqa: E501
1555
+ f"</td><td class='col-broken'>{broken if broken is not None else '-'}" # noqa: E501
1556
+ f"</td><td class='col-total'>{total_runs if total_runs is not None else '-'}" # noqa: E501
1557
+ f"</td><td class='col-health'><span class='health-badge {health_css}'>{health_label}</span>" # noqa: E501
1558
+ f"</td><td class='col-passpct'>{pct_pass or '-'}" # noqa: E501
1559
+ f"</td><td class='col-links'><a href='{latest_url}'>latest</a> · "
1560
+ f"<a href='{runs_url}'>runs</a> · <a href='{trend_url}'>trend</a></td>"
1561
+ "</tr>"
1562
+ )
601
1563
 
602
1564
 
603
- # --------------------------------------------------------------------------------------
604
- # Preflight / Dry run / Publish orchestration
605
- # --------------------------------------------------------------------------------------
1565
+ def _build_branches_dashboard_html(payload: dict, cfg: PublishConfig) -> bytes:
1566
+ """Render a lightweight branches summary dashboard (schema 1)."""
1567
+ branches = payload.get("branches", [])
1568
+ title = f"Allure Branches: {payload.get('project') or cfg.project}"
1569
+ rows = [_render_branch_row(br) for br in branches]
1570
+ body_rows = (
1571
+ "\n".join(rows)
1572
+ if rows
1573
+ else "<tr><td colspan='10' style='text-align:center'>No branches yet</td></tr>"
1574
+ )
1575
+ updated = payload.get("updated")
1576
+ parts: list[str] = [
1577
+ "<!doctype html><html><head><meta charset='utf-8'>",
1578
+ f"<title>{title}</title>",
1579
+ "<style>",
1580
+ "body{font-family:system-ui;margin:1.5rem;line-height:1.4;}",
1581
+ "h1{margin-top:0;font-size:1.35rem;}",
1582
+ "table{border-collapse:collapse;width:100%;max-width:1100px;}",
1583
+ "th,td{padding:.5rem .6rem;border:1px solid #ccc;font-size:13px;}",
1584
+ "thead th{background:#f2f4f7;text-align:left;}",
1585
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1586
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1587
+ "footer{margin-top:1.5rem;font-size:12px;color:#555;}",
1588
+ "#filters{margin:.75rem 0;display:flex;gap:1rem;flex-wrap:wrap;}",
1589
+ "#filters input{padding:4px 6px;font-size:13px;}",
1590
+ ".dim{color:#666;font-size:12px;}",
1591
+ ".row-fail{background:#fff5f4 !important;}",
1592
+ ".row-broken{background:#fff9ef !important;}",
1593
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1594
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1595
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1596
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1597
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1598
+ "</style></head><body>",
1599
+ f"<h1>{title}</h1>",
1600
+ "<div id='filters'><label style='font-size:13px'>Branch filter: "
1601
+ "<input id='branch-filter' type='text' placeholder='substring'></label>"
1602
+ "<span class='dim'>Shows most recently active branches first.</span></div>",
1603
+ "<table id='branches'><thead><tr><th>Branch</th><th>Latest Run</th><th>UTC</th><th>P</th><th>F</th><th>B</th><th>Total Runs</th><th>Health</th><th>%Pass</th><th>Links</th></tr></thead><tbody>", # noqa: E501
1604
+ body_rows,
1605
+ "</tbody></table>",
1606
+ (
1607
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1608
+ f"Project: {payload.get('project') or cfg.project}</footer>"
1609
+ ),
1610
+ "<script>(function(){const f=document.getElementById('branch-filter');const tbl=document.getElementById('branches');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const name=r.querySelector('.col-branch').textContent.toLowerCase();r.style.display=name.includes(q)?'':'';});});})();</script>", # noqa: E501
1611
+ "</body></html>",
1612
+ ]
1613
+ return "".join(parts).encode("utf-8")
606
1614
 
607
1615
 
608
1616
  def preflight(
@@ -632,13 +1640,35 @@ def preflight(
632
1640
  except OSError:
633
1641
  results["allure_results"] = False
634
1642
 
1643
+ region_mismatch = False
1644
+ bucket_region = None
635
1645
  try:
636
1646
  s3 = _s3(cfg)
637
- s3.head_bucket(Bucket=cfg.bucket)
638
- s3.list_objects_v2(Bucket=cfg.bucket, Prefix=cfg.s3_latest_prefix, MaxKeys=1)
1647
+ head = s3.head_bucket(Bucket=cfg.bucket)
1648
+ # region detection (defensive: some stubs may return None)
1649
+ if head:
1650
+ bucket_region = (
1651
+ head.get("ResponseMetadata", {})
1652
+ .get(
1653
+ "HTTPHeaders",
1654
+ {},
1655
+ )
1656
+ .get("x-amz-bucket-region")
1657
+ )
1658
+ # Attempt a small list to confirm permissions
1659
+ s3.list_objects_v2(
1660
+ Bucket=cfg.bucket,
1661
+ Prefix=cfg.s3_latest_prefix,
1662
+ MaxKeys=1,
1663
+ )
639
1664
  results["s3_bucket"] = True
640
- except ClientError:
1665
+ except ClientError as e:
1666
+ code = getattr(e, "response", {}).get("Error", {}).get("Code")
1667
+ if code == "301": # permanent redirect / region mismatch
1668
+ region_mismatch = True
641
1669
  results["s3_bucket"] = False
1670
+ results["bucket_region"] = bucket_region
1671
+ results["region_mismatch"] = region_mismatch
642
1672
  return results
643
1673
 
644
1674
 
@@ -661,45 +1691,135 @@ def plan_dry_run(cfg: PublishConfig, paths: Paths | None = None) -> dict:
661
1691
  )
662
1692
  else:
663
1693
  samples.append({"note": "Report missing; would run allure generate."})
664
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
1694
+ # Align keys with existing test expectations
665
1695
  return {
666
1696
  "bucket": cfg.bucket,
667
1697
  "run_prefix": cfg.s3_run_prefix,
668
- "latest_prefix": f"{root}/latest_tmp/",
1698
+ # reflect the temporary latest staging area (two-phase)
1699
+ "latest_prefix": getattr(
1700
+ cfg,
1701
+ "s3_latest_prefix_tmp",
1702
+ cfg.s3_latest_prefix,
1703
+ ),
1704
+ "samples": samples,
669
1705
  "run_url": cfg.url_run(),
670
1706
  "latest_url": cfg.url_latest(),
671
- "context_url": getattr(cfg, "context_url", None),
672
- "samples": samples,
673
1707
  }
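
An illustrative dry-run plan dict with the keys returned above, and how a CI step might log it before a real publish; all values are examples.

# Illustrative only: example values, shaped like the dict plan_dry_run returns.
plan = {
    "bucket": "my-allure-bucket",
    "run_prefix": "allure/demo/main/20250101-010101/",
    "latest_prefix": "allure/demo/main/latest_tmp/",  # two-phase staging area (example)
    "samples": [{"note": "Report missing; would run allure generate."}],
    "run_url": "https://reports.example.com/allure/demo/main/20250101-010101/",
    "latest_url": "https://reports.example.com/allure/demo/main/latest/",
}
for key in ("bucket", "run_prefix", "latest_prefix", "run_url", "latest_url"):
    print(f"{key:14s} {plan[key]}")
for sample in plan["samples"]:
    print("sample:", sample)
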
674
1708
 
675
1709
 
676
- def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
677
- paths = paths or Paths()
678
- pull_history(cfg, paths)
679
- generate_report(paths)
680
- upload_dir(cfg, paths.report, cfg.s3_run_prefix)
681
- _ensure_directory_placeholder(cfg, paths.report / "index.html", cfg.s3_run_prefix)
682
- two_phase_update_latest(cfg, paths.report)
1710
+ def _maybe_archive_run(cfg: PublishConfig, paths: Paths) -> str | None:
1711
+ """Optionally archive the run under an archive/ prefix.
1712
+
1713
+ Controlled by cfg.archive_run (bool). Best-effort; failures do not abort
1714
+ publish.
1715
+ Returns the archive object key if performed.
1716
+ """
1717
+ # Backward compatibility: earlier implementation mistakenly looked for
1718
+ # cfg.archive_runs (plural). The correct flag is cfg.archive_run; both are honored.
1719
+ should_archive = getattr(cfg, "archive_run", False) or getattr(cfg, "archive_runs", False)
1720
+ if not should_archive:
1721
+ return None
1722
+ import tempfile
1723
+
1724
+ archive_format = getattr(cfg, "archive_format", "tar.gz") or "tar.gz"
1725
+ run_root = paths.report
1726
+ if not run_root or not run_root.exists():
1727
+ return None
1728
+ # Destination S3 key (placed alongside run prefix root)
1729
+ # s3://bucket/<prefix>/<project>/<branch>/<run_id>/<run_id>.tar.gz
1730
+ archive_filename = f"{cfg.run_id}.{'zip' if archive_format == 'zip' else 'tar.gz'}"
1731
+ s3_key = f"{cfg.s3_run_prefix}{archive_filename}"
683
1732
  try:
684
- write_manifest(cfg, paths)
685
- except ClientError as e: # pragma: no cover – non fatal
686
- print(f"Manifest write skipped: {e}")
687
- try: # retention cleanup
688
- if getattr(cfg, "max_keep_runs", None):
689
- cleanup_old_runs(cfg, int(cfg.max_keep_runs))
1733
+ tmp_dir = tempfile.mkdtemp(prefix="allure-arch-")
1734
+ archive_path = Path(tmp_dir) / archive_filename
1735
+ if archive_format == "zip":
1736
+ import zipfile
1737
+
1738
+ with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
1739
+ for p in run_root.rglob("*"):
1740
+ if p.is_file():
1741
+ zf.write(p, arcname=p.relative_to(run_root).as_posix())
1742
+ else: # tar.gz
1743
+ import tarfile
1744
+
1745
+ with tarfile.open(archive_path, "w:gz") as tf:
1746
+ for p in run_root.rglob("*"):
1747
+ if p.is_file():
1748
+ tf.add(p, arcname=p.relative_to(run_root).as_posix())
1749
+ # Upload archive object
1750
+ s3 = _s3(cfg)
1751
+ extra = {
1752
+ "CacheControl": "public, max-age=31536000, immutable",
1753
+ "ContentType": "application/gzip" if archive_format != "zip" else "application/zip",
1754
+ }
1755
+ if cfg.ttl_days is not None:
1756
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1757
+ if cfg.sse:
1758
+ extra["ServerSideEncryption"] = cfg.sse
1759
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
1760
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
1761
+ s3.upload_file(str(archive_path), cfg.bucket, s3_key, ExtraArgs=extra)
1762
+ print(f"[publish] Archived run bundle uploaded: s3://{cfg.bucket}/{s3_key}")
1763
+ return s3_key
690
1764
  except Exception as e: # pragma: no cover
691
- print(f"Cleanup skipped: {e}")
1765
+ if os.getenv("ALLURE_HOST_DEBUG"):
1766
+ print(f"[publish] archive skipped: {e}")
1767
+ return None
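
A tiny illustration of where the optional archive bundle lands, following the key layout in the comment above; all values are examples.

# Illustrative only: the optional archive object sits next to the run's own
# report files, using the filename scheme shown in _maybe_archive_run.
run_prefix = "allure/demo/main/20250101-010101/"
run_id = "20250101-010101"
archive_format = "tar.gz"  # or "zip"

archive_filename = f"{run_id}.{'zip' if archive_format == 'zip' else 'tar.gz'}"
s3_key = f"{run_prefix}{archive_filename}"
assert s3_key == "allure/demo/main/20250101-010101/20250101-010101.tar.gz"
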
692
1768
 
693
- files_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
694
- return {
695
- "run_url": cfg.url_run(),
696
- "latest_url": cfg.url_latest(),
697
- "bucket": cfg.bucket,
698
- "run_prefix": cfg.s3_run_prefix,
699
- "latest_prefix": cfg.s3_latest_prefix,
700
- "report_size_bytes": compute_dir_size(paths.report),
701
- "report_files": files_count,
702
- }
1769
+
1770
+ # --------------------------------------------------------------------------------------
1771
+ # Retention cleanup & directory placeholder (restored)
1772
+ # --------------------------------------------------------------------------------------
1773
+
1774
+
1775
+ def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
1776
+ if keep is None or keep <= 0:
1777
+ return
1778
+ s3 = _s3(cfg)
1779
+ root = branch_root(cfg.prefix, cfg.project, cfg.branch)
1780
+ paginator = s3.get_paginator("list_objects_v2")
1781
+ run_prefixes: list[str] = []
1782
+ for page in paginator.paginate(
1783
+ Bucket=cfg.bucket,
1784
+ Prefix=f"{root}/",
1785
+ Delimiter="/",
1786
+ ):
1787
+ for cp in page.get("CommonPrefixes", []) or []:
1788
+ pfx = cp.get("Prefix")
1789
+ if not pfx:
1790
+ continue
1791
+ name = pfx.rsplit("/", 2)[-2]
1792
+ if name in {"latest", "runs"}:
1793
+ continue
1794
+ is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
1795
+ if is_ts:
1796
+ run_prefixes.append(pfx)
1797
+ run_prefixes.sort(reverse=True)
1798
+ for old in run_prefixes[keep:]:
1799
+ delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
1800
+
1801
+
1802
+ def _ensure_directory_placeholder(
1803
+ cfg: PublishConfig,
1804
+ index_file: Path,
1805
+ dir_prefix: str,
1806
+ ) -> None:
1807
+ if not index_file.exists() or not dir_prefix.endswith("/"):
1808
+ return
1809
+ body = index_file.read_bytes()
1810
+ extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
1811
+ if cfg.ttl_days is not None:
1812
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1813
+ try:
1814
+ _s3(cfg).put_object(
1815
+ Bucket=cfg.bucket,
1816
+ Key=dir_prefix,
1817
+ Body=body,
1818
+ CacheControl=extra["CacheControl"],
1819
+ ContentType=extra["ContentType"],
1820
+ )
1821
+ except ClientError as e: # pragma: no cover
1822
+ print(f"Placeholder upload skipped: {e}")
703
1823
 
704
1824
 
705
1825
  __all__ = [