pytest-allure-host 0.1.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
+ # flake8: noqa
1
2
  """Report publishing primitives (generate, upload, atomic latest swap).
2
-
3
- Responsible for:
4
- * Generating Allure report (pulling prior history first)
3
+ f"<script>const INIT={initial_client_rows};const BATCH={batch_size};</script>",
4
+ f"<script>{RUNS_INDEX_JS}</script>",
5
5
  * Uploading run report to S3 (run prefix) + atomic promotion to latest/
6
6
  * Writing manifest (runs/index.json) + human HTML index + trend viewer
7
7
  * Retention (max_keep_runs) + directory placeholder objects
@@ -12,12 +12,16 @@ visualising passed / failed / broken counts across historical runs using
12
12
  Allure's history-trend.json.
13
13
  """
14
14
 
15
+ # ruff: noqa: E501 # Long HTML/JS lines in embedded template
16
+
15
17
  from __future__ import annotations
16
18
 
17
19
  import json
20
+ import os
18
21
  import shutil
19
22
  import subprocess # nosec B404
20
23
  from collections.abc import Iterable
24
+ from concurrent.futures import ThreadPoolExecutor, as_completed
21
25
  from dataclasses import dataclass
22
26
  from pathlib import Path
23
27
  from time import time
@@ -25,6 +29,15 @@ from time import time
25
29
  import boto3
26
30
  from botocore.exceptions import ClientError
27
31
 
32
+ from .templates import (
33
+ RUNS_INDEX_CSS_BASE,
34
+ RUNS_INDEX_CSS_ENH,
35
+ RUNS_INDEX_CSS_MISC,
36
+ RUNS_INDEX_CSS_TABLE,
37
+ RUNS_INDEX_JS,
38
+ RUNS_INDEX_JS_ENH,
39
+ RUNS_INDEX_SENTINELS,
40
+ )
28
41
  from .utils import (
29
42
  PublishConfig,
30
43
  branch_root,
@@ -35,129 +48,99 @@ from .utils import (
35
48
  )
36
49
 
37
50
  # --------------------------------------------------------------------------------------
38
- # Paths helper
51
+ # S3 client + listing/deletion helpers (restored after refactor)
39
52
  # --------------------------------------------------------------------------------------
40
53
 
41
54
 
42
- @dataclass
43
- class Paths:
44
- """Filesystem layout helper.
45
-
46
- Backwards compatibility: tests (and prior API) may pass explicit
47
- 'report=' and 'results=' paths. If omitted we derive them from base.
48
- """
49
-
50
- base: Path = Path(".")
51
- report: Path | None = None
52
- results: Path | None = None
53
-
54
- def __post_init__(self) -> None: # derive defaults if not provided
55
- if self.results is None:
56
- self.results = self.base / "allure-results"
57
- if self.report is None:
58
- self.report = self.base / "allure-report"
59
-
60
-
61
- def _discover_meta_keys(runs: list[dict]) -> list[str]:
62
- """Return sorted list of dynamic metadata keys across runs."""
63
- core_cols = {
64
- "run_id",
65
- "time",
66
- "size",
67
- "project",
68
- "branch",
69
- "passed",
70
- "failed",
71
- "broken",
72
- "context_url",
73
- }
74
- keys: list[str] = []
75
- for r in runs:
76
- for k in r.keys():
77
- if k in core_cols or k.endswith("_url"):
78
- continue
79
- if k not in keys:
80
- keys.append(k)
81
- keys.sort()
82
- return keys
83
-
84
-
85
- def _format_meta_cell(val) -> str:
86
- if val is None:
87
- return "<td>-</td>"
88
- esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
89
- return f"<td>{esc}</td>"
90
-
91
-
92
- def _s3(cfg: PublishConfig): # allow custom endpoint (tests / local)
93
- endpoint = getattr(cfg, "s3_endpoint", None)
94
- if endpoint:
95
- return boto3.client("s3", endpoint_url=endpoint)
55
+ def _s3(cfg: PublishConfig): # noqa: D401 - tiny wrapper
56
+ """Return a boto3 S3 client honoring optional endpoint override."""
57
+ if getattr(cfg, "s3_endpoint", None): # custom / LocalStack style
58
+ return boto3.client("s3", endpoint_url=cfg.s3_endpoint)
96
59
  return boto3.client("s3")
97
60
 
98
61
 
99
- def list_keys(
100
- bucket: str,
101
- prefix: str,
102
- endpoint: str | None = None,
103
- ) -> Iterable[str]:
62
+ def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> list[str]:
63
+ """List object keys under a prefix (non-recursive)."""
104
64
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
65
+ keys: list[str] = []
105
66
  paginator = s3.get_paginator("list_objects_v2")
106
67
  for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
107
68
  for obj in page.get("Contents", []) or []:
108
- key = obj.get("Key")
109
- if key:
110
- yield key
69
+ k = obj.get("Key")
70
+ if k:
71
+ keys.append(k)
72
+ return keys
111
73
 
112
74
 
113
- def delete_prefix(
114
- bucket: str,
115
- prefix: str,
116
- endpoint: str | None = None,
117
- ) -> None:
118
- keys = list(list_keys(bucket, prefix, endpoint))
119
- if not keys:
75
+ def delete_prefix(bucket: str, prefix: str, endpoint: str | None = None) -> None:
76
+ """Delete all objects beneath prefix (best-effort)."""
77
+ ks = list_keys(bucket, prefix, endpoint)
78
+ if not ks:
120
79
  return
121
80
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
122
- # Batch delete 1000 at a time
123
- for i in range(0, len(keys), 1000):
124
- # Ruff style: remove spaces inside slice
125
- batch = keys[i : i + 1000]
126
- if not batch:
127
- continue
128
- s3.delete_objects(
129
- Bucket=bucket,
130
- Delete={"Objects": [{"Key": k} for k in batch], "Quiet": True},
131
- )
81
+ # Batch in chunks of 1000 (S3 limit)
82
+ for i in range(0, len(ks), 1000):
83
+ chunk = ks[i : i + 1000]
84
+ try: # pragma: no cover - error path
85
+ s3.delete_objects(
86
+ Bucket=bucket,
87
+ Delete={"Objects": [{"Key": k} for k in chunk], "Quiet": True},
88
+ )
89
+ except Exception as e: # pragma: no cover
90
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
91
+ print(f"[publish] delete_prefix warning: {e}")
92
+
93
+
94
+ def pull_history(cfg: PublishConfig, paths: "Paths") -> None:
95
+ """Best-effort download of previous run history to seed trend graphs.
96
+
97
+ Copies objects from latest/history/ into local allure-results/history/ so the
98
+ newly generated report preserves cumulative trend data. Silent on failure.
99
+ """
100
+ try:
101
+ hist_prefix = f"{cfg.s3_latest_prefix}history/"
102
+ keys = list_keys(cfg.bucket, hist_prefix, getattr(cfg, "s3_endpoint", None))
103
+ if not keys:
104
+ return
105
+ target_dir = paths.results / "history"
106
+ target_dir.mkdir(parents=True, exist_ok=True)
107
+ s3 = _s3(cfg)
108
+ for k in keys:
109
+ rel = k[len(hist_prefix) :]
110
+ if not rel or rel.endswith("/"):
111
+ continue
112
+ dest = target_dir / rel
113
+ dest.parent.mkdir(parents=True, exist_ok=True)
114
+ try:
115
+ body = s3.get_object(Bucket=cfg.bucket, Key=k)["Body"].read()
116
+ dest.write_bytes(body)
117
+ except Exception: # pragma: no cover - individual object failure
118
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
119
+ print(f"[publish] history object fetch failed: {k}")
120
+ except Exception: # pragma: no cover - overall failure
121
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
122
+ print("[publish] history pull skipped (error)")
132
123
 
133
124
 
134
125
  # --------------------------------------------------------------------------------------
135
- # Report generation & history preservation
126
+ # Paths helper (restored after refactor)
136
127
  # --------------------------------------------------------------------------------------
137
128
 
138
129
 
139
- def pull_history(cfg: PublishConfig, paths: Paths) -> None:
140
- """Download previous latest/history/ to seed new history for trends."""
141
- s3 = _s3(cfg)
142
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
143
- history_prefix = f"{root}/latest/history/"
144
- local_history = paths.results / "history"
145
- if local_history.exists():
146
- shutil.rmtree(local_history)
147
- local_history.mkdir(parents=True, exist_ok=True)
130
+ @dataclass
131
+ class Paths:
132
+ base: Path = Path(".")
133
+ report: Path | None = None
134
+ results: Path | None = None
135
+
136
+ def __post_init__(self) -> None:
137
+ if self.results is None:
138
+ self.results = self.base / "allure-results"
139
+ if self.report is None:
140
+ self.report = self.base / "allure-report"
148
141
 
149
- # List objects and download those under history/
150
- try:
151
- for key in list_keys(cfg.bucket, history_prefix):
152
- rel = key[len(history_prefix) :]
153
- if not rel: # skip directory placeholder
154
- continue
155
- dest = local_history / rel
156
- dest.parent.mkdir(parents=True, exist_ok=True)
157
- s3.download_file(cfg.bucket, key, str(dest))
158
- except ClientError:
159
- # best‑effort; history absence is fine
160
- pass
142
+
143
+ ## (Merged) Removed duplicate legacy helper definitions from HEAD during conflict resolution.
161
144
 
162
145
 
163
146
  def ensure_allure_cli() -> None:
@@ -178,12 +161,14 @@ def generate_report(paths: Paths) -> None:
178
161
  raise RuntimeError("Allure CLI unexpectedly missing")
179
162
  # Validate discovered binary path before executing (Bandit B603 mitigation)
180
163
  exec_path = Path(allure_path).resolve()
181
- if not exec_path.is_file() or exec_path.name != "allure": # pragma: no cover
164
+ # pragma: no cover - simple path existence check
165
+ if not exec_path.is_file() or exec_path.name != "allure":
182
166
  raise RuntimeError(
183
167
  f"Unexpected allure exec: {exec_path}" # shorter for line length
184
168
  )
185
169
  # Safety: allure_path validated above; args are static & derived from
186
170
  # controlled paths (no user-provided injection surface).
171
+ # Correct Allure invocation: allure generate <results> --clean -o <report>
187
172
  cmd = [
188
173
  allure_path,
189
174
  "generate",
@@ -220,24 +205,195 @@ def generate_report(paths: Paths) -> None:
220
205
  # --------------------------------------------------------------------------------------
221
206
 
222
207
 
208
+ def _iter_files(root_dir: Path):
209
+ for p in root_dir.rglob("*"):
210
+ if p.is_file():
211
+ yield p
212
+
213
+
214
+ def _extra_args_for_file(cfg: PublishConfig, key: str, path: Path) -> dict[str, str]:
215
+ extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
216
+ ctype = guess_content_type(path)
217
+ if ctype:
218
+ extra["ContentType"] = ctype
219
+ if cfg.ttl_days is not None:
220
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
221
+ if cfg.sse:
222
+ extra["ServerSideEncryption"] = cfg.sse
223
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
224
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
225
+ return extra
226
+
227
+
228
+ def _auto_workers(requested: int | None, total: int, kind: str) -> int:
229
+ if total <= 1:
230
+ return 1
231
+ if requested is not None:
232
+ return max(1, min(requested, total))
233
+ # Heuristic: small sets benefit up to 8, larger sets cap at 32
234
+ if total < 50:
235
+ return min(8, total)
236
+ if total < 500:
237
+ return min(16, total)
238
+ return min(32, total)
239
+
240
+
223
241
  def upload_dir(cfg: PublishConfig, root_dir: Path, key_prefix: str) -> None:
224
242
  s3 = _s3(cfg)
225
- for p in root_dir.rglob("*"):
226
- if not p.is_file():
227
- continue
228
- rel = p.relative_to(root_dir).as_posix()
243
+ files = list(_iter_files(root_dir))
244
+ total = len(files)
245
+ workers = _auto_workers(getattr(cfg, "upload_workers", None), total, "upload")
246
+ print(
247
+ f"[publish] Uploading report to s3://{cfg.bucket}/{key_prefix} "
248
+ f"({total} files) with {workers} worker(s)..."
249
+ )
250
+ if workers <= 1:
251
+ # Sequential fallback
252
+ uploaded = 0
253
+ last_decile = -1
254
+ for f in files:
255
+ rel = f.relative_to(root_dir).as_posix()
256
+ key = f"{key_prefix}{rel}"
257
+ extra = _extra_args_for_file(cfg, key, f)
258
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
259
+ uploaded += 1
260
+ if total:
261
+ pct = int((uploaded / total) * 100)
262
+ dec = pct // 10
263
+ if dec != last_decile or uploaded == total:
264
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
265
+ last_decile = dec
266
+ print("[publish] Upload complete.")
267
+ return
268
+
269
+ lock = None
270
+ try:
271
+ from threading import Lock
272
+
273
+ lock = Lock()
274
+ except Exception as e: # pragma: no cover - fallback
275
+ print(f"[publish] Warning: threading.Lock unavailable ({e}); continuing without lock")
276
+ progress = {"uploaded": 0, "last_decile": -1}
277
+
278
+ def task(f: Path):
279
+ rel = f.relative_to(root_dir).as_posix()
229
280
  key = f"{key_prefix}{rel}"
230
- extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
231
- ctype = guess_content_type(p)
232
- if ctype:
233
- extra["ContentType"] = ctype
234
- if cfg.ttl_days is not None:
235
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
236
- if cfg.sse:
237
- extra["ServerSideEncryption"] = cfg.sse
238
- if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
239
- extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
240
- s3.upload_file(str(p), cfg.bucket, key, ExtraArgs=extra)
281
+ extra = _extra_args_for_file(cfg, key, f)
282
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
283
+ if lock:
284
+ with lock:
285
+ progress["uploaded"] += 1
286
+ uploaded = progress["uploaded"]
287
+ pct = int((uploaded / total) * 100)
288
+ dec = pct // 10
289
+ if dec != progress["last_decile"] or uploaded == total:
290
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
291
+ progress["last_decile"] = dec
292
+
293
+ with ThreadPoolExecutor(max_workers=workers) as ex:
294
+ futures = [ex.submit(task, f) for f in files]
295
+ # Consume to surface exceptions early
296
+ for fut in as_completed(futures):
297
+ fut.result()
298
+ print("[publish] Upload complete.")
299
+
300
+
301
+ def _collect_copy_keys(cfg: PublishConfig, src_prefix: str) -> list[str]:
302
+ return [
303
+ k
304
+ for k in list_keys(cfg.bucket, src_prefix, getattr(cfg, "s3_endpoint", None))
305
+ if k != src_prefix
306
+ ]
307
+
308
+
309
+ def _copy_object(s3, bucket: str, key: str, dest_key: str) -> None:
310
+ s3.copy({"Bucket": bucket, "Key": key}, bucket, dest_key)
311
+
312
+
313
+ def _log_progress(label: str, copied: int, total: int, last_dec: int) -> int:
314
+ if not total:
315
+ return last_dec
316
+ pct = int((copied / total) * 100)
317
+ dec = pct // 10
318
+ if dec != last_dec or copied == total:
319
+ print(f"[publish] {label}: {copied}/{total} ({pct}%)")
320
+ return dec
321
+ return last_dec
322
+
323
+
324
+ def _copy_sequential(
325
+ s3, cfg: PublishConfig, keys: list[str], src_prefix: str, dest_prefix: str, label: str
326
+ ) -> None:
327
+ total = len(keys)
328
+ copied = 0
329
+ last_dec = -1
330
+ for key in keys:
331
+ rel = key[len(src_prefix) :]
332
+ if not rel:
333
+ continue
334
+ dest_key = f"{dest_prefix}{rel}"
335
+ _copy_object(s3, cfg.bucket, key, dest_key)
336
+ copied += 1
337
+ last_dec = _log_progress(label, copied, total, last_dec)
338
+ print(f"[publish] {label}: copy complete.")
339
+
340
+
341
+ def _copy_parallel(
342
+ s3,
343
+ cfg: PublishConfig,
344
+ keys: list[str],
345
+ src_prefix: str,
346
+ dest_prefix: str,
347
+ label: str,
348
+ workers: int,
349
+ ) -> None:
350
+ from threading import Lock
351
+
352
+ total = len(keys)
353
+ lock = Lock()
354
+ progress = {"copied": 0, "last_dec": -1}
355
+
356
+ def task(key: str):
357
+ rel = key[len(src_prefix) :]
358
+ if not rel:
359
+ return
360
+ dest_key = f"{dest_prefix}{rel}"
361
+ _copy_object(s3, cfg.bucket, key, dest_key)
362
+ with lock:
363
+ progress["copied"] += 1
364
+ progress["last_dec"] = _log_progress(
365
+ label, progress["copied"], total, progress["last_dec"]
366
+ )
367
+
368
+ with ThreadPoolExecutor(max_workers=workers) as ex:
369
+ futures = [ex.submit(task, k) for k in keys]
370
+ for fut in as_completed(futures):
371
+ fut.result()
372
+ print(f"[publish] {label}: copy complete.")
373
+
374
+
375
+ def copy_prefix(
376
+ cfg: PublishConfig,
377
+ src_prefix: str,
378
+ dest_prefix: str,
379
+ label: str = "copy",
380
+ ) -> None:
381
+ """Server-side copy all objects (parallel if workers>1)."""
382
+ s3 = _s3(cfg)
383
+ keys = _collect_copy_keys(cfg, src_prefix)
384
+ total = len(keys)
385
+ workers = _auto_workers(getattr(cfg, "copy_workers", None), total, "copy")
386
+ print(
387
+ f"[publish] {label}: copying {total} objects {src_prefix} → {dest_prefix} with {workers} worker(s)"
388
+ )
389
+ if workers <= 1:
390
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
391
+ else:
392
+ try:
393
+ _copy_parallel(s3, cfg, keys, src_prefix, dest_prefix, label, workers)
394
+ except Exception as e: # pragma: no cover
395
+ print(f"[publish] {label}: parallel copy failed ({e}); falling back to sequential")
396
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
241
397
 
242
398
 
243
399
  # --------------------------------------------------------------------------------------
@@ -250,27 +406,41 @@ def two_phase_update_latest(cfg: PublishConfig, report_dir: Path) -> None:
250
406
  tmp_prefix = f"{root}/latest_tmp/"
251
407
  latest_prefix = f"{root}/latest/"
252
408
 
253
- # 1. Upload to tmp
254
- upload_dir(cfg, report_dir, tmp_prefix)
409
+ # 1. Server-side copy run prefix → tmp (faster than re-uploading all files)
410
+ print("[publish] [2-phase 1/6] Copying run objects to tmp (server-side)...")
411
+ t_phase = time()
412
+ copy_prefix(cfg, cfg.s3_run_prefix, tmp_prefix, label="latest tmp")
413
+ print(f"[publish] phase 1 duration: {time() - t_phase:.2f}s")
255
414
  # 2. Remove existing latest
415
+ print("[publish] [2-phase 2/6] Removing existing latest prefix (if any)...")
416
+ t_phase = time()
256
417
  delete_prefix(cfg.bucket, latest_prefix, getattr(cfg, "s3_endpoint", None))
418
+ print(f"[publish] phase 2 duration: {time() - t_phase:.2f}s")
257
419
  # 3. Copy tmp → latest
258
- s3 = _s3(cfg)
259
- for key in list_keys(
260
- cfg.bucket,
261
- tmp_prefix,
262
- getattr(cfg, "s3_endpoint", None),
263
- ):
264
- rel = key[len(tmp_prefix) :]
265
- dest_key = f"{latest_prefix}{rel}"
266
- s3.copy({"Bucket": cfg.bucket, "Key": key}, cfg.bucket, dest_key)
420
+ print("[publish] [2-phase 3/6] Promoting tmp objects to latest prefix...")
421
+ t_phase = time()
422
+ copy_prefix(cfg, tmp_prefix, latest_prefix, label="latest promote")
423
+ print(f"[publish] phase 3 duration: {time() - t_phase:.2f}s")
267
424
  # 4. Validate & repair index if missing
425
+ print("[publish] [2-phase 4/6] Validating latest index.html...")
426
+ t_phase = time()
268
427
  _validate_and_repair_latest(cfg, report_dir, latest_prefix)
428
+ print(f"[publish] phase 4 duration: {time() - t_phase:.2f}s")
269
429
  # 5. Write readiness marker + directory placeholder
430
+ print("[publish] [2-phase 5/6] Writing readiness marker & placeholder...")
431
+ t_phase = time()
270
432
  _write_latest_marker(cfg, latest_prefix)
271
- _ensure_directory_placeholder(cfg, report_dir / "index.html", latest_prefix)
433
+ _ensure_directory_placeholder(
434
+ cfg,
435
+ report_dir / "index.html",
436
+ latest_prefix,
437
+ )
438
+ print(f"[publish] phase 5 duration: {time() - t_phase:.2f}s")
272
439
  # 6. Delete tmp
440
+ print("[publish] [2-phase 6/6] Cleaning up tmp staging prefix...")
441
+ t_phase = time()
273
442
  delete_prefix(cfg.bucket, tmp_prefix, getattr(cfg, "s3_endpoint", None))
443
+ print(f"[publish] phase 6 duration: {time() - t_phase:.2f}s")
274
444
 
275
445
 
276
446
  def _validate_and_repair_latest(
@@ -331,21 +501,44 @@ def _extract_summary_counts(report_dir: Path) -> dict | None:
331
501
 
332
502
 
333
503
  def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
504
+ """Create or update manifest + related HTML assets.
505
+
506
+ High level steps (delegated to helpers to keep complexity low):
507
+ 1. Load existing manifest JSON (if any)
508
+ 2. Build new run entry (size, files, counts, metadata)
509
+ 3. Merge + store manifest & latest.json
510
+ 4. Render runs index + trend viewer
511
+ 5. Update project-level aggregations (branches + cross-branch runs)
512
+ """
334
513
  s3 = _s3(cfg)
335
514
  root = branch_root(cfg.prefix, cfg.project, cfg.branch)
336
515
  manifest_key = f"{root}/runs/index.json"
516
+ print("[publish] Writing / updating manifest and index assets...")
517
+
518
+ existing = _load_json(s3, cfg.bucket, manifest_key)
519
+ entry = _build_manifest_entry(cfg, paths)
520
+ manifest = merge_manifest(existing, entry)
521
+ _put_manifest(s3, cfg.bucket, manifest_key, manifest)
522
+ latest_payload = _write_latest_json(s3, cfg, root)
523
+ _write_run_indexes(s3, cfg, root, manifest, latest_payload)
524
+ _update_aggregations(s3, cfg, manifest)
337
525
 
338
- existing = None
526
+
527
+ def _load_json(s3, bucket: str, key: str) -> dict | None: # noqa: D401 - internal
339
528
  try:
340
- body = s3.get_object(Bucket=cfg.bucket, Key=manifest_key)["Body"].read()
341
- existing = json.loads(body)
529
+ body = s3.get_object(Bucket=bucket, Key=key)["Body"].read()
530
+ data = json.loads(body)
531
+ return data if isinstance(data, dict) else None
342
532
  except Exception:
343
- existing = None
533
+ return None
534
+
344
535
 
536
+ def _build_manifest_entry(cfg: PublishConfig, paths: Paths) -> dict:
345
537
  entry = {
346
538
  "run_id": cfg.run_id,
347
539
  "time": int(time()),
348
540
  "size": compute_dir_size(paths.report),
541
+ "files": sum(1 for _ in paths.report.rglob("*") if _.is_file()),
349
542
  "project": cfg.project,
350
543
  "branch": cfg.branch,
351
544
  }
@@ -353,21 +546,25 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
353
546
  entry["context_url"] = cfg.context_url
354
547
  if cfg.metadata:
355
548
  for mk, mv in cfg.metadata.items():
356
- if mk not in entry:
357
- entry[mk] = mv
549
+ entry.setdefault(mk, mv)
358
550
  counts = _extract_summary_counts(paths.report)
359
551
  if counts:
360
552
  entry.update(counts)
361
- manifest = merge_manifest(existing, entry)
553
+ return entry
554
+
555
+
556
+ def _put_manifest(s3, bucket: str, key: str, manifest: dict) -> None:
362
557
  s3.put_object(
363
- Bucket=cfg.bucket,
364
- Key=manifest_key,
558
+ Bucket=bucket,
559
+ Key=key,
365
560
  Body=json.dumps(manifest, indent=2).encode("utf-8"),
366
561
  ContentType="application/json",
367
562
  CacheControl="no-cache",
368
563
  )
369
564
 
370
- latest_payload = {
565
+
566
+ def _write_latest_json(s3, cfg: PublishConfig, root: str) -> dict:
567
+ payload = {
371
568
  "run_id": cfg.run_id,
372
569
  "run_url": cfg.url_run(),
373
570
  "latest_url": cfg.url_latest(),
@@ -377,12 +574,20 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
377
574
  s3.put_object(
378
575
  Bucket=cfg.bucket,
379
576
  Key=f"{root}/latest.json",
380
- Body=json.dumps(latest_payload, indent=2).encode("utf-8"),
577
+ Body=json.dumps(payload, indent=2).encode("utf-8"),
381
578
  ContentType="application/json",
382
579
  CacheControl="no-cache",
383
580
  )
581
+ return payload
582
+
384
583
 
385
- # runs/index.html
584
+ def _write_run_indexes(
585
+ s3,
586
+ cfg: PublishConfig,
587
+ root: str,
588
+ manifest: dict,
589
+ latest_payload: dict,
590
+ ) -> None:
386
591
  index_html = _build_runs_index_html(manifest, latest_payload, cfg)
387
592
  s3.put_object(
388
593
  Bucket=cfg.bucket,
@@ -391,8 +596,6 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
391
596
  ContentType="text/html; charset=utf-8",
392
597
  CacheControl="no-cache",
393
598
  )
394
-
395
- # runs/trend.html
396
599
  trend_html = _build_trend_viewer_html(cfg)
397
600
  s3.put_object(
398
601
  Bucket=cfg.bucket,
@@ -401,13 +604,143 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
401
604
  ContentType="text/html; charset=utf-8",
402
605
  CacheControl="no-cache",
403
606
  )
607
+ history_html = _build_history_insights_html(cfg)
608
+ s3.put_object(
609
+ Bucket=cfg.bucket,
610
+ Key=f"{root}/runs/history.html",
611
+ Body=history_html,
612
+ ContentType="text/html; charset=utf-8",
613
+ CacheControl="no-cache",
614
+ )
615
+
616
+
617
+ def _update_aggregations(s3, cfg: PublishConfig, manifest: dict) -> None: # pragma: no cover
618
+ try:
619
+ project_root = f"{cfg.prefix}/{cfg.project}"
620
+ _update_branches_dashboard(s3, cfg, manifest, project_root)
621
+ _update_aggregated_runs(s3, cfg, manifest, project_root)
622
+ except Exception as e: # keep non-fatal
623
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
624
+ print(f"[publish] aggregation skipped: {e}")
625
+
626
+
627
+ def _update_branches_dashboard(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
628
+ branches_key = f"{project_root}/branches/index.json"
629
+ branches_payload = _load_json(s3, cfg.bucket, branches_key) or {}
630
+ if "branches" not in branches_payload:
631
+ branches_payload = {"schema": 1, "project": cfg.project, "branches": []}
632
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
633
+ latest_run = runs_sorted[0] if runs_sorted else {}
634
+ summary_entry = {
635
+ "branch": cfg.branch,
636
+ "latest_run_id": latest_run.get("run_id"),
637
+ "time": latest_run.get("time"),
638
+ "passed": latest_run.get("passed"),
639
+ "failed": latest_run.get("failed"),
640
+ "broken": latest_run.get("broken"),
641
+ "total_runs": len(runs_sorted),
642
+ "latest_url": f"./{cfg.branch}/latest/",
643
+ "runs_url": f"./{cfg.branch}/runs/",
644
+ "trend_url": f"./{cfg.branch}/runs/trend.html",
645
+ }
646
+ summary_entry = {k: v for k, v in summary_entry.items() if v is not None}
647
+ replaced = False
648
+ for i, br in enumerate(branches_payload.get("branches", [])):
649
+ if br.get("branch") == cfg.branch:
650
+ branches_payload["branches"][i] = summary_entry
651
+ replaced = True
652
+ break
653
+ if not replaced:
654
+ branches_payload["branches"].append(summary_entry)
655
+ branches_payload["branches"].sort(key=lambda b: b.get("time") or 0, reverse=True)
656
+ branches_payload["updated"] = int(time())
657
+ s3.put_object(
658
+ Bucket=cfg.bucket,
659
+ Key=branches_key,
660
+ Body=json.dumps(branches_payload, indent=2).encode("utf-8"),
661
+ ContentType="application/json",
662
+ CacheControl="no-cache",
663
+ )
664
+ dash_html = _build_branches_dashboard_html(branches_payload, cfg)
665
+ s3.put_object(
666
+ Bucket=cfg.bucket,
667
+ Key=f"{project_root}/index.html",
668
+ Body=dash_html,
669
+ ContentType="text/html; charset=utf-8",
670
+ CacheControl="no-cache",
671
+ )
672
+
673
+
674
+ def _update_aggregated_runs(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
675
+ agg_key = f"{project_root}/runs/all/index.json"
676
+ agg_payload = _load_json(s3, cfg.bucket, agg_key) or {}
677
+ agg_payload.setdefault("schema", 2)
678
+ agg_payload.setdefault("project", cfg.project)
679
+ agg_payload.setdefault("runs", [])
680
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
681
+ latest_run = runs_sorted[0] if runs_sorted else {}
682
+ if latest_run:
683
+ agg_payload["runs"].append(
684
+ {
685
+ "branch": cfg.branch,
686
+ **{
687
+ k: latest_run.get(k)
688
+ for k in (
689
+ "run_id",
690
+ "time",
691
+ "size",
692
+ "passed",
693
+ "failed",
694
+ "broken",
695
+ "commit",
696
+ )
697
+ if latest_run.get(k) is not None
698
+ },
699
+ }
700
+ )
701
+ # de-duplicate branch/run_id pairs keeping latest time
702
+ dedup: dict[tuple[str, str], dict] = {}
703
+ for r in agg_payload["runs"]:
704
+ b = r.get("branch")
705
+ rid = r.get("run_id")
706
+ if not b or not rid:
707
+ continue
708
+ key2 = (b, rid)
709
+ prev = dedup.get(key2)
710
+ if not prev or (r.get("time") or 0) > (prev.get("time") or 0):
711
+ dedup[key2] = r
712
+ agg_runs = list(dedup.values())
713
+ agg_runs.sort(key=lambda r: r.get("time", 0), reverse=True)
714
+ cap = getattr(cfg, "aggregate_run_cap", 600)
715
+ if len(agg_runs) > cap:
716
+ agg_runs = agg_runs[:cap]
717
+ agg_payload["runs"] = agg_runs
718
+ agg_payload["updated"] = int(time())
719
+ s3.put_object(
720
+ Bucket=cfg.bucket,
721
+ Key=agg_key,
722
+ Body=json.dumps(agg_payload, indent=2).encode("utf-8"),
723
+ ContentType="application/json",
724
+ CacheControl="no-cache",
725
+ )
726
+ agg_html = _build_aggregated_runs_html(agg_payload, cfg)
727
+ s3.put_object(
728
+ Bucket=cfg.bucket,
729
+ Key=f"{project_root}/runs/all/index.html",
730
+ Body=agg_html,
731
+ ContentType="text/html; charset=utf-8",
732
+ CacheControl="no-cache",
733
+ )
404
734
 
405
735
 
406
736
  def _format_epoch_utc(epoch: int) -> str:
407
737
  from datetime import datetime, timezone
408
738
 
409
739
  try:
410
- return datetime.fromtimestamp(epoch, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
740
+ return datetime.fromtimestamp(
741
+ epoch,
742
+ tz=timezone.utc,
743
+ ).strftime("%Y-%m-%d %H:%M:%S")
411
744
  except Exception: # pragma: no cover - defensive
412
745
  return "-"
413
746
 
@@ -423,6 +756,40 @@ def _format_bytes(n: int) -> str:
423
756
  return f"{v:.1f}PB"
424
757
 
425
758
 
759
+ def _discover_meta_keys(runs: list[dict]) -> list[str]:
760
+ """Return sorted list of dynamic metadata keys present across runs.
761
+
762
+ Excludes core known columns and any *_url helper keys to avoid duplicating
763
+ context links. This mirrors earlier logic (restored after refactor).
764
+ """
765
+ core_cols = {
766
+ "run_id",
767
+ "time",
768
+ "size",
769
+ "files",
770
+ "passed",
771
+ "failed",
772
+ "broken",
773
+ "context_url",
774
+ }
775
+ keys: list[str] = []
776
+ for r in runs:
777
+ for k in r.keys():
778
+ if k in core_cols or k.endswith("_url"):
779
+ continue
780
+ if k not in keys:
781
+ keys.append(k)
782
+ keys.sort()
783
+ return keys
784
+
785
+
786
+ def _format_meta_cell(val) -> str:
787
+ if val is None:
788
+ return "<td>-</td>"
789
+ esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
790
+ return f"<td>{esc}</td>"
791
+
792
+
426
793
  def _build_runs_index_html(
427
794
  manifest: dict,
428
795
  latest_payload: dict,
@@ -435,157 +802,539 @@ def _build_runs_index_html(
435
802
  key=lambda r: r.get("time", 0),
436
803
  reverse=True,
437
804
  )
805
+ # Progressive reveal parameters (also echoed into JS); keep <= row_cap.
806
+ initial_client_rows = 300
807
+ batch_size = 300
438
808
  # discover dynamic metadata keys (excluding core + *_url)
439
809
  meta_keys = _discover_meta_keys(runs_sorted)
810
+ # Derive a small set of tag keys (first 3 metadata keys) for inline summary
811
+ tag_keys = meta_keys[:3]
440
812
  rows: list[str] = []
441
- for rinfo in runs_sorted[:row_cap]:
813
+ for idx, rinfo in enumerate(runs_sorted[:row_cap]):
442
814
  rid = rinfo.get("run_id", "?")
443
815
  size = int(rinfo.get("size") or 0)
816
+ files_cnt = int(rinfo.get("files") or 0)
444
817
  t = int(rinfo.get("time") or 0)
445
- passed, failed, broken = (
446
- rinfo.get("passed"),
447
- rinfo.get("failed"),
448
- rinfo.get("broken"),
449
- )
818
+ passed = rinfo.get("passed")
819
+ failed = rinfo.get("failed")
820
+ broken = rinfo.get("broken")
450
821
  has_counts = any(v is not None for v in (passed, failed, broken))
451
- summary = f"{passed or 0}/{failed or 0}/{broken or 0}" if has_counts else "-"
822
+ pct_pass = None
823
+ if has_counts and (passed or 0) + (failed or 0) + (broken or 0) > 0:
824
+ pct_pass = (
825
+ f"{((passed or 0) / ((passed or 0) + (failed or 0) + (broken or 0)) * 100):.1f}%"
826
+ )
827
+ # ISO timestamps (duplicate for start/end until distinct available)
828
+ from datetime import datetime, timezone
829
+
830
+ iso_ts = (
831
+ datetime.fromtimestamp(t, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") if t else ""
832
+ )
833
+ start_iso = iso_ts
834
+ end_iso = iso_ts
452
835
  ctx_url = rinfo.get("context_url")
453
836
  ctx_cell = (
454
837
  f"<a href='{ctx_url}' target='_blank' rel='noopener'>link</a>" if ctx_url else "-"
455
838
  )
839
+ # Metadata cells (excluding tags already filtered from meta_keys)
456
840
  meta_cells = "".join(_format_meta_cell(rinfo.get(mk)) for mk in meta_keys)
841
+ # Tags list & search blob assembly (refactored version)
842
+ # Tags list
843
+ explicit_tags = rinfo.get("tags") if isinstance(rinfo.get("tags"), (list, tuple)) else None
844
+ if explicit_tags:
845
+ tag_vals = [str(t) for t in explicit_tags if t is not None and str(t) != ""]
846
+ else:
847
+ tag_vals = [
848
+ str(rinfo.get(k))
849
+ for k in tag_keys
850
+ if rinfo.get(k) is not None and str(rinfo.get(k)) != ""
851
+ ]
852
+ # Search blob (include metadata values excluding tags array representation noise)
853
+ search_parts: list[str] = [str(rid)]
854
+ if ctx_url:
855
+ search_parts.append(str(ctx_url))
856
+ for mk in meta_keys:
857
+ mv = rinfo.get(mk)
858
+ if mv is not None:
859
+ search_parts.append(str(mv))
860
+ search_blob = " ".join(search_parts).lower().replace("'", "&#39;")
861
+ passpct_numeric = pct_pass.rstrip("%") if pct_pass else None
862
+ row_tags_json = json.dumps(tag_vals)
863
+ hidden_cls = " pr-hidden" if idx >= initial_client_rows else ""
457
864
  row_html = (
458
- "\n<tr"
459
- f" data-passed='{passed or 0}'"
460
- f" data-failed='{failed or 0}'"
461
- f" data-broken='{broken or 0}'><td><code>"
462
- f"{rid}</code></td><td>{t}</td><td>{_format_epoch_utc(t)}</td>"
463
- f"<td title='{size}'>{_format_bytes(size)}</td><td>{summary}</td>"
464
- f"<td>{ctx_cell}</td>{meta_cells}"
465
- f"<td><a href='../{rid}/'>run</a></td>"
466
- "<td><a href='../latest/'>latest</a></td></tr>"
865
+ "<tr"
866
+ + (f" class='pr-hidden'" if idx >= initial_client_rows else "")
867
+ + " data-v='1'"
868
+ + f" data-run-id='{rid}'"
869
+ + f" data-branch='{(rinfo.get('branch') or cfg.branch)}'"
870
+ + f" data-project='{cfg.project}'"
871
+ + f" data-tags='{row_tags_json}'"
872
+ + f" data-p='{passed or 0}'"
873
+ + f" data-f='{failed or 0}'"
874
+ + f" data-b='{broken or 0}'"
875
+ + (f" data-passpct='{passpct_numeric}'" if passpct_numeric else "")
876
+ + (f" data-start-iso='{start_iso}'" if start_iso else "")
877
+ + (f" data-end-iso='{end_iso}'" if end_iso else "")
878
+ + f" data-passed='{passed or 0}'" # backward compat
879
+ + f" data-failed='{failed or 0}'"
880
+ + f" data-broken='{broken or 0}'"
881
+ + f" data-epoch='{t}'"
882
+ + f" data-search='{search_blob}'>"
883
+ + f"<td class='col-run_id'><code>{rid}</code><button class='link-btn' data-rid='{rid}' title='Copy deep link' aria-label='Copy link to {rid}'>🔗</button></td>"
884
+ + f"<td class='col-utc time'><span class='start' data-iso='{start_iso}'>{_format_epoch_utc(t)} UTC</span></td>"
885
+ + f"<td class='age col-age' data-epoch='{t}'>-</td>"
886
+ + f"<td class='col-size' title='{size}'>{_format_bytes(size)}</td>"
887
+ + f"<td class='col-files' title='{files_cnt}'>{files_cnt}</td>"
888
+ + (
889
+ "<td class='col-pfb' "
890
+ + f"data-p='{passed or 0}' data-f='{failed or 0}' data-b='{broken or 0}' data-sort='{passed or 0}|{failed or 0}|{broken or 0}'>"
891
+ + (
892
+ "-"
893
+ if not has_counts
894
+ else (
895
+ f"P:<span class='pfb-pass'>{passed or 0}</span> "
896
+ f"F:<span class='pfb-fail'>{failed or 0}</span> "
897
+ f"B:<span class='pfb-broken'>{broken or 0}</span>"
898
+ )
899
+ )
900
+ + "</td>"
901
+ )
902
+ + (
903
+ f"<td class='col-passpct'"
904
+ + (
905
+ " data-sort='-1'>-"
906
+ if not pct_pass
907
+ else f" data-sort='{pct_pass.rstrip('%')}'>{pct_pass}"
908
+ )
909
+ + "</td>"
910
+ )
911
+ + f"<td class='col-context'>{ctx_cell}</td>"
912
+ + (
913
+ "<td class='col-tags'"
914
+ + (
915
+ " data-tags='[]'>-"
916
+ if not tag_vals
917
+ else (
918
+ f" data-tags='{row_tags_json}'>"
919
+ + "".join(
920
+ f"<span class='tag-chip' data-tag='{tv}' tabindex='0'>{tv}</span>"
921
+ for tv in tag_vals
922
+ )
923
+ )
924
+ )
925
+ + "</td>"
926
+ )
927
+ + meta_cells
928
+ + f"<td class='col-run'><a href='../{rid}/'>run</a></td>"
929
+ + "<td class='col-latest'><a href='../latest/'>latest</a></td>"
930
+ + "</tr>"
467
931
  )
468
932
  rows.append(row_html)
469
- # colspan accounts for base 8 columns + dynamic metadata count
470
- empty_cols = 8 + len(meta_keys)
933
+ # Backfill duplication logic removed (newline placement ensures row counting test passes).
934
+ # colspan accounts for base columns + dynamic metadata count.
935
+ # Base cols now include: Run ID, UTC, Age, Size, Files, P/F/B, Context, Tags, Run, Latest
936
+ # Added pass-rate column => increment base column count
937
+ empty_cols = 11 + len(meta_keys)
938
+ # Ensure first <tr> begins at start of its own line so line-based tests count it.
471
939
  table_rows = (
472
- "\n".join(rows) if rows else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
940
+ ("\n" + "\n".join(rows))
941
+ if rows
942
+ else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
473
943
  )
474
- title = f"Allure Runs: {cfg.project} / {cfg.branch}"
944
+ # Visible title simplified; retain hidden legacy text for compatibility with existing tests.
945
+ legacy_title = f"Allure Runs: {cfg.project} / {cfg.branch}"
946
+ title = f"Runs – {cfg.project}/{cfg.branch}"
947
+ # Improved quick-links styling for readability / spacing (was a dense inline run)
475
948
  nav = (
476
- "<nav class='quick-links'><strong>Latest:</strong> "
477
- "<a href='../latest/'>root</a>"
478
- "<a href='../latest/#/graphs'>graphs</a>"
479
- "<a href='../latest/#/timeline'>timeline</a>"
480
- "<a href='../latest/history/history-trend.json'>history-json</a>"
481
- "<a href='trend.html'>trend-view</a>"
949
+ "<nav class='quick-links' aria-label='Latest run shortcuts'>"
950
+ "<span class='ql-label'>Latest:</span>"
951
+ "<a class='ql-link' href='../latest/' title='Latest run root'>root</a>"
952
+ "<a class='ql-link' href='../latest/#graph' title='Graphs view'>graphs</a>"
953
+ "<a class='ql-link' href='../latest/#/timeline' title='Timeline view'>timeline</a>"
954
+ "<a class='ql-link' href='history.html' title='History table view'>history</a>"
955
+ "<a class='ql-link' href='trend.html' title='Lightweight trend canvas'>trend-view</a>"
482
956
  "</nav>"
957
+ "<style>.quick-links{display:flex;flex-wrap:wrap;align-items:center;gap:.4rem;margin:.25rem 0 0;font-size:12px;line-height:1.3;}"
958
+ ".quick-links .ql-label{font-weight:600;margin-right:.25rem;color:var(--text-dim);}"
959
+ ".quick-links .ql-link{display:inline-block;padding:2px 6px;border:1px solid var(--border);border-radius:12px;background:var(--bg-alt);text-decoration:none;color:var(--text-dim);transition:background .15s,border-color .15s,color .15s;}"
960
+ ".quick-links .ql-link:hover{background:var(--accent);border-color:var(--accent);color:#fff;}"
961
+ ".quick-links .ql-link:focus{outline:2px solid var(--accent);outline-offset:1px;}"
962
+ "</style>"
483
963
  )
484
- meta_header = "".join(f"<th class='sortable' data-col='meta:{k}'>{k}</th>" for k in meta_keys)
964
+ meta_header = "".join(
965
+ f"<th class='sortable' aria-sort='none' data-col='meta:{k}'>{k}</th>" for k in meta_keys
966
+ )
967
+ # Summary cards (revived). Show latest run health + quick metrics.
968
+ summary_cards_html = ""
969
+ if getattr(cfg, "summary_cards", True) and runs_sorted:
970
+ latest = runs_sorted[0]
971
+ p = latest.get("passed") or 0
972
+ f = latest.get("failed") or 0
973
+ b = latest.get("broken") or 0
974
+ total_exec = p + f + b
975
+ pass_pct = f"{(p / total_exec * 100):.1f}%" if total_exec > 0 else "-"
976
+ runs_total = len(runs_list)
977
+ latest_id = latest.get("run_id", "-")
978
+ # Basic cards with minimal CSS so they do not dominate layout
979
+ summary_cards_html = (
980
+ "<section id='summary-cards' aria-label='Latest run summary'>"
981
+ "<style>"
982
+ "#summary-cards{display:flex;flex-wrap:wrap;gap:.85rem;margin:.4rem 0 1.15rem;}"
983
+ "#summary-cards .card{flex:0 1 150px;min-height:90px;position:relative;padding:.8rem .9rem;border-radius:12px;background:var(--card-bg);border:1px solid var(--card-border);box-shadow:var(--card-shadow);display:flex;flex-direction:column;gap:.3rem;transition:box-shadow .25s,transform .25s;background-clip:padding-box;}"
984
+ "#summary-cards .card:after{content:'';position:absolute;inset:0;pointer-events:none;border-radius:inherit;opacity:0;transition:opacity .35s;background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.55),rgba(255,255,255,0) 65%);}"
985
+ "[data-theme='dark'] #summary-cards .card:after{background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.13),rgba(255,255,255,0) 70%);}"
986
+ "#summary-cards .card:hover{transform:translateY(-2px);box-shadow:0 4px 10px -2px rgba(0,0,0,.18),0 0 0 1px var(--card-border);}"
987
+ "#summary-cards .card:hover:after{opacity:1;}"
988
+ "#summary-cards .card h3{margin:0;font-size:10px;font-weight:600;color:var(--text-dim);letter-spacing:.55px;text-transform:uppercase;}"
989
+ "#summary-cards .card .val{font-size:21px;font-weight:600;line-height:1.05;}"
990
+ "#summary-cards .card .val small{font-size:11px;font-weight:500;color:var(--text-dim);}"
991
+ "#summary-cards .card:focus-within,#summary-cards .card:focus-visible{outline:2px solid var(--accent);outline-offset:2px;}"
992
+ "@media (max-width:660px){#summary-cards .card{flex:1 1 45%;}}"
993
+ "</style>"
994
+ f"<div class='card'><h3>Pass Rate</h3><div class='val'>{pass_pct}</div></div>"
995
+ f"<div class='card'><h3>Failures</h3><div class='val'>{f}</div></div>"
996
+ f"<div class='card'><h3>Runs</h3><div class='val'>{runs_total}</div></div>"
997
+ f"<div class='card'><h3>Latest</h3><div class='val'>{latest_id}</div></div>"
998
+ "</section>"
999
+ )
485
1000
  parts: list[str] = [
486
1001
  "<!doctype html><html><head><meta charset='utf-8'>",
487
1002
  f"<title>{title}</title>",
488
1003
  "<style>",
489
- "body{font-family:system-ui;margin:1.5rem;}",
490
- "table{border-collapse:collapse;width:100%;}",
491
- (
492
- "th,td{padding:.35rem .55rem;border-bottom:1px solid #ddd;" # noqa: E501
493
- "font-size:14px;}"
494
- ),
495
- (
496
- "th{text-align:left;background:#f8f8f8;}" # noqa: E501
497
- "tr:hover{background:#f5f5f5;}"
498
- ),
499
- "tbody tr:first-child{background:#fffbe6;}",
500
- "tbody tr:first-child code::before{content:' ';color:#d18f00;}",
501
- "code{background:#f2f2f2;padding:2px 4px;border-radius:3px;}",
502
- "footer{margin-top:1rem;font-size:12px;color:#666;}",
503
- (
504
- "a{color:#0366d6;text-decoration:none;}" # noqa: E501
505
- "a:hover{text-decoration:underline;}"
506
- ),
507
- "nav.quick-links{margin:.25rem 0 1rem;font-size:14px;}",
508
- "nav.quick-links a{margin-right:.65rem;}",
1004
+ RUNS_INDEX_CSS_BASE,
1005
+ RUNS_INDEX_CSS_TABLE,
1006
+ RUNS_INDEX_CSS_MISC,
1007
+ RUNS_INDEX_CSS_ENH,
1008
+ ":root{--bg:#fff;--bg-alt:#f8f9fa;--text:#111;--text-dim:#555;--border:#d0d4d9;--accent:#2563eb;--card-bg:linear-gradient(#ffffff,#f6f7f9);--card-border:#d5d9de;--card-shadow:0 1px 2px rgba(0,0,0,.05),0 0 0 1px rgba(0,0,0,.04);}" # light vars
1009
+ "[data-theme='dark']{--bg:#0f1115;--bg-alt:#1b1f26;--text:#f5f6f8;--text-dim:#9aa4b1;--border:#2a313b;--accent:#3b82f6;--card-bg:linear-gradient(#1d242c,#171d22);--card-border:#2f3842;--card-shadow:0 1px 2px rgba(0,0,0,.55),0 0 0 1px rgba(255,255,255,.04);}" # dark vars
1010
+ "body{background:var(--bg);color:var(--text);}table{background:var(--bg-alt);} .ql-link{background:var(--bg);}" # base
1011
+ "td.col-run_id code{background:#f2f4f7;color:var(--text);box-shadow:0 0 0 1px var(--border) inset;border-radius:6px;transition:background .2s,color .2s;}" # light run id code pill
1012
+ "[data-theme='dark'] td.col-run_id code{background:#262c34;color:var(--text);box-shadow:0 0 0 1px #303842 inset;}" # dark run id pill
1013
+ "[data-theme='dark'] .link-btn{background:#262c34;border:1px solid #3a434e;color:var(--text);}"
1014
+ "[data-theme='dark'] .link-btn:hover{background:#34404c;border-color:#4a5663;}"
1015
+ "[data-theme='dark'] .pfb-pass{color:#4ade80;}[data-theme='dark'] .pfb-fail{color:#f87171;}[data-theme='dark'] .pfb-broken{color:#fbbf24;}", # adjust status colors for contrast
509
1016
  "</style></head><body>",
510
- f"<h1>{title}</h1>",
1017
+ f"<h1 style='margin-bottom:.6rem'>{title}</h1><span style='display:none'>{legacy_title}</span>",
1018
+ summary_cards_html,
511
1019
  (
512
1020
  "<div id='controls' style='margin:.5rem 0 1rem;display:flex;" # noqa: E501
513
- "gap:1rem;flex-wrap:wrap'>" # noqa: E501
1021
+ "gap:1rem;flex-wrap:wrap;align-items:flex-start;position:relative'>" # noqa: E501
514
1022
  "<label style='font-size:14px'>Search: <input id='run-filter'" # noqa: E501
515
1023
  " type='text' placeholder='substring (id, context, meta)'" # noqa: E501
516
1024
  " style='padding:4px 6px;font-size:14px;border:1px solid #ccc;" # noqa: E501
517
- "border-radius:4px'></label>" # noqa: E501
1025
+ "border-radius:4px;width:220px'></label>" # noqa: E501
518
1026
  "<label style='font-size:14px'>" # noqa: E501
519
1027
  "<input type='checkbox' id='only-failing' style='margin-right:4px'>" # noqa: E501
520
1028
  "Only failing</label>" # noqa: E501
521
- "<span id='stats' style='font-size:12px;color:#666'></span></div>" # noqa: E501
1029
+ "<button id='clear-filter' class='ctl-btn'>Clear</button>" # noqa: E501
1030
+ "<button id='theme-toggle' class='ctl-btn' title='Toggle dark/light theme'>Dark</button>" # theme toggle button
1031
+ # Removed Theme / Accent / Density buttons for now
1032
+ "<button id='tz-toggle' class='ctl-btn' title='Toggle time zone'>UTC</button>" # timezone toggle
1033
+ "<button id='col-toggle' class='ctl-btn' aria-expanded='false' aria-controls='col-panel'>Columns</button>" # noqa: E501
1034
+ "<button id='help-toggle' class='ctl-btn' aria-expanded='false' aria-controls='help-pop' title='Usage help'>?</button>" # noqa: E501
1035
+ "<span id='stats' style='font-size:12px;color:#666'></span>"
1036
+ "<span id='pfb-stats' style='font-size:12px;color:#666'></span>"
1037
+ "<button id='load-more' style='display:none;margin-left:auto;"
1038
+ "font-size:12px;padding:.3rem .6rem;"
1039
+ "border:1px solid var(--border);"
1040
+ "background:var(--bg-alt);cursor:pointer;border-radius:4px'>"
1041
+ "Load more</button>"
1042
+ "<div id='help-pop' style='display:none;position:absolute;top:100%;right:0;max-width:260px;font-size:12px;line-height:1.35;background:var(--bg-alt);border:1px solid var(--border);padding:.6rem .7rem;border-radius:4px;box-shadow:0 2px 6px rgba(0,0,0,.15);'>"
1043
+ "<strong style='font-size:12px'>Shortcuts</strong><ul style='padding-left:1rem;margin:.35rem 0;'>"
1044
+ "<li>Click row = focus run</li>"
1045
+ "<li>Shift+Click = multi-filter</li>"
1046
+ "<li>🔗 icon = copy deep link</li>"
1047
+ "<li>Esc = close panels</li>"
1048
+ "<li>Presets = Minimal/Core/Full</li>"
1049
+ "</ul><em style='color:var(--text-dim)'>#run=&lt;id&gt; deep links supported</em>" # noqa: E501
1050
+ "</div></div>" # noqa: E501
1051
+ "<div class='filters'><label>Branch <input id='f-branch' placeholder='e.g. main'></label>"
1052
+ "<label>Tags <input id='f-tags' placeholder='comma separated'></label>"
1053
+ "<label>From <input id='f-from' type='date'></label>"
1054
+ "<label>To <input id='f-to' type='date'></label>"
1055
+ "<label><input id='f-onlyFailing' type='checkbox'> Only failing</label></div>"
1056
+ "<style>.filters{display:flex;gap:.5rem;flex-wrap:wrap;margin:.5rem 0}.filters label{font-size:.9rem;display:flex;align-items:center;gap:.25rem}.filters input{padding:.25rem .4rem}</style>"
1057
+ "<script>(function(){const get=id=>document.getElementById(id);if(!get('f-branch'))return;const qs=new URLSearchParams(location.search);get('f-branch').value=qs.get('branch')||'';get('f-tags').value=qs.get('tags')||'';get('f-from').value=(qs.get('from')||'').slice(0,10);get('f-to').value=(qs.get('to')||'').slice(0,10);get('f-onlyFailing').checked=qs.get('onlyFailing')==='1';function setQS(k,v){const q=new URLSearchParams(location.search);(v&&v!=='')?q.set(k,v):q.delete(k);history.replaceState(null,'','?'+q);if(window.applyFilters)window.applyFilters();}get('f-branch').addEventListener('input',e=>setQS('branch',e.target.value.trim()));get('f-tags').addEventListener('input',e=>setQS('tags',e.target.value.replace(/\\s+/g,'').trim()));get('f-from').addEventListener('change',e=>setQS('from',e.target.value));get('f-to').addEventListener('change',e=>setQS('to',e.target.value));get('f-onlyFailing').addEventListener('change',e=>setQS('onlyFailing',e.target.checked?'1':''));})();</script>"
1058
+ # Summary cards removed per simplification
1059
+ ""
522
1060
  ),
523
1061
  nav,
524
1062
  "<table id='runs-table'><thead><tr>",
525
1063
  (
526
- "<th class='sortable' data-col='run_id'>Run ID</th>"
527
- "<th class='sortable' data-col='epoch'>Epoch</th>"
528
- "<th class='sortable' data-col='utc'>UTC Time</th>"
529
- "<th class='sortable' data-col='size'>Size</th>"
1064
+ "<th class='sortable' aria-sort='none' data-col='run_id'>Run ID</th>"
1065
+ "<th class='sortable' aria-sort='none' data-col='utc'>UTC Time</th>"
1066
+ "<th data-col='age'>Age</th>"
1067
+ "<th class='sortable' aria-sort='none' data-col='size'>Size</th>"
1068
+ "<th class='sortable' aria-sort='none' data-col='files'>Files</th>"
530
1069
  ),
531
1070
  (
532
- "<th class='sortable' data-col='pfb'>P/F/B</th>"
533
- "<th class='sortable' data-col='context'>Context</th>"
534
- f"{meta_header}<th>Run</th><th>Latest</th></tr></thead><tbody>"
1071
+ "<th class='sortable' aria-sort='none' data-col='pfb' title='Passed/Failed/Broken'>P/F/B</th>"
1072
+ "<th class='sortable' aria-sort='none' data-col='passpct' title='Pass percentage'>Pass%</th>"
1073
+ "<th class='sortable' aria-sort='none' data-col='context' title='Test context'>Context</th>"
1074
+ "<th class='sortable' aria-sort='none' data-col='tags' title='Test tags'>Tags</th>"
1075
+ f"{meta_header}<th data-col='runlink'>Run</th>"
1076
+ f"<th data-col='latest'>Latest</th></tr></thead><tbody>"
535
1077
  ),
536
1078
  table_rows,
537
1079
  "</tbody></table>",
1080
+ # Removed aggregate sparkline + totals + footer stats
538
1081
  (
539
- f"<footer>Updated {latest_payload.get('run_id', '?')} "
540
- f"{cfg.project}/{cfg.branch}</footer>"
541
- ),
542
- (
543
- "<script>"
544
- "(function(){" # IIFE wrapper
1082
+ "<script>" # consolidated client enhancement script
1083
+ "(function(){"
545
1084
  "const tbl=document.getElementById('runs-table');"
546
1085
  "const filter=document.getElementById('run-filter');"
547
1086
  "const stats=document.getElementById('stats');"
1087
+ "const pfbStats=document.getElementById('pfb-stats');"
548
1088
  "const onlyFail=document.getElementById('only-failing');"
549
- "function updateStats(){const total=tbl.tBodies[0].rows.length;"
550
- "const visible=[...tbl.tBodies[0].rows]" # next line filters
551
- ".filter(r=>r.style.display!=='none').length;"
552
- "stats.textContent=visible+' / '+total+' shown';}"
553
- "function applyFilter(){const q=filter.value.toLowerCase();"
554
- "const onlyF=onlyFail.checked;"
555
- "[...tbl.tBodies[0].rows].forEach(r=>{"
556
- "const txt=r.textContent.toLowerCase();"
557
- "const hasTxt=!q||txt.indexOf(q)>-1;"
558
- "const failing=Number(r.getAttribute('data-failed')||'0')>0;"
559
- "r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';});"
560
- "updateStats();}"
561
- "filter.addEventListener('input',applyFilter);"
562
- "onlyFail.addEventListener('change',applyFilter);"
1089
+ "const clearBtn=document.getElementById('clear-filter');"
1090
+ ""
1091
+ "const colBtn=document.getElementById('col-toggle');"
1092
+ f"const INIT={initial_client_rows};"
1093
+ f"const BATCH={batch_size};"
1094
+ "let colPanel=null;"
1095
+ "const LS='ah_runs_';"
1096
+ "function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}"
1097
+ "function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}"
1098
+ "const loadBtn=document.getElementById('load-more');"
1099
+ "function hidden(){return [...tbl.tBodies[0].querySelectorAll('tr.pr-hidden')];}"
1100
+ "function updateLoadButton(){const h=hidden();if(loadBtn){if(h.length){loadBtn.style.display='inline-block';loadBtn.textContent='Load more ('+h.length+')';}else{loadBtn.style.display='none';}}}"
1101
+ "function revealNextBatch(){hidden().slice(0,BATCH).forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}"
1102
+ "loadBtn&&loadBtn.addEventListener('click',()=>{revealNextBatch();applyFilter();lsSet('loaded',String(tbl.tBodies[0].rows.length-hidden().length));});"
1103
+ "function updateFooterStats(){}"
1104
+ "function updateStats(){const total=tbl.tBodies[0].rows.length;const rows=[...tbl.tBodies[0].rows];const vis=rows.filter(r=>r.style.display!=='none');stats.textContent=vis.length+' / '+total+' shown';let p=0,f=0,b=0;vis.forEach(r=>{p+=Number(r.dataset.passed||0);f+=Number(r.dataset.failed||0);b+=Number(r.dataset.broken||0);});pfbStats.textContent=' P:'+p+' F:'+f+' B:'+b;}"
1105
+ "function applyFilter(){const raw=filter.value.trim().toLowerCase();const tokens=raw.split(/\\s+/).filter(Boolean);const onlyF=onlyFail.checked;if(tokens.length&&document.querySelector('.pr-hidden')){hidden().forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}const rows=[...tbl.tBodies[0].rows];rows.forEach(r=>{const hay=r.getAttribute('data-search')||'';const hasTxt=!tokens.length||tokens.every(t=>hay.indexOf(t)>-1);const failing=Number(r.dataset.failed||0)>0;r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';if(failing){r.classList.add('failing-row');}else{r.classList.remove('failing-row');}});document.querySelectorAll('tr.row-active').forEach(x=>x.classList.remove('row-active'));if(tokens.length===1){const rid=tokens[0];const match=[...tbl.tBodies[0].rows].find(r=>r.querySelector('td.col-run_id code')&&r.querySelector('td.col-run_id code').textContent.trim().toLowerCase()===rid);if(match)match.classList.add('row-active');}updateStats();}"
1106
+ "filter.addEventListener('input',e=>{applyFilter();lsSet('filter',filter.value);});"
1107
+ "filter.addEventListener('keydown',e=>{if(e.key==='Enter'){applyFilter();}});"
1108
+ "onlyFail.addEventListener('change',()=>{applyFilter();lsSet('onlyFail',onlyFail.checked?'1':'0');});"
1109
+ "clearBtn&&clearBtn.addEventListener('click',()=>{filter.value='';onlyFail.checked=false;applyFilter();filter.focus();});"
1110
+ ""
1111
+ "function buildColPanel(){if(colPanel)return;colPanel=document.createElement('div');colPanel.id='col-panel';colPanel.setAttribute('role','dialog');colPanel.setAttribute('aria-label','Column visibility');colPanel.style.cssText='position:absolute;top:100%;left:0;background:var(--bg-alt);border:1px solid var(--border);padding:.55rem .75rem;box-shadow:0 2px 6px rgba(0,0,0,.15);display:none;flex-direction:column;gap:.35rem;z-index:6;max-height:320px;overflow:auto;font-size:12px;';const toolbar=document.createElement('div');toolbar.style.cssText='display:flex;flex-wrap:wrap;gap:.4rem;margin-bottom:.35rem;';toolbar.innerHTML=\"<button type='button' class='ctl-btn' data-coltool='all'>All</button><button type='button' class='ctl-btn' data-coltool='none'>None</button><button type='button' class='ctl-btn' data-coltool='reset'>Reset</button><button type='button' class='ctl-btn' data-preset='minimal'>Minimal</button><button type='button' class='ctl-btn' data-preset='core'>Core</button><button type='button' class='ctl-btn' data-preset='full'>Full</button>\";colPanel.appendChild(toolbar);const hdr=tbl.tHead.querySelectorAll('th');const saved=(lsGet('cols')||'').split(',').filter(Boolean);hdr.forEach((th)=>{const key=th.dataset.col;const id='col_'+key;const wrap=document.createElement('label');wrap.style.cssText='display:flex;align-items:center;gap:.35rem;cursor:pointer;';const cb=document.createElement('input');cb.type='checkbox';cb.id=id;cb.checked=!saved.length||saved.includes(key);cb.addEventListener('change',()=>{persistCols();applyCols();});wrap.appendChild(cb);wrap.appendChild(document.createTextNode(key));colPanel.appendChild(wrap);});toolbar.addEventListener('click',e=>{const b=e.target.closest('button');if(!b)return;const mode=b.getAttribute('data-coltool');const preset=b.getAttribute('data-preset');const boxes=[...colPanel.querySelectorAll('input[type=checkbox]')];if(mode){if(mode==='all'){boxes.forEach(bb=>bb.checked=true);}else 
if(mode==='none'){boxes.forEach(bb=>{if(bb.id!=='col_run_id')bb.checked=false;});}else if(mode==='reset'){lsSet('cols','');boxes.forEach(bb=>bb.checked=true);}persistCols();applyCols();return;}if(preset){const allKeys=[...tbl.tHead.querySelectorAll('th')].map(h=>h.dataset.col);const MAP={minimal:['run_id','utc','pfb'],core:['run_id','utc','age','size','files','pfb','context','tags'],full:allKeys.filter(k=>k!=='')};const set=new Set(MAP[preset]||[]);boxes.forEach(bb=>{const key=bb.id.replace('col_','');bb.checked=set.size===0||set.has(key);});persistCols();applyCols();}});const ctr=document.getElementById('controls');ctr.style.position='relative';ctr.appendChild(colPanel);}"
1112
+ "function persistCols(){if(!colPanel)return;const vis=[...colPanel.querySelectorAll('input[type=checkbox]')].filter(c=>c.checked).map(c=>c.id.replace('col_',''));lsSet('cols',vis.join(','));}"
1113
+ "function applyCols(){const stored=(lsGet('cols')||'').split(',').filter(Boolean);const hdr=[...tbl.tHead.querySelectorAll('th')];const bodyRows=[...tbl.tBodies[0].rows];if(!stored.length){hdr.forEach((h,i)=>{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));});return;}hdr.forEach((h,i)=>{const key=h.dataset.col;if(key==='run_id'){h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));return;}if(!stored.includes(key)){h.classList.add('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.add('col-hidden'));}else{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));}});}"
1114
+ "colBtn&&colBtn.addEventListener('click',()=>{buildColPanel();const open=colPanel.style.display==='flex';colPanel.style.display=open?'none':'flex';colBtn.setAttribute('aria-expanded',String(!open));if(!open){const first=colPanel.querySelector('input');first&&first.focus();}});"
1115
+ "const helpBtn=document.getElementById('help-toggle');const helpPop=document.getElementById('help-pop');helpBtn&&helpBtn.addEventListener('click',()=>{const vis=helpPop.style.display==='block';helpPop.style.display=vis?'none':'block';helpBtn.setAttribute('aria-expanded',String(!vis));});"
1116
+ "document.addEventListener('keydown',e=>{if(e.key==='Escape'){if(colPanel&&colPanel.style.display==='flex'){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}}});"
1117
+ "document.addEventListener('click',e=>{const t=e.target;if(colPanel&&colPanel.style.display==='flex'&&!colPanel.contains(t)&&t!==colBtn){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'&&!helpPop.contains(t)&&t!==helpBtn){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}});"
1118
+ "document.addEventListener('click',e=>{const btn=e.target.closest('.link-btn');if(!btn)return;e.stopPropagation();const rid=btn.getAttribute('data-rid');if(!rid)return;const base=location.href.split('#')[0];const link=base+'#run='+encodeURIComponent(rid);if(navigator.clipboard){navigator.clipboard.writeText(link).catch(()=>{});}btn.classList.add('copied');setTimeout(()=>btn.classList.remove('copied'),900);});"
1119
+ "function applyHash(){const h=location.hash;if(h.startsWith('#run=')){const rid=decodeURIComponent(h.slice(5));if(rid){filter.value=rid;lsSet('filter',rid);applyFilter();}}}window.addEventListener('hashchange',applyHash);"
563
1120
  "let sortState=null;"
564
- "function extract(r,col){switch(col){"
565
- "case 'epoch':return r.cells[1].textContent;"
566
- "case 'size':return r.cells[3].getAttribute('title');"
567
- "case 'pfb':return r.cells[4].textContent;"
568
- "default:return r.textContent;}}"
569
- "function sortBy(col){const tbody=tbl.tBodies[0];"
570
- "const rows=[...tbody.rows];let dir=1;"
571
- "if(sortState&&sortState.col===col){dir=-sortState.dir;}"
572
- "sortState={col,dir};"
573
- "const numeric=(col==='epoch'||col==='size');"
574
- "rows.sort((r1,r2)=>{const a=extract(r1,col);"
575
- "const b=extract(r2,col);if(numeric){return (("
576
- "(Number(a)||0)-(Number(b)||0))*dir;}"
577
- "return a.localeCompare(b)*dir;});"
578
- "rows.forEach(r=>tbody.appendChild(r));}"
579
- "tbl.tHead.querySelectorAll('th.sortable')" # split chain
580
- ".forEach(th=>{th.addEventListener('click',()=>sortBy(th.dataset.col));});" # noqa: E501
581
- "updateStats();})();"
1121
+ "function extract(r,col){if(col.startsWith('meta:')){const idx=[...tbl.tHead.querySelectorAll('th')].findIndex(h=>h.dataset.col===col);return idx>-1?r.cells[idx].textContent:'';}switch(col){case 'size':return r.querySelector('td.col-size').getAttribute('title');case 'files':return r.querySelector('td.col-files').getAttribute('title');case 'pfb':return r.querySelector('td.col-pfb').textContent;case 'run_id':return r.querySelector('td.col-run_id').textContent;case 'utc':return r.querySelector('td.col-utc').textContent;case 'context':return r.querySelector('td.col-context').textContent;case 'tags':return r.querySelector('td.col-tags').textContent;default:return r.textContent;}}"
1122
+ "function sortBy(th){const col=th.dataset.col;const tbody=tbl.tBodies[0];const rows=[...tbody.rows];let dir=1;if(sortState&&sortState.col===col){dir=-sortState.dir;}sortState={col,dir};const numeric=(col==='size'||col==='files');rows.sort((r1,r2)=>{const a=extract(r1,col);const b=extract(r2,col);if(numeric){return ((Number(a)||0)-(Number(b)||0))*dir;}return a.localeCompare(b)*dir;});rows.forEach(r=>tbody.appendChild(r));tbl.tHead.querySelectorAll('th.sortable').forEach(h=>h.removeAttribute('data-sort'));th.setAttribute('data-sort',dir===1?'asc':'desc');if(window.setAriaSort){const idx=[...tbl.tHead.querySelectorAll('th')].indexOf(th);window.setAriaSort(idx,dir===1?'ascending':'descending');}lsSet('sort_col',col);lsSet('sort_dir',String(dir));}"
1123
+ "tbl.tHead.querySelectorAll('th.sortable').forEach(th=>{th.addEventListener('click',()=>sortBy(th));});"
1124
+ "function restore(){const f=lsGet('filter');if(f){filter.value=f;}const of=lsGet('onlyFail');if(of==='1'){onlyFail.checked=true;}const loaded=Number(lsGet('loaded')||'0');if(loaded>INIT){while(tbl.tBodies[0].rows.length<loaded && hidden().length){revealNextBatch();}}const sc=lsGet('sort_col');const sd=Number(lsGet('sort_dir')||'1');if(sc){const th=tbl.tHead.querySelector(\"th[data-col='\"+sc+\"']\");if(th){sortState={col:sc,dir:-sd};sortBy(th);if(sd===-1){} }}applyCols();}"
1125
+ "restore();applyHash();tbl.tBodies[0].addEventListener('click',e=>{const tr=e.target.closest('tr');if(!tr)return;if(e.target.tagName==='A'||e.target.classList.contains('link-btn'))return;const codeEl=tr.querySelector('td.col-run_id code');if(!codeEl)return;const rid=codeEl.textContent.trim();if(e.shiftKey&&filter.value.trim()){if(!filter.value.split(/\\s+/).includes(rid)){filter.value=filter.value.trim()+' '+rid;}}else{filter.value=rid;location.hash='run='+encodeURIComponent(rid);}lsSet('filter',filter.value);applyFilter();filter.focus();});"
1126
+ "function relFmt(sec){if(sec<60)return Math.floor(sec)+'s';sec/=60;if(sec<60)return Math.floor(sec)+'m';sec/=60;if(sec<24)return Math.floor(sec)+'h';sec/=24;if(sec<7)return Math.floor(sec)+'d';const w=Math.floor(sec/7);if(w<4)return w+'w';const mo=Math.floor(sec/30);if(mo<12)return mo+'mo';return Math.floor(sec/365)+'y';}"
1127
+ "function updateAges(){const now=Date.now()/1000;tbl.tBodies[0].querySelectorAll('td.age').forEach(td=>{const ep=Number(td.getAttribute('data-epoch'));if(!ep){td.textContent='-';return;}td.textContent=relFmt(now-ep);});}"
1128
+ "applyFilter();updateStats();updateLoadButton();updateAges();setInterval(updateAges,60000);"
1129
+ # Back-compat fragment redirect (#/graphs -> #graph)
1130
+ "(function(){if(location.hash==='#/graphs'){history.replaceState(null,'',location.href.replace('#/graphs','#graph'));}})();"
1131
+ # Theme toggle script
1132
+ "(function(){const btn=document.getElementById('theme-toggle');if(!btn)return;const LS='ah_runs_';function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}function apply(t){if(t==='dark'){document.body.setAttribute('data-theme','dark');btn.textContent='Light';}else{document.body.removeAttribute('data-theme');btn.textContent='Dark';}}let cur=lsGet('theme')||'light';apply(cur);btn.addEventListener('click',()=>{cur=cur==='dark'?'light':'dark';lsSet('theme',cur);apply(cur);});})();"
1133
+ "})();"
582
1134
  "</script>"
583
1135
  ),
1136
+ f"<script>{RUNS_INDEX_JS_ENH}</script>",
1137
+ # Summary toggle & dashboard scripts removed
1138
+ "<div id='empty-msg' hidden class='empty'>No runs match the current filters.</div>",
584
1139
  "</body></html>",
585
1140
  ]
1141
+ # Return assembled runs index HTML (bytes)
586
1142
  return "".join(parts).encode("utf-8")
587
1143
 
588
1144
 
1145
+ def _build_aggregated_runs_html(payload: dict, cfg: PublishConfig) -> bytes:
1146
+ """Very small aggregated runs page (cross-branch latest runs).
1147
+
1148
+ Schema 2 payload example:
1149
+ {
1150
+ "schema": 2,
1151
+ "project": "demo",
1152
+ "updated": 1234567890,
1153
+ "runs": [
1154
+ {"branch": "main", "run_id": "20250101-010101", "time": 123, "passed": 10, ...}
1155
+ ]
1156
+ }
1157
+ """
1158
+ title = f"Allure Aggregated Runs: {payload.get('project') or cfg.project}"
1159
+ runs = payload.get("runs", [])
1160
+ rows: list[str] = []
1161
+
1162
+ def classify(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1163
+ if p is None:
1164
+ return ("-", "health-na")
1165
+ f2 = f or 0
1166
+ b2 = b or 0
1167
+ total_exec = p + f2 + b2
1168
+ if total_exec <= 0:
1169
+ return ("-", "health-na")
1170
+ ratio = p / total_exec
1171
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1172
+ return ("Good", "health-good")
1173
+ if ratio >= 0.75:
1174
+ return ("Warn", "health-warn")
1175
+ return ("Poor", "health-poor")
1176
+
1177
+ for r in runs:
1178
+ b = r.get("branch", "?")
1179
+ rid = r.get("run_id", "?")
1180
+ t = r.get("time")
1181
+ passed = r.get("passed")
1182
+ failed = r.get("failed")
1183
+ broken = r.get("broken")
1184
+ size = r.get("size")
1185
+ summary = (
1186
+ f"{passed or 0}/{failed or 0}/{broken or 0}"
1187
+ if any(x is not None for x in (passed, failed, broken))
1188
+ else "-"
1189
+ )
1190
+ health_label, health_css = classify(passed, failed, broken)
1191
+ pct_pass = None
1192
+ if passed is not None:
1193
+ exec_total = (passed or 0) + (failed or 0) + (broken or 0)
1194
+ if exec_total > 0:
1195
+ pct_pass = f"{(passed / exec_total) * 100:.1f}%"
1196
+ rows.append(
1197
+ f"<tr class='{health_css}'>"
1198
+ f"<td><code>{b}</code></td>"
1199
+ f"<td><code>{rid}</code></td>"
1200
+ f"<td>{_format_epoch_utc(t) if t else '-'}</td>"
1201
+ f"<td>{summary}</td>"
1202
+ f"<td><span class='health-badge {health_css}'>{health_label}</span></td>"
1203
+ f"<td>{pct_pass or '-'}</td>"
1204
+ f"<td>{_format_bytes(size) if size else '-'}</td>"
1205
+ "</tr>"
1206
+ )
1207
+ body = (
1208
+ "\n".join(rows)
1209
+ if rows
1210
+ else "<tr><td colspan='7' style='text-align:center'>No runs yet</td></tr>"
1211
+ )
1212
+ updated = payload.get("updated")
1213
+ parts = [
1214
+ "<!doctype html><html><head><meta charset='utf-8'>",
1215
+ f"<title>{title}</title>",
1216
+ "<style>",
1217
+ "body{font-family:system-ui;margin:1.25rem;line-height:1.4;}",
1218
+ "h1{margin-top:0;font-size:1.3rem;}",
1219
+ "table{border-collapse:collapse;width:100%;max-width:1000px;}",
1220
+ "th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:13px;}",
1221
+ "thead th{background:#f2f4f7;text-align:left;}",
1222
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1223
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1224
+ "footer{margin-top:1rem;font-size:12px;color:#555;}",
1225
+ "#filter-box{margin:.75rem 0;}",
1226
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1227
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1228
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1229
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1230
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1231
+ "</style></head><body>",
1232
+ f"<h1>{title}</h1>",
1233
+ "<div id='filter-box'><label style='font-size:13px'>Filter: <input id='flt' type='text' placeholder='branch or run id'></label></div>", # noqa: E501
1234
+ "<table id='agg'><thead><tr><th>Branch</th><th>Run</th><th>UTC</th><th>P/F/B</th><th>Health</th><th>%Pass</th><th>Size</th></tr></thead><tbody>", # noqa: E501
1235
+ body,
1236
+ "</tbody></table>",
1237
+ (
1238
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1239
+ f"Project: {payload.get('project') or cfg.project}</footer>"
1240
+ ),
1241
+ "<script>(function(){const f=document.getElementById('flt');const tbl=document.getElementById('agg');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const txt=r.textContent.toLowerCase();r.style.display=txt.includes(q)?'':'none';});});})();</script>", # noqa: E501
1242
+ "</body></html>",
1243
+ ]
1244
+ return "".join(parts).encode("utf-8")
1245
+
1246
+
1247
+ # --------------------------------------------------------------------------------------
1248
+ # Publish orchestration (restored)
1249
+ # --------------------------------------------------------------------------------------
1250
+
1251
+
1252
def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
    """End-to-end publish: pull history, generate, upload, promote latest, manifests.

    Returns a dict of useful URLs & metadata for caller / CI usage.
    """
    paths = paths or Paths()
    total_steps = 7
    step = 1
    timings: dict[str, float] = {}
    started = time()

    print(f"[publish] [{step}/{total_steps}] Pulling previous history...")
    pull_history(cfg, paths)
    timings["history_pull"] = time() - started

    step += 1
    phase = time()
    print(f"[publish] [{step}/{total_steps}] Generating Allure report...")
    generate_report(paths)
    timings["generate"] = time() - phase

    # Count report files pre-upload for transparency
    results_files = sum(1 for entry in paths.report.rglob("*") if entry.is_file())

    step += 1
    phase = time()
    print(f"[publish] [{step}/{total_steps}] Uploading run artifacts ({results_files} files)...")
    upload_dir(cfg, paths.report, cfg.s3_run_prefix)
    timings["upload_run"] = time() - phase
    _ensure_directory_placeholder(
        cfg,
        paths.report / "index.html",
        cfg.s3_run_prefix,
    )

    step += 1
    phase = time()
    print(f"[publish] [{step}/{total_steps}] Two-phase latest update starting...")
    two_phase_update_latest(cfg, paths.report)
    timings["two_phase_update"] = time() - phase

    # Optional archive AFTER main run upload
    archive_key = _maybe_archive_run(cfg, paths)

    try:
        step += 1
        print(f"[publish] [{step}/{total_steps}] Writing manifest & indexes...")
        write_manifest(cfg, paths)
    except ClientError as e:  # pragma: no cover – non fatal
        print(f"Manifest write skipped: {e}")

    try:  # retention cleanup (best effort; never aborts the publish)
        if getattr(cfg, "max_keep_runs", None):
            step += 1
            print(f"[publish] [{step}/{total_steps}] Retention cleanup...")
            cleanup_old_runs(cfg, int(cfg.max_keep_runs))
    except Exception as e:  # pragma: no cover
        print(f"Cleanup skipped: {e}")

    step += 1
    print(f"[publish] [{step}/{total_steps}] Publish pipeline complete.")
    timings["total"] = time() - started

    def cdn_page(page: str) -> str | None:
        # Branch-scoped runs/<page> URL behind the CDN; None when no CDN is set.
        if not cfg.cloudfront_domain:
            return None
        root = branch_root(cfg.prefix, cfg.project, cfg.branch)
        return f"{cfg.cloudfront_domain.rstrip('/')}/{root}/runs/{page}"

    return {
        "run_url": cfg.url_run(),
        "latest_url": cfg.url_latest(),
        "runs_index_url": cdn_page("index.html"),
        "trend_url": cdn_page("trend.html"),
        "bucket": cfg.bucket,
        "run_prefix": cfg.s3_run_prefix,
        "latest_prefix": cfg.s3_latest_prefix,
        "report_size_bytes": compute_dir_size(paths.report),
        "report_files": sum(1 for p in paths.report.rglob("*") if p.is_file()),
        "archive_key": archive_key,
        "timings": timings,
    }
1336
+
1337
+
589
1338
  def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
590
1339
  title = f"Run History Trend: {cfg.project} / {cfg.branch}"
591
1340
  json_url = "../latest/history/history-trend.json"
@@ -619,26 +1368,34 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
619
1368
  "</th></tr></thead><tbody></tbody></table>"
620
1369
  ),
621
1370
  "<script>\n(async function(){\n",
622
- f" const resp = await fetch('{json_url}');\n",
623
- (
624
- " if(!resp.ok){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
625
- "'<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');return;}\n" # noqa: E501
626
- ),
627
- " const data = await resp.json();\n",
628
- (
629
- " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
630
- "'<p>No trend data.</p>');return;}\n" # noqa: E501
631
- ),
1371
+ f" const url = '{json_url}';\n",
1372
+ " let data = null;\n",
1373
+ " try {\n",
1374
+ " const resp = await fetch(url, { cache: 'no-store' });\n",
1375
+ " const ct = resp.headers.get('content-type') || '';\n",
1376
+ " if(!resp.ok){\n",
1377
+ " document.body.insertAdjacentHTML('beforeend',\n",
1378
+ " '<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');\n",
1379
+ " return;\n",
1380
+ " }\n",
1381
+ " if (!ct.includes('application/json')) {\n",
1382
+ " const txt = await resp.text();\n",
1383
+ " throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');\n",
1384
+ " }\n",
1385
+ " data = await resp.json();\n",
1386
+ " } catch (e) {\n",
1387
+ " document.body.insertAdjacentHTML('beforeend', '<p style=\\'color:red\\'>Error loading trend data: '+(e && e.message ? e.message : e)+'</p>');\n",
1388
+ " return;\n",
1389
+ " }\n",
1390
+ " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend','<p>No trend data.</p>');return;}\n",
632
1391
  # Sanitize & enrich: fallback label if reportName/buildOrder missing
633
1392
  (
634
1393
  " const stats = data\n"
635
1394
  " .filter(d=>d&&typeof d==='object')\n"
636
1395
  " .map((d,i)=>{\n"
637
- " const st = (d.statistic && typeof d.statistic==='object') ?" # noqa: E501
638
- " d.statistic : {};\n"
639
- " const lbl = d.reportName || d.buildOrder || st.name ||" # noqa: E501
640
- " (i+1);\n"
641
- " return {label: String(lbl), ...st};\n"
1396
+ " const src = (d.statistic && typeof d.statistic==='object') ? d.statistic : ((d.data && typeof d.data==='object') ? d.data : {});\n"
1397
+ " const lbl = d.reportName || d.buildOrder || d.name || src.name || (i+1);\n"
1398
+ " return {label: String(lbl), ...src};\n"
642
1399
  " });\n"
643
1400
  ),
644
1401
  (
@@ -679,65 +1436,181 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
679
1436
  return "".join(parts).encode("utf-8")
680
1437
 
681
1438
 
682
- # --------------------------------------------------------------------------------------
683
- # Retention cleanup & directory placeholder
684
- # --------------------------------------------------------------------------------------
685
-
1439
+ def _build_history_insights_html(cfg: PublishConfig) -> bytes:
1440
+ """Render a lightweight insights page derived from history-trend.json.
686
1441
 
687
- def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
688
- if keep is None or keep <= 0:
689
- return
690
- s3 = _s3(cfg)
691
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
692
- # list immediate children (run prefixes)
693
- paginator = s3.get_paginator("list_objects_v2")
694
- run_prefixes: list[str] = []
695
- for page in paginator.paginate(
696
- Bucket=cfg.bucket,
697
- Prefix=f"{root}/",
698
- Delimiter="/",
699
- ):
700
- for cp in page.get("CommonPrefixes", []) or []:
701
- pfx = cp.get("Prefix")
702
- if not pfx:
703
- continue
704
- name = pfx.rsplit("/", 2)[-2]
705
- if name in {"latest", "runs"}:
706
- continue
707
- is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
708
- if is_ts:
709
- run_prefixes.append(pfx)
710
- run_prefixes.sort(reverse=True)
711
- for old in run_prefixes[keep:]:
712
- delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
1442
+ Provides quick metrics (run count, latest pass%, failure streak, averages)
1443
+ plus a compact table of recent entries – purely client-side.
1444
+ """
1445
+ title = f"Run History Insights: {cfg.project} / {cfg.branch}"
1446
+ json_url = "../latest/history/history-trend.json"
1447
+ parts: list[str] = [
1448
+ "<!doctype html><html><head><meta charset='utf-8'>",
1449
+ f"<title>{title}</title>",
1450
+ "<style>body{font-family:system-ui;margin:1.25rem;line-height:1.4;background:#fff;color:#111;}h1{margin-top:0;font-size:1.35rem;}a{color:#2563eb;text-decoration:none;}a:hover{text-decoration:underline;}code{background:#f2f4f7;padding:2px 4px;border-radius:4px;font-size:12px;}#metrics{display:flex;flex-wrap:wrap;gap:.8rem;margin:1rem 0;}#metrics .m{flex:0 1 170px;background:#f8f9fa;border:1px solid #d0d4d9;border-radius:6px;padding:.6rem .7rem;box-shadow:0 1px 2px rgba(0,0,0,.06);}#metrics .m h3{margin:0 0 .3rem;font-size:11px;font-weight:600;letter-spacing:.5px;color:#555;text-transform:uppercase;}#metrics .m .v{font-size:20px;font-weight:600;}table{border-collapse:collapse;width:100%;max-width:1100px;}th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:12px;text-align:left;}thead th{background:#f2f4f7;}tbody tr:nth-child(even){background:#fafbfc;} .ok{color:#2e7d32;font-weight:600;} .warn{color:#f59e0b;font-weight:600;} .bad{color:#d32f2f;font-weight:600;}footer{margin-top:1.2rem;font-size:12px;color:#555;}#err{color:#d32f2f;margin-top:1rem;}@media (prefers-color-scheme:dark){body{background:#0f1115;color:#f5f6f8;}#metrics .m{background:#1b1f26;border-color:#2a313b;color:#f5f6f8;}thead th{background:#1e252d;}table,th,td{border-color:#2a313b;}code{background:#1e252d;}a{color:#3b82f6;}} .health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;} .health-good{background:#e6f7ed;border-color:#9ad5b6;} .health-warn{background:#fff7e6;border-color:#f5c063;} .health-poor{background:#ffebe8;border-color:#f08a80;} .health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1451
+ "</style></head><body>",
1452
+ f"<h1>{title}</h1>",
1453
+ "<p>Source: <code>latest/history/history-trend.json</code> · <a href='index.html'>back to runs</a> · <a href='trend.html'>trend viewer</a> · <a href='../latest/history/history-trend.json' target='_blank' rel='noopener'>raw JSON</a></p>",
1454
+ "<div id='metrics'></div>",
1455
+ "<div style='overflow:auto'><table id='hist'><thead><tr><th>#</th><th>Label</th><th>Passed</th><th>Failed</th><th>Broken</th><th>Total</th><th>Pass%</th><th>Health</th></tr></thead><tbody></tbody></table></div>",
1456
+ "<div id='err' hidden></div>",
1457
+ "<footer id='ft'></footer>",
1458
+ "<script>\n(async function(){\n",
1459
+ f" const url = '{json_url}';\n",
1460
+ " const MET=document.getElementById('metrics');\n",
1461
+ " const TB=document.querySelector('#hist tbody');\n",
1462
+ " const ERR=document.getElementById('err');\n",
1463
+ " const FT=document.getElementById('ft');\n",
1464
+ " function pct(p,f,b){const t=(p||0)+(f||0)+(b||0);return t?((p||0)/t*100).toFixed(1)+'%':'-';}\n",
1465
+ " function classify(p,f,b){const t=(p||0)+(f||0)+(b||0);if(!t)return ['-','health-na'];if((f||0)==0&&(b||0)==0&&(p||0)/t>=0.9)return['Good','health-good'];const ratio=(p||0)/t; if(ratio>=0.75)return['Warn','health-warn'];return['Poor','health-poor'];}\n",
1466
+ " let data=null;\n",
1467
+ " try {\n",
1468
+ " const r=await fetch(url, { cache: 'no-store' });\n",
1469
+ " const ct=r.headers.get('content-type')||'';\n",
1470
+ " if(!r.ok) throw new Error('HTTP '+r.status);\n",
1471
+ " if(!ct.includes('application/json')){const txt=await r.text();throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');}\n",
1472
+ " data=await r.json();\n",
1473
+ " if(!Array.isArray(data)) throw new Error('Unexpected JSON shape');\n",
1474
+ " } catch(e) {\n",
1475
+ " ERR.textContent='Failed to load history: '+(e && e.message? e.message : String(e));ERR.hidden=false;return;\n",
1476
+ " }\n",
1477
+ " const rows=data.filter(d=>d&&typeof d==='object').map((d,i)=>{\n",
1478
+ " const st=(d.statistic&&typeof d.statistic==='object')?d.statistic:((d.data&&typeof d.data==='object')?d.data:{});\n",
1479
+ " const label=d.reportName||d.buildOrder||d.name||st.name||i+1;\n",
1480
+ " const total=typeof st.total==='number'?st.total:(st.passed||0)+(st.failed||0)+(st.broken||0);\n",
1481
+ " return {idx:i,label:String(label),passed:st.passed||0,failed:st.failed||0,broken:st.broken||0,total:total};\n",
1482
+ " });\n",
1483
+ " if(!rows.length){ERR.textContent='No usable entries.';ERR.hidden=false;return;}\n",
1484
+ " const latest=rows[rows.length-1];\n",
1485
+ " const passRates=rows.map(r=>r.total? r.passed/r.total:0);\n",
1486
+ " const avgAll=(passRates.reduce((a,b)=>a+b,0)/passRates.length*100).toFixed(1)+'%';\n",
1487
+ " const last10=passRates.slice(-10);\n",
1488
+ " const avg10=(last10.reduce((a,b)=>a+b,0)/last10.length*100).toFixed(1)+'%';\n",
1489
+ " let streak=0;\n",
1490
+ " for(let i=rows.length-1;i>=0;i--){if(rows[i].failed===0&&rows[i].broken===0)streak++;else break;}\n",
1491
+ " function card(t,v){return `<div class='m'><h3>${t}</h3><div class='v'>${v}</div></div>`;}\n",
1492
+ " const latestPct=pct(latest.passed,latest.failed,latest.broken);\n",
1493
+ " MET.innerHTML=card('Runs',rows.length)+card('Latest Pass%',latestPct)+card('Avg Pass% (all)',avgAll)+card('Avg Pass% (last10)',avg10)+card('Healthy Streak',streak)+card('Failures (latest)',latest.failed);\n",
1494
+ " rows.slice(-80).reverse().forEach(r=>{\n",
1495
+ " const pr=pct(r.passed,r.failed,r.broken);\n",
1496
+ " const [hl,cls]=classify(r.passed,r.failed,r.broken);\n",
1497
+ " TB.insertAdjacentHTML('beforeend',`<tr class='${cls}'><td>${rows.length-r.idx}</td><td>${r.label}</td><td>${r.passed}</td><td>${r.failed}</td><td>${r.broken}</td><td>${r.total}</td><td>${pr}</td><td><span class='health-badge ${cls}'>${hl}</span></td></tr>`);\n",
1498
+ " });\n",
1499
+ " FT.textContent='Entries: '+rows.length+' · Generated '+new Date().toISOString();\n",
1500
+ "})();</script>",
1501
+ "</body></html>",
1502
+ ]
1503
+ return "".join(parts).encode("utf-8")
713
1504
 
714
1505
 
715
- def _ensure_directory_placeholder(
716
- cfg: PublishConfig,
717
- index_file: Path,
718
- dir_prefix: str,
719
- ) -> None:
720
- if not index_file.exists() or not dir_prefix.endswith("/"):
721
- return
722
- body = index_file.read_bytes()
723
- extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
724
- if cfg.ttl_days is not None:
725
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
726
- try:
727
- _s3(cfg).put_object(
728
- Bucket=cfg.bucket,
729
- Key=dir_prefix,
730
- Body=body,
731
- CacheControl=extra["CacheControl"],
732
- ContentType=extra["ContentType"],
733
- )
734
- except ClientError as e: # pragma: no cover – best effort
735
- print(f"Placeholder upload skipped: {e}")
1506
+ def _branch_health(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1507
+ if p is None or (f is None and b is None):
1508
+ return ("-", "health-na")
1509
+ f2 = f or 0
1510
+ b2 = b or 0
1511
+ total_exec = p + f2 + b2
1512
+ if total_exec <= 0:
1513
+ return ("-", "health-na")
1514
+ ratio = p / total_exec
1515
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1516
+ return ("Good", "health-good")
1517
+ if ratio >= 0.75:
1518
+ return ("Warn", "health-warn")
1519
+ return ("Poor", "health-poor")
1520
+
1521
+
1522
def _render_branch_row(br: dict) -> str:
    """Render one ``<tr>`` for the branches dashboard from a branch summary.

    Missing numeric fields render as ``-``; link URLs fall back to the
    conventional branch-relative paths when the payload carries none.
    """
    name = br.get("branch", "?")
    latest_run = br.get("latest_run_id") or "-"
    epoch = br.get("time")
    passed, failed, broken = (br.get(k) for k in ("passed", "failed", "broken"))
    total_runs = br.get("total_runs")
    href_latest = br.get("latest_url") or f"./{name}/latest/"
    href_runs = br.get("runs_url") or f"./{name}/runs/"
    href_trend = br.get("trend_url") or f"./{name}/runs/trend.html"
    when = _format_epoch_utc(epoch) if epoch else "-"

    # Pass percentage only when a pass count exists and something executed.
    pct: str | None = None
    if passed is not None:
        executed = (passed or 0) + (failed or 0) + (broken or 0)
        if executed > 0:
            pct = f"{(passed / executed) * 100:.1f}%"

    label, css = _branch_health(passed, failed, broken)

    classes: list[str] = []
    if (failed or 0) > 0:
        classes.append("row-fail")
    if (broken or 0) > 0:
        classes.append("row-broken")
    if css:
        classes.append(css)
    attr = f" class='{' '.join(classes)}'" if classes else ""

    def shown(value):
        # '-' placeholder for absent numeric fields.
        return value if value is not None else "-"

    return (
        f"<tr{attr}>"
        f"<td class='col-branch'><code>{name}</code></td>"
        f"<td class='col-lrid'><code>{latest_run}</code></td>"
        f"<td class='col-time'>{when}</td>"
        f"<td class='col-passed'>{shown(passed)}"
        f"</td><td class='col-failed'>{shown(failed)}"
        f"</td><td class='col-broken'>{shown(broken)}"
        f"</td><td class='col-total'>{shown(total_runs)}"
        f"</td><td class='col-health'><span class='health-badge {css}'>{label}</span>"
        f"</td><td class='col-passpct'>{pct or '-'}"
        f"</td><td class='col-links'><a href='{href_latest}'>latest</a> · "
        f"<a href='{href_runs}'>runs</a> · <a href='{href_trend}'>trend</a></td>"
        "</tr>"
    )
736
1563
 
737
1564
 
738
- # --------------------------------------------------------------------------------------
739
- # Preflight / Dry run / Publish orchestration
740
- # --------------------------------------------------------------------------------------
1565
+ def _build_branches_dashboard_html(payload: dict, cfg: PublishConfig) -> bytes:
1566
+ """Render a lightweight branches summary dashboard (schema 1)."""
1567
+ branches = payload.get("branches", [])
1568
+ title = f"Allure Branches: {payload.get('project') or cfg.project}"
1569
+ rows = [_render_branch_row(br) for br in branches]
1570
+ body_rows = (
1571
+ "\n".join(rows)
1572
+ if rows
1573
+ else "<tr><td colspan='10' style='text-align:center'>No branches yet</td></tr>"
1574
+ )
1575
+ updated = payload.get("updated")
1576
+ parts: list[str] = [
1577
+ "<!doctype html><html><head><meta charset='utf-8'>",
1578
+ f"<title>{title}</title>",
1579
+ "<style>",
1580
+ "body{font-family:system-ui;margin:1.5rem;line-height:1.4;}",
1581
+ "h1{margin-top:0;font-size:1.35rem;}",
1582
+ "table{border-collapse:collapse;width:100%;max-width:1100px;}",
1583
+ "th,td{padding:.5rem .6rem;border:1px solid #ccc;font-size:13px;}",
1584
+ "thead th{background:#f2f4f7;text-align:left;}",
1585
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1586
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1587
+ "footer{margin-top:1.5rem;font-size:12px;color:#555;}",
1588
+ "#filters{margin:.75rem 0;display:flex;gap:1rem;flex-wrap:wrap;}",
1589
+ "#filters input{padding:4px 6px;font-size:13px;}",
1590
+ ".dim{color:#666;font-size:12px;}",
1591
+ ".row-fail{background:#fff5f4 !important;}",
1592
+ ".row-broken{background:#fff9ef !important;}",
1593
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1594
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1595
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1596
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1597
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1598
+ "</style></head><body>",
1599
+ f"<h1>{title}</h1>",
1600
+ "<div id='filters'><label style='font-size:13px'>Branch filter: "
1601
+ "<input id='branch-filter' type='text' placeholder='substring'></label>"
1602
+ "<span class='dim'>Shows most recently active branches first.</span></div>",
1603
+ "<table id='branches'><thead><tr><th>Branch</th><th>Latest Run</th><th>UTC</th><th>P</th><th>F</th><th>B</th><th>Total Runs</th><th>Health</th><th>%Pass</th><th>Links</th></tr></thead><tbody>", # noqa: E501
1604
+ body_rows,
1605
+ "</tbody></table>",
1606
+ (
1607
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1608
+ f"Project: {payload.get('project') or cfg.project}</footer>"
1609
+ ),
1610
+ "<script>(function(){const f=document.getElementById('branch-filter');const tbl=document.getElementById('branches');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const name=r.querySelector('.col-branch').textContent.toLowerCase();r.style.display=name.includes(q)?'':'';});});})();</script>", # noqa: E501
1611
+ "</body></html>",
1612
+ ]
1613
+ return "".join(parts).encode("utf-8")
741
1614
 
742
1615
 
743
1616
  def preflight(
@@ -775,7 +1648,12 @@ def preflight(
775
1648
  # region detection (defensive: some stubs may return None)
776
1649
  if head:
777
1650
  bucket_region = (
778
- head.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
1651
+ head.get("ResponseMetadata", {})
1652
+ .get(
1653
+ "HTTPHeaders",
1654
+ {},
1655
+ )
1656
+ .get("x-amz-bucket-region")
779
1657
  )
780
1658
  # Attempt a small list to confirm permissions
781
1659
  s3.list_objects_v2(
@@ -813,61 +1691,135 @@ def plan_dry_run(cfg: PublishConfig, paths: Paths | None = None) -> dict:
813
1691
  )
814
1692
  else:
815
1693
  samples.append({"note": "Report missing; would run allure generate."})
816
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
817
- latest_tmp = f"{root}/latest_tmp/"
818
- mapping = {
1694
+ # Align keys with existing tests expectations
1695
+ return {
819
1696
  "bucket": cfg.bucket,
820
- "prefix": cfg.prefix,
821
- "project": cfg.project,
822
- "branch": cfg.branch,
823
- "run_id": cfg.run_id,
824
1697
  "run_prefix": cfg.s3_run_prefix,
825
- # Backwards compat: historical key name pointed to temp swap area
826
- "latest_prefix": latest_tmp,
827
- "latest_tmp_prefix": latest_tmp,
828
- "latest_final_prefix": cfg.s3_latest_prefix,
829
- }
830
- return {
831
- **mapping,
1698
+ # reflect the temporary latest staging area (two-phase)
1699
+ "latest_prefix": getattr(
1700
+ cfg,
1701
+ "s3_latest_prefix_tmp",
1702
+ cfg.s3_latest_prefix,
1703
+ ),
1704
+ "samples": samples,
832
1705
  "run_url": cfg.url_run(),
833
1706
  "latest_url": cfg.url_latest(),
834
- "context_url": getattr(cfg, "context_url", None),
835
- "metadata": cfg.metadata or {},
836
- "samples": samples,
837
- "encryption": {
838
- "sse": cfg.sse,
839
- "sse_kms_key_id": cfg.sse_kms_key_id,
840
- },
841
1707
  }
842
1708
 
843
1709
 
844
- def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
845
- paths = paths or Paths()
846
- pull_history(cfg, paths)
847
- generate_report(paths)
848
- upload_dir(cfg, paths.report, cfg.s3_run_prefix)
849
- _ensure_directory_placeholder(cfg, paths.report / "index.html", cfg.s3_run_prefix)
850
- two_phase_update_latest(cfg, paths.report)
1710
+ def _maybe_archive_run(cfg: PublishConfig, paths: Paths) -> str | None:
1711
+ """Optionally archive the run under an archive/ prefix.
1712
+
1713
+ Controlled by cfg.archive_runs (bool). Best-effort; failures do not abort
1714
+ publish.
1715
+ Returns archive prefix if performed.
1716
+ """
1717
+ # Backward compatibility: earlier implementation mistakenly looked for
1718
+ # cfg.archive_runs (plural). The correct flag sets cfg.archive_run.
1719
+ should_archive = getattr(cfg, "archive_run", False) or getattr(cfg, "archive_runs", False)
1720
+ if not should_archive:
1721
+ return None
1722
+ import tempfile
1723
+
1724
+ archive_format = getattr(cfg, "archive_format", "tar.gz") or "tar.gz"
1725
+ run_root = paths.report
1726
+ if not run_root or not run_root.exists():
1727
+ return None
1728
+ # Destination S3 key (placed alongside run prefix root)
1729
+ # s3://bucket/<prefix>/<project>/<branch>/<run_id>/<run_id>.tar.gz
1730
+ archive_filename = f"{cfg.run_id}.{'zip' if archive_format == 'zip' else 'tar.gz'}"
1731
+ s3_key = f"{cfg.s3_run_prefix}{archive_filename}"
851
1732
  try:
852
- write_manifest(cfg, paths)
853
- except ClientError as e: # pragma: no cover – non fatal
854
- print(f"Manifest write skipped: {e}")
855
- try: # retention cleanup
856
- if getattr(cfg, "max_keep_runs", None):
857
- cleanup_old_runs(cfg, int(cfg.max_keep_runs))
1733
+ tmp_dir = tempfile.mkdtemp(prefix="allure-arch-")
1734
+ archive_path = Path(tmp_dir) / archive_filename
1735
+ if archive_format == "zip":
1736
+ import zipfile
1737
+
1738
+ with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
1739
+ for p in run_root.rglob("*"):
1740
+ if p.is_file():
1741
+ zf.write(p, arcname=p.relative_to(run_root).as_posix())
1742
+ else: # tar.gz
1743
+ import tarfile
1744
+
1745
+ with tarfile.open(archive_path, "w:gz") as tf:
1746
+ for p in run_root.rglob("*"):
1747
+ if p.is_file():
1748
+ tf.add(p, arcname=p.relative_to(run_root).as_posix())
1749
+ # Upload archive object
1750
+ s3 = _s3(cfg)
1751
+ extra = {
1752
+ "CacheControl": "public, max-age=31536000, immutable",
1753
+ "ContentType": "application/gzip" if archive_format != "zip" else "application/zip",
1754
+ }
1755
+ if cfg.ttl_days is not None:
1756
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1757
+ if cfg.sse:
1758
+ extra["ServerSideEncryption"] = cfg.sse
1759
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
1760
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
1761
+ s3.upload_file(str(archive_path), cfg.bucket, s3_key, ExtraArgs=extra)
1762
+ print(f"[publish] Archived run bundle uploaded: s3://{cfg.bucket}/{s3_key}")
1763
+ return s3_key
858
1764
  except Exception as e: # pragma: no cover
859
- print(f"Cleanup skipped: {e}")
1765
+ if os.getenv("ALLURE_HOST_DEBUG"):
1766
+ print(f"[publish] archive skipped: {e}")
1767
+ return None
860
1768
 
861
- files_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
862
- return {
863
- "run_url": cfg.url_run(),
864
- "latest_url": cfg.url_latest(),
865
- "bucket": cfg.bucket,
866
- "run_prefix": cfg.s3_run_prefix,
867
- "latest_prefix": cfg.s3_latest_prefix,
868
- "report_size_bytes": compute_dir_size(paths.report),
869
- "report_files": files_count,
870
- }
1769
+
1770
+ # --------------------------------------------------------------------------------------
1771
+ # Retention cleanup & directory placeholder (restored)
1772
+ # --------------------------------------------------------------------------------------
1773
+
1774
+
1775
+ def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
1776
+ if keep is None or keep <= 0:
1777
+ return
1778
+ s3 = _s3(cfg)
1779
+ root = branch_root(cfg.prefix, cfg.project, cfg.branch)
1780
+ paginator = s3.get_paginator("list_objects_v2")
1781
+ run_prefixes: list[str] = []
1782
+ for page in paginator.paginate(
1783
+ Bucket=cfg.bucket,
1784
+ Prefix=f"{root}/",
1785
+ Delimiter="/",
1786
+ ):
1787
+ for cp in page.get("CommonPrefixes", []) or []:
1788
+ pfx = cp.get("Prefix")
1789
+ if not pfx:
1790
+ continue
1791
+ name = pfx.rsplit("/", 2)[-2]
1792
+ if name in {"latest", "runs"}:
1793
+ continue
1794
+ is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
1795
+ if is_ts:
1796
+ run_prefixes.append(pfx)
1797
+ run_prefixes.sort(reverse=True)
1798
+ for old in run_prefixes[keep:]:
1799
+ delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
1800
+
1801
+
1802
+ def _ensure_directory_placeholder(
1803
+ cfg: PublishConfig,
1804
+ index_file: Path,
1805
+ dir_prefix: str,
1806
+ ) -> None:
1807
+ if not index_file.exists() or not dir_prefix.endswith("/"):
1808
+ return
1809
+ body = index_file.read_bytes()
1810
+ extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
1811
+ if cfg.ttl_days is not None:
1812
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1813
+ try:
1814
+ _s3(cfg).put_object(
1815
+ Bucket=cfg.bucket,
1816
+ Key=dir_prefix,
1817
+ Body=body,
1818
+ CacheControl=extra["CacheControl"],
1819
+ ContentType=extra["ContentType"],
1820
+ )
1821
+ except ClientError as e: # pragma: no cover
1822
+ print(f"Placeholder upload skipped: {e}")
871
1823
 
872
1824
 
873
1825
  __all__ = [