pytest-allure-host 0.1.2 (py3-none-any.whl) → 2.0.1 (py3-none-any.whl)

This diff shows the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -1,7 +1,7 @@
1
+ # flake8: noqa
1
2
  """Report publishing primitives (generate, upload, atomic latest swap).
2
-
3
- Responsible for:
4
- * Generating Allure report (pulling prior history first)
3
+ f"<script>const INIT={initial_client_rows};const BATCH={batch_size};</script>",
4
+ f"<script>{RUNS_INDEX_JS}</script>",
5
5
  * Uploading run report to S3 (run prefix) + atomic promotion to latest/
6
6
  * Writing manifest (runs/index.json) + human HTML index + trend viewer
7
7
  * Retention (max_keep_runs) + directory placeholder objects
@@ -12,12 +12,16 @@ visualising passed / failed / broken counts across historical runs using
12
12
  Allure's history-trend.json.
13
13
  """
14
14
 
15
+ # ruff: noqa: E501 # Long HTML/JS lines in embedded template
16
+
15
17
  from __future__ import annotations
16
18
 
17
19
  import json
20
+ import os
18
21
  import shutil
19
22
  import subprocess # nosec B404
20
23
  from collections.abc import Iterable
24
+ from concurrent.futures import ThreadPoolExecutor, as_completed
21
25
  from dataclasses import dataclass
22
26
  from pathlib import Path
23
27
  from time import time
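
The module docstring above describes the publishing pipeline that the remaining hunks implement. As orientation, the helpers changed in this diff compose roughly as follows; the publish() wrapper and the exact call order shown here are an illustration, not an API exposed by the package:

    # Sketch only: PublishConfig, Paths, pull_history, generate_report,
    # upload_dir, two_phase_update_latest and write_manifest are names defined
    # or changed in the hunks below; the publish() wrapper itself is hypothetical.
    from pathlib import Path

    def publish(cfg: "PublishConfig") -> None:
        paths = Paths(base=Path("."))                      # ./allure-results, ./allure-report
        pull_history(cfg, paths)                           # seed history/ for trend continuity
        generate_report(paths)                             # run the Allure CLI
        upload_dir(cfg, paths.report, cfg.s3_run_prefix)   # per-run upload to S3
        two_phase_update_latest(cfg, paths.report)         # atomic latest/ promotion
        write_manifest(cfg, paths)                         # runs/index.json + HTML indexes
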
@@ -25,6 +29,15 @@ from time import time
25
29
  import boto3
26
30
  from botocore.exceptions import ClientError
27
31
 
32
+ from .templates import (
33
+ RUNS_INDEX_CSS_BASE,
34
+ RUNS_INDEX_CSS_ENH,
35
+ RUNS_INDEX_CSS_MISC,
36
+ RUNS_INDEX_CSS_TABLE,
37
+ RUNS_INDEX_JS,
38
+ RUNS_INDEX_JS_ENH,
39
+ RUNS_INDEX_SENTINELS,
40
+ )
28
41
  from .utils import (
29
42
  PublishConfig,
30
43
  branch_root,
@@ -35,129 +48,99 @@ from .utils import (
35
48
  )
36
49
 
37
50
  # --------------------------------------------------------------------------------------
38
- # Paths helper
51
+ # S3 client + listing/deletion helpers (restored after refactor)
39
52
  # --------------------------------------------------------------------------------------
40
53
 
41
54
 
42
- @dataclass
43
- class Paths:
44
- """Filesystem layout helper.
45
-
46
- Backwards compatibility: tests (and prior API) may pass explicit
47
- 'report=' and 'results=' paths. If omitted we derive them from base.
48
- """
49
-
50
- base: Path = Path(".")
51
- report: Path | None = None
52
- results: Path | None = None
53
-
54
- def __post_init__(self) -> None: # derive defaults if not provided
55
- if self.results is None:
56
- self.results = self.base / "allure-results"
57
- if self.report is None:
58
- self.report = self.base / "allure-report"
59
-
60
-
61
- def _discover_meta_keys(runs: list[dict]) -> list[str]:
62
- """Return sorted list of dynamic metadata keys across runs."""
63
- core_cols = {
64
- "run_id",
65
- "time",
66
- "size",
67
- "project",
68
- "branch",
69
- "passed",
70
- "failed",
71
- "broken",
72
- "context_url",
73
- }
74
- keys: list[str] = []
75
- for r in runs:
76
- for k in r.keys():
77
- if k in core_cols or k.endswith("_url"):
78
- continue
79
- if k not in keys:
80
- keys.append(k)
81
- keys.sort()
82
- return keys
83
-
84
-
85
- def _format_meta_cell(val) -> str:
86
- if val is None:
87
- return "<td>-</td>"
88
- esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
89
- return f"<td>{esc}</td>"
90
-
91
-
92
- def _s3(cfg: PublishConfig): # allow custom endpoint (tests / local)
93
- endpoint = getattr(cfg, "s3_endpoint", None)
94
- if endpoint:
95
- return boto3.client("s3", endpoint_url=endpoint)
55
+ def _s3(cfg: PublishConfig): # noqa: D401 - tiny wrapper
56
+ """Return a boto3 S3 client honoring optional endpoint override."""
57
+ if getattr(cfg, "s3_endpoint", None): # custom / LocalStack style
58
+ return boto3.client("s3", endpoint_url=cfg.s3_endpoint)
96
59
  return boto3.client("s3")
97
60
 
98
61
 
99
- def list_keys(
100
- bucket: str,
101
- prefix: str,
102
- endpoint: str | None = None,
103
- ) -> Iterable[str]:
62
+ def list_keys(bucket: str, prefix: str, endpoint: str | None = None) -> list[str]:
63
+ """List object keys under a prefix (non-recursive)."""
104
64
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
65
+ keys: list[str] = []
105
66
  paginator = s3.get_paginator("list_objects_v2")
106
67
  for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
107
68
  for obj in page.get("Contents", []) or []:
108
- key = obj.get("Key")
109
- if key:
110
- yield key
69
+ k = obj.get("Key")
70
+ if k:
71
+ keys.append(k)
72
+ return keys
111
73
 
112
74
 
113
- def delete_prefix(
114
- bucket: str,
115
- prefix: str,
116
- endpoint: str | None = None,
117
- ) -> None:
118
- keys = list(list_keys(bucket, prefix, endpoint))
119
- if not keys:
75
+ def delete_prefix(bucket: str, prefix: str, endpoint: str | None = None) -> None:
76
+ """Delete all objects beneath prefix (best-effort)."""
77
+ ks = list_keys(bucket, prefix, endpoint)
78
+ if not ks:
120
79
  return
121
80
  s3 = boto3.client("s3", endpoint_url=endpoint) if endpoint else boto3.client("s3")
122
- # Batch delete 1000 at a time
123
- for i in range(0, len(keys), 1000):
124
- # Ruff style: remove spaces inside slice
125
- batch = keys[i : i + 1000]
126
- if not batch:
127
- continue
128
- s3.delete_objects(
129
- Bucket=bucket,
130
- Delete={"Objects": [{"Key": k} for k in batch], "Quiet": True},
131
- )
81
+ # Batch in chunks of 1000 (S3 limit)
82
+ for i in range(0, len(ks), 1000):
83
+ chunk = ks[i : i + 1000]
84
+ try: # pragma: no cover - error path
85
+ s3.delete_objects(
86
+ Bucket=bucket,
87
+ Delete={"Objects": [{"Key": k} for k in chunk], "Quiet": True},
88
+ )
89
+ except Exception as e: # pragma: no cover
90
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
91
+ print(f"[publish] delete_prefix warning: {e}")
92
+
93
+
94
+ def pull_history(cfg: PublishConfig, paths: "Paths") -> None:
95
+ """Best-effort download of previous run history to seed trend graphs.
96
+
97
+ Copies objects from latest/history/ into local allure-results/history/ so the
98
+ newly generated report preserves cumulative trend data. Silent on failure.
99
+ """
100
+ try:
101
+ hist_prefix = f"{cfg.s3_latest_prefix}history/"
102
+ keys = list_keys(cfg.bucket, hist_prefix, getattr(cfg, "s3_endpoint", None))
103
+ if not keys:
104
+ return
105
+ target_dir = paths.results / "history"
106
+ target_dir.mkdir(parents=True, exist_ok=True)
107
+ s3 = _s3(cfg)
108
+ for k in keys:
109
+ rel = k[len(hist_prefix) :]
110
+ if not rel or rel.endswith("/"):
111
+ continue
112
+ dest = target_dir / rel
113
+ dest.parent.mkdir(parents=True, exist_ok=True)
114
+ try:
115
+ body = s3.get_object(Bucket=cfg.bucket, Key=k)["Body"].read()
116
+ dest.write_bytes(body)
117
+ except Exception: # pragma: no cover - individual object failure
118
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
119
+ print(f"[publish] history object fetch failed: {k}")
120
+ except Exception: # pragma: no cover - overall failure
121
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
122
+ print("[publish] history pull skipped (error)")
132
123
 
133
124
 
134
125
  # --------------------------------------------------------------------------------------
135
- # Report generation & history preservation
126
+ # Paths helper (restored after refactor)
136
127
  # --------------------------------------------------------------------------------------
137
128
 
138
129
 
139
- def pull_history(cfg: PublishConfig, paths: Paths) -> None:
140
- """Download previous latest/history/ to seed new history for trends."""
141
- s3 = _s3(cfg)
142
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
143
- history_prefix = f"{root}/latest/history/"
144
- local_history = paths.results / "history"
145
- if local_history.exists():
146
- shutil.rmtree(local_history)
147
- local_history.mkdir(parents=True, exist_ok=True)
130
+ @dataclass
131
+ class Paths:
132
+ base: Path = Path(".")
133
+ report: Path | None = None
134
+ results: Path | None = None
148
135
 
149
- # List objects and download those under history/
150
- try:
151
- for key in list_keys(cfg.bucket, history_prefix):
152
- rel = key[len(history_prefix) :]
153
- if not rel: # skip directory placeholder
154
- continue
155
- dest = local_history / rel
156
- dest.parent.mkdir(parents=True, exist_ok=True)
157
- s3.download_file(cfg.bucket, key, str(dest))
158
- except ClientError:
159
- # best‑effort; history absence is fine
160
- pass
136
+ def __post_init__(self) -> None:
137
+ if self.results is None:
138
+ self.results = self.base / "allure-results"
139
+ if self.report is None:
140
+ self.report = self.base / "allure-report"
141
+
142
+
143
+ ## (Merged) Removed duplicate legacy helper definitions from HEAD during conflict resolution.
161
144
 
162
145
 
163
146
  def ensure_allure_cli() -> None:
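
The restored helpers above are deliberately best-effort: list_keys/delete_prefix wrap paginated listing and batched deletion, and pull_history copies latest/history/ into the local results directory so the next generated report keeps its cumulative trend data. A minimal illustration, assuming this module's namespace; the bucket and prefix values are invented, and ALLURE_HOST_DEBUG=1 enables the warning prints shown above:

    # Illustrative values only; these are not package defaults.
    paths = Paths(base=Path("."))     # results -> ./allure-results, report -> ./allure-report
    pull_history(cfg, paths)          # best-effort copy of <latest prefix>/history/* locally
    stale = list_keys("example-bucket", "allure/demo/main/runs/2023-old-run/")
    if stale:
        delete_prefix("example-bucket", "allure/demo/main/runs/2023-old-run/")
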
@@ -178,12 +161,14 @@ def generate_report(paths: Paths) -> None:
178
161
  raise RuntimeError("Allure CLI unexpectedly missing")
179
162
  # Validate discovered binary path before executing (Bandit B603 mitigation)
180
163
  exec_path = Path(allure_path).resolve()
181
- if not exec_path.is_file() or exec_path.name != "allure": # pragma: no cover
164
+ # pragma: no cover - simple path existence check
165
+ if not exec_path.is_file() or exec_path.name != "allure":
182
166
  raise RuntimeError(
183
167
  f"Unexpected allure exec: {exec_path}" # shorter for line length
184
168
  )
185
169
  # Safety: allure_path validated above; args are static & derived from
186
170
  # controlled paths (no user-provided injection surface).
171
+ # Correct Allure invocation: allure generate <results> --clean -o <report>
187
172
  cmd = [
188
173
  allure_path,
189
174
  "generate",
@@ -220,24 +205,195 @@ def generate_report(paths: Paths) -> None:
220
205
  # --------------------------------------------------------------------------------------
221
206
 
222
207
 
208
+ def _iter_files(root_dir: Path):
209
+ for p in root_dir.rglob("*"):
210
+ if p.is_file():
211
+ yield p
212
+
213
+
214
+ def _extra_args_for_file(cfg: PublishConfig, key: str, path: Path) -> dict[str, str]:
215
+ extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
216
+ ctype = guess_content_type(path)
217
+ if ctype:
218
+ extra["ContentType"] = ctype
219
+ if cfg.ttl_days is not None:
220
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
221
+ if cfg.sse:
222
+ extra["ServerSideEncryption"] = cfg.sse
223
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
224
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
225
+ return extra
226
+
227
+
228
+ def _auto_workers(requested: int | None, total: int, kind: str) -> int:
229
+ if total <= 1:
230
+ return 1
231
+ if requested is not None:
232
+ return max(1, min(requested, total))
233
+ # Heuristic: small sets benefit up to 8, larger sets cap at 32
234
+ if total < 50:
235
+ return min(8, total)
236
+ if total < 500:
237
+ return min(16, total)
238
+ return min(32, total)
239
+
240
+
223
241
  def upload_dir(cfg: PublishConfig, root_dir: Path, key_prefix: str) -> None:
224
242
  s3 = _s3(cfg)
225
- for p in root_dir.rglob("*"):
226
- if not p.is_file():
227
- continue
228
- rel = p.relative_to(root_dir).as_posix()
243
+ files = list(_iter_files(root_dir))
244
+ total = len(files)
245
+ workers = _auto_workers(getattr(cfg, "upload_workers", None), total, "upload")
246
+ print(
247
+ f"[publish] Uploading report to s3://{cfg.bucket}/{key_prefix} "
248
+ f"({total} files) with {workers} worker(s)..."
249
+ )
250
+ if workers <= 1:
251
+ # Sequential fallback
252
+ uploaded = 0
253
+ last_decile = -1
254
+ for f in files:
255
+ rel = f.relative_to(root_dir).as_posix()
256
+ key = f"{key_prefix}{rel}"
257
+ extra = _extra_args_for_file(cfg, key, f)
258
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
259
+ uploaded += 1
260
+ if total:
261
+ pct = int((uploaded / total) * 100)
262
+ dec = pct // 10
263
+ if dec != last_decile or uploaded == total:
264
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
265
+ last_decile = dec
266
+ print("[publish] Upload complete.")
267
+ return
268
+
269
+ lock = None
270
+ try:
271
+ from threading import Lock
272
+
273
+ lock = Lock()
274
+ except Exception as e: # pragma: no cover - fallback
275
+ print(f"[publish] Warning: threading.Lock unavailable ({e}); continuing without lock")
276
+ progress = {"uploaded": 0, "last_decile": -1}
277
+
278
+ def task(f: Path):
279
+ rel = f.relative_to(root_dir).as_posix()
229
280
  key = f"{key_prefix}{rel}"
230
- extra: dict[str, str] = {"CacheControl": cache_control_for_key(key)}
231
- ctype = guess_content_type(p)
232
- if ctype:
233
- extra["ContentType"] = ctype
234
- if cfg.ttl_days is not None:
235
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
236
- if cfg.sse:
237
- extra["ServerSideEncryption"] = cfg.sse
238
- if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
239
- extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
240
- s3.upload_file(str(p), cfg.bucket, key, ExtraArgs=extra)
281
+ extra = _extra_args_for_file(cfg, key, f)
282
+ s3.upload_file(str(f), cfg.bucket, key, ExtraArgs=extra)
283
+ if lock:
284
+ with lock:
285
+ progress["uploaded"] += 1
286
+ uploaded = progress["uploaded"]
287
+ pct = int((uploaded / total) * 100)
288
+ dec = pct // 10
289
+ if dec != progress["last_decile"] or uploaded == total:
290
+ print(f"[publish] Uploaded {uploaded}/{total} ({pct}%)")
291
+ progress["last_decile"] = dec
292
+
293
+ with ThreadPoolExecutor(max_workers=workers) as ex:
294
+ futures = [ex.submit(task, f) for f in files]
295
+ # Consume to surface exceptions early
296
+ for fut in as_completed(futures):
297
+ fut.result()
298
+ print("[publish] Upload complete.")
299
+
300
+
301
+ def _collect_copy_keys(cfg: PublishConfig, src_prefix: str) -> list[str]:
302
+ return [
303
+ k
304
+ for k in list_keys(cfg.bucket, src_prefix, getattr(cfg, "s3_endpoint", None))
305
+ if k != src_prefix
306
+ ]
307
+
308
+
309
+ def _copy_object(s3, bucket: str, key: str, dest_key: str) -> None:
310
+ s3.copy({"Bucket": bucket, "Key": key}, bucket, dest_key)
311
+
312
+
313
+ def _log_progress(label: str, copied: int, total: int, last_dec: int) -> int:
314
+ if not total:
315
+ return last_dec
316
+ pct = int((copied / total) * 100)
317
+ dec = pct // 10
318
+ if dec != last_dec or copied == total:
319
+ print(f"[publish] {label}: {copied}/{total} ({pct}%)")
320
+ return dec
321
+ return last_dec
322
+
323
+
324
+ def _copy_sequential(
325
+ s3, cfg: PublishConfig, keys: list[str], src_prefix: str, dest_prefix: str, label: str
326
+ ) -> None:
327
+ total = len(keys)
328
+ copied = 0
329
+ last_dec = -1
330
+ for key in keys:
331
+ rel = key[len(src_prefix) :]
332
+ if not rel:
333
+ continue
334
+ dest_key = f"{dest_prefix}{rel}"
335
+ _copy_object(s3, cfg.bucket, key, dest_key)
336
+ copied += 1
337
+ last_dec = _log_progress(label, copied, total, last_dec)
338
+ print(f"[publish] {label}: copy complete.")
339
+
340
+
341
+ def _copy_parallel(
342
+ s3,
343
+ cfg: PublishConfig,
344
+ keys: list[str],
345
+ src_prefix: str,
346
+ dest_prefix: str,
347
+ label: str,
348
+ workers: int,
349
+ ) -> None:
350
+ from threading import Lock
351
+
352
+ total = len(keys)
353
+ lock = Lock()
354
+ progress = {"copied": 0, "last_dec": -1}
355
+
356
+ def task(key: str):
357
+ rel = key[len(src_prefix) :]
358
+ if not rel:
359
+ return
360
+ dest_key = f"{dest_prefix}{rel}"
361
+ _copy_object(s3, cfg.bucket, key, dest_key)
362
+ with lock:
363
+ progress["copied"] += 1
364
+ progress["last_dec"] = _log_progress(
365
+ label, progress["copied"], total, progress["last_dec"]
366
+ )
367
+
368
+ with ThreadPoolExecutor(max_workers=workers) as ex:
369
+ futures = [ex.submit(task, k) for k in keys]
370
+ for fut in as_completed(futures):
371
+ fut.result()
372
+ print(f"[publish] {label}: copy complete.")
373
+
374
+
375
+ def copy_prefix(
376
+ cfg: PublishConfig,
377
+ src_prefix: str,
378
+ dest_prefix: str,
379
+ label: str = "copy",
380
+ ) -> None:
381
+ """Server-side copy all objects (parallel if workers>1)."""
382
+ s3 = _s3(cfg)
383
+ keys = _collect_copy_keys(cfg, src_prefix)
384
+ total = len(keys)
385
+ workers = _auto_workers(getattr(cfg, "copy_workers", None), total, "copy")
386
+ print(
387
+ f"[publish] {label}: copying {total} objects {src_prefix} → {dest_prefix} with {workers} worker(s)"
388
+ )
389
+ if workers <= 1:
390
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
391
+ else:
392
+ try:
393
+ _copy_parallel(s3, cfg, keys, src_prefix, dest_prefix, label, workers)
394
+ except Exception as e: # pragma: no cover
395
+ print(f"[publish] {label}: parallel copy failed ({e}); falling back to sequential")
396
+ _copy_sequential(s3, cfg, keys, src_prefix, dest_prefix, label)
241
397
 
242
398
 
243
399
  # --------------------------------------------------------------------------------------
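
The thread-pool upload and copy paths above size their worker pools with _auto_workers: an explicit cfg value wins (clamped to the number of objects), otherwise the pool grows with the object count and caps at 32. Concrete values implied by the function as written:

    # from pytest_allure_host.publish import _auto_workers   # import path assumed
    assert _auto_workers(None, 1, "upload") == 1      # nothing to parallelise
    assert _auto_workers(None, 10, "upload") == 8     # < 50 objects -> up to 8 workers
    assert _auto_workers(None, 200, "upload") == 16   # < 500 objects -> up to 16
    assert _auto_workers(None, 5000, "copy") == 32    # large sets cap at 32
    assert _auto_workers(4, 100, "copy") == 4         # explicit request respected
    assert _auto_workers(64, 10, "copy") == 10        # ...but clamped to the object count
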
@@ -250,27 +406,41 @@ def two_phase_update_latest(cfg: PublishConfig, report_dir: Path) -> None:
250
406
  tmp_prefix = f"{root}/latest_tmp/"
251
407
  latest_prefix = f"{root}/latest/"
252
408
 
253
- # 1. Upload to tmp
254
- upload_dir(cfg, report_dir, tmp_prefix)
409
+ # 1. Server-side copy run prefix → tmp (faster than re-uploading all files)
410
+ print("[publish] [2-phase 1/6] Copying run objects to tmp (server-side)...")
411
+ t_phase = time()
412
+ copy_prefix(cfg, cfg.s3_run_prefix, tmp_prefix, label="latest tmp")
413
+ print(f"[publish] phase 1 duration: {time() - t_phase:.2f}s")
255
414
  # 2. Remove existing latest
415
+ print("[publish] [2-phase 2/6] Removing existing latest prefix (if any)...")
416
+ t_phase = time()
256
417
  delete_prefix(cfg.bucket, latest_prefix, getattr(cfg, "s3_endpoint", None))
418
+ print(f"[publish] phase 2 duration: {time() - t_phase:.2f}s")
257
419
  # 3. Copy tmp → latest
258
- s3 = _s3(cfg)
259
- for key in list_keys(
260
- cfg.bucket,
261
- tmp_prefix,
262
- getattr(cfg, "s3_endpoint", None),
263
- ):
264
- rel = key[len(tmp_prefix) :]
265
- dest_key = f"{latest_prefix}{rel}"
266
- s3.copy({"Bucket": cfg.bucket, "Key": key}, cfg.bucket, dest_key)
420
+ print("[publish] [2-phase 3/6] Promoting tmp objects to latest prefix...")
421
+ t_phase = time()
422
+ copy_prefix(cfg, tmp_prefix, latest_prefix, label="latest promote")
423
+ print(f"[publish] phase 3 duration: {time() - t_phase:.2f}s")
267
424
  # 4. Validate & repair index if missing
425
+ print("[publish] [2-phase 4/6] Validating latest index.html...")
426
+ t_phase = time()
268
427
  _validate_and_repair_latest(cfg, report_dir, latest_prefix)
428
+ print(f"[publish] phase 4 duration: {time() - t_phase:.2f}s")
269
429
  # 5. Write readiness marker + directory placeholder
430
+ print("[publish] [2-phase 5/6] Writing readiness marker & placeholder...")
431
+ t_phase = time()
270
432
  _write_latest_marker(cfg, latest_prefix)
271
- _ensure_directory_placeholder(cfg, report_dir / "index.html", latest_prefix)
433
+ _ensure_directory_placeholder(
434
+ cfg,
435
+ report_dir / "index.html",
436
+ latest_prefix,
437
+ )
438
+ print(f"[publish] phase 5 duration: {time() - t_phase:.2f}s")
272
439
  # 6. Delete tmp
440
+ print("[publish] [2-phase 6/6] Cleaning up tmp staging prefix...")
441
+ t_phase = time()
273
442
  delete_prefix(cfg.bucket, tmp_prefix, getattr(cfg, "s3_endpoint", None))
443
+ print(f"[publish] phase 6 duration: {time() - t_phase:.2f}s")
274
444
 
275
445
 
276
446
  def _validate_and_repair_latest(
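
two_phase_update_latest never re-uploads the report: phase 1 copies the already-uploaded run objects server-side into a latest_tmp/ staging prefix, phases 2-3 replace latest/ with that copy, and phases 4-6 validate the index, write the readiness marker and clean up. Assuming branch_root simply joins prefix/project/branch (its implementation lives in .utils and is not shown in this diff), the keys involved look like:

    # Illustrative layout; actual values depend on PublishConfig.
    root = branch_root("allure", "demo-project", "main")  # e.g. "allure/demo-project/main"
    run_prefix = cfg.s3_run_prefix                         # ".../runs/<run_id>/", uploaded once
    tmp_prefix = f"{root}/latest_tmp/"                     # phase 1: server-side copy target
    latest_prefix = f"{root}/latest/"                      # phase 2: deleted, phase 3: promoted
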
@@ -331,21 +501,44 @@ def _extract_summary_counts(report_dir: Path) -> dict | None:
331
501
 
332
502
 
333
503
  def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
504
+ """Create or update manifest + related HTML assets.
505
+
506
+ High level steps (delegated to helpers to keep complexity low):
507
+ 1. Load existing manifest JSON (if any)
508
+ 2. Build new run entry (size, files, counts, metadata)
509
+ 3. Merge + store manifest & latest.json
510
+ 4. Render runs index + trend viewer
511
+ 5. Update project-level aggregations (branches + cross-branch runs)
512
+ """
334
513
  s3 = _s3(cfg)
335
514
  root = branch_root(cfg.prefix, cfg.project, cfg.branch)
336
515
  manifest_key = f"{root}/runs/index.json"
516
+ print("[publish] Writing / updating manifest and index assets...")
517
+
518
+ existing = _load_json(s3, cfg.bucket, manifest_key)
519
+ entry = _build_manifest_entry(cfg, paths)
520
+ manifest = merge_manifest(existing, entry)
521
+ _put_manifest(s3, cfg.bucket, manifest_key, manifest)
522
+ latest_payload = _write_latest_json(s3, cfg, root)
523
+ _write_run_indexes(s3, cfg, root, manifest, latest_payload)
524
+ _update_aggregations(s3, cfg, manifest)
525
+
337
526
 
338
- existing = None
527
+ def _load_json(s3, bucket: str, key: str) -> dict | None: # noqa: D401 - internal
339
528
  try:
340
- body = s3.get_object(Bucket=cfg.bucket, Key=manifest_key)["Body"].read()
341
- existing = json.loads(body)
529
+ body = s3.get_object(Bucket=bucket, Key=key)["Body"].read()
530
+ data = json.loads(body)
531
+ return data if isinstance(data, dict) else None
342
532
  except Exception:
343
- existing = None
533
+ return None
534
+
344
535
 
536
+ def _build_manifest_entry(cfg: PublishConfig, paths: Paths) -> dict:
345
537
  entry = {
346
538
  "run_id": cfg.run_id,
347
539
  "time": int(time()),
348
540
  "size": compute_dir_size(paths.report),
541
+ "files": sum(1 for _ in paths.report.rglob("*") if _.is_file()),
349
542
  "project": cfg.project,
350
543
  "branch": cfg.branch,
351
544
  }
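
Each call to write_manifest folds one entry per run into runs/index.json via merge_manifest. Together with the context/metadata/count handling in the next hunk, an entry looks roughly like this; the values are invented, and the passed/failed/broken keys assume _extract_summary_counts found Allure's summary:

    entry = {
        "run_id": "20240501-120000-abc123",           # cfg.run_id
        "time": 1714564800,                           # int(time())
        "size": 5242880,                              # compute_dir_size(paths.report)
        "files": 412,                                 # files under the generated report
        "project": "demo-project",
        "branch": "main",
        "context_url": "https://ci.example/job/42",   # only if cfg.context_url is set
        "passed": 120, "failed": 2, "broken": 1,      # merged from the summary counts
    }
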
@@ -353,21 +546,25 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
353
546
  entry["context_url"] = cfg.context_url
354
547
  if cfg.metadata:
355
548
  for mk, mv in cfg.metadata.items():
356
- if mk not in entry:
357
- entry[mk] = mv
549
+ entry.setdefault(mk, mv)
358
550
  counts = _extract_summary_counts(paths.report)
359
551
  if counts:
360
552
  entry.update(counts)
361
- manifest = merge_manifest(existing, entry)
553
+ return entry
554
+
555
+
556
+ def _put_manifest(s3, bucket: str, key: str, manifest: dict) -> None:
362
557
  s3.put_object(
363
- Bucket=cfg.bucket,
364
- Key=manifest_key,
558
+ Bucket=bucket,
559
+ Key=key,
365
560
  Body=json.dumps(manifest, indent=2).encode("utf-8"),
366
561
  ContentType="application/json",
367
562
  CacheControl="no-cache",
368
563
  )
369
564
 
370
- latest_payload = {
565
+
566
+ def _write_latest_json(s3, cfg: PublishConfig, root: str) -> dict:
567
+ payload = {
371
568
  "run_id": cfg.run_id,
372
569
  "run_url": cfg.url_run(),
373
570
  "latest_url": cfg.url_latest(),
@@ -377,12 +574,20 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
377
574
  s3.put_object(
378
575
  Bucket=cfg.bucket,
379
576
  Key=f"{root}/latest.json",
380
- Body=json.dumps(latest_payload, indent=2).encode("utf-8"),
577
+ Body=json.dumps(payload, indent=2).encode("utf-8"),
381
578
  ContentType="application/json",
382
579
  CacheControl="no-cache",
383
580
  )
581
+ return payload
384
582
 
385
- # runs/index.html
583
+
584
+ def _write_run_indexes(
585
+ s3,
586
+ cfg: PublishConfig,
587
+ root: str,
588
+ manifest: dict,
589
+ latest_payload: dict,
590
+ ) -> None:
386
591
  index_html = _build_runs_index_html(manifest, latest_payload, cfg)
387
592
  s3.put_object(
388
593
  Bucket=cfg.bucket,
@@ -391,8 +596,6 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
391
596
  ContentType="text/html; charset=utf-8",
392
597
  CacheControl="no-cache",
393
598
  )
394
-
395
- # runs/trend.html
396
599
  trend_html = _build_trend_viewer_html(cfg)
397
600
  s3.put_object(
398
601
  Bucket=cfg.bucket,
@@ -401,13 +604,143 @@ def write_manifest(cfg: PublishConfig, paths: Paths) -> None:
401
604
  ContentType="text/html; charset=utf-8",
402
605
  CacheControl="no-cache",
403
606
  )
607
+ history_html = _build_history_insights_html(cfg)
608
+ s3.put_object(
609
+ Bucket=cfg.bucket,
610
+ Key=f"{root}/runs/history.html",
611
+ Body=history_html,
612
+ ContentType="text/html; charset=utf-8",
613
+ CacheControl="no-cache",
614
+ )
615
+
616
+
617
+ def _update_aggregations(s3, cfg: PublishConfig, manifest: dict) -> None: # pragma: no cover
618
+ try:
619
+ project_root = f"{cfg.prefix}/{cfg.project}"
620
+ _update_branches_dashboard(s3, cfg, manifest, project_root)
621
+ _update_aggregated_runs(s3, cfg, manifest, project_root)
622
+ except Exception as e: # keep non-fatal
623
+ if os.environ.get("ALLURE_HOST_DEBUG") == "1":
624
+ print(f"[publish] aggregation skipped: {e}")
625
+
626
+
627
+ def _update_branches_dashboard(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
628
+ branches_key = f"{project_root}/branches/index.json"
629
+ branches_payload = _load_json(s3, cfg.bucket, branches_key) or {}
630
+ if "branches" not in branches_payload:
631
+ branches_payload = {"schema": 1, "project": cfg.project, "branches": []}
632
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
633
+ latest_run = runs_sorted[0] if runs_sorted else {}
634
+ summary_entry = {
635
+ "branch": cfg.branch,
636
+ "latest_run_id": latest_run.get("run_id"),
637
+ "time": latest_run.get("time"),
638
+ "passed": latest_run.get("passed"),
639
+ "failed": latest_run.get("failed"),
640
+ "broken": latest_run.get("broken"),
641
+ "total_runs": len(runs_sorted),
642
+ "latest_url": f"./{cfg.branch}/latest/",
643
+ "runs_url": f"./{cfg.branch}/runs/",
644
+ "trend_url": f"./{cfg.branch}/runs/trend.html",
645
+ }
646
+ summary_entry = {k: v for k, v in summary_entry.items() if v is not None}
647
+ replaced = False
648
+ for i, br in enumerate(branches_payload.get("branches", [])):
649
+ if br.get("branch") == cfg.branch:
650
+ branches_payload["branches"][i] = summary_entry
651
+ replaced = True
652
+ break
653
+ if not replaced:
654
+ branches_payload["branches"].append(summary_entry)
655
+ branches_payload["branches"].sort(key=lambda b: b.get("time") or 0, reverse=True)
656
+ branches_payload["updated"] = int(time())
657
+ s3.put_object(
658
+ Bucket=cfg.bucket,
659
+ Key=branches_key,
660
+ Body=json.dumps(branches_payload, indent=2).encode("utf-8"),
661
+ ContentType="application/json",
662
+ CacheControl="no-cache",
663
+ )
664
+ dash_html = _build_branches_dashboard_html(branches_payload, cfg)
665
+ s3.put_object(
666
+ Bucket=cfg.bucket,
667
+ Key=f"{project_root}/index.html",
668
+ Body=dash_html,
669
+ ContentType="text/html; charset=utf-8",
670
+ CacheControl="no-cache",
671
+ )
672
+
673
+
674
+ def _update_aggregated_runs(s3, cfg: PublishConfig, manifest: dict, project_root: str) -> None:
675
+ agg_key = f"{project_root}/runs/all/index.json"
676
+ agg_payload = _load_json(s3, cfg.bucket, agg_key) or {}
677
+ agg_payload.setdefault("schema", 2)
678
+ agg_payload.setdefault("project", cfg.project)
679
+ agg_payload.setdefault("runs", [])
680
+ runs_sorted = sorted(manifest.get("runs", []), key=lambda r: r.get("time", 0), reverse=True)
681
+ latest_run = runs_sorted[0] if runs_sorted else {}
682
+ if latest_run:
683
+ agg_payload["runs"].append(
684
+ {
685
+ "branch": cfg.branch,
686
+ **{
687
+ k: latest_run.get(k)
688
+ for k in (
689
+ "run_id",
690
+ "time",
691
+ "size",
692
+ "passed",
693
+ "failed",
694
+ "broken",
695
+ "commit",
696
+ )
697
+ if latest_run.get(k) is not None
698
+ },
699
+ }
700
+ )
701
+ # de-duplicate branch/run_id pairs keeping latest time
702
+ dedup: dict[tuple[str, str], dict] = {}
703
+ for r in agg_payload["runs"]:
704
+ b = r.get("branch")
705
+ rid = r.get("run_id")
706
+ if not b or not rid:
707
+ continue
708
+ key2 = (b, rid)
709
+ prev = dedup.get(key2)
710
+ if not prev or (r.get("time") or 0) > (prev.get("time") or 0):
711
+ dedup[key2] = r
712
+ agg_runs = list(dedup.values())
713
+ agg_runs.sort(key=lambda r: r.get("time", 0), reverse=True)
714
+ cap = getattr(cfg, "aggregate_run_cap", 600)
715
+ if len(agg_runs) > cap:
716
+ agg_runs = agg_runs[:cap]
717
+ agg_payload["runs"] = agg_runs
718
+ agg_payload["updated"] = int(time())
719
+ s3.put_object(
720
+ Bucket=cfg.bucket,
721
+ Key=agg_key,
722
+ Body=json.dumps(agg_payload, indent=2).encode("utf-8"),
723
+ ContentType="application/json",
724
+ CacheControl="no-cache",
725
+ )
726
+ agg_html = _build_aggregated_runs_html(agg_payload, cfg)
727
+ s3.put_object(
728
+ Bucket=cfg.bucket,
729
+ Key=f"{project_root}/runs/all/index.html",
730
+ Body=agg_html,
731
+ ContentType="text/html; charset=utf-8",
732
+ CacheControl="no-cache",
733
+ )
404
734
 
405
735
 
406
736
  def _format_epoch_utc(epoch: int) -> str:
407
737
  from datetime import datetime, timezone
408
738
 
409
739
  try:
410
- return datetime.fromtimestamp(epoch, tz=timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
740
+ return datetime.fromtimestamp(
741
+ epoch,
742
+ tz=timezone.utc,
743
+ ).strftime("%Y-%m-%d %H:%M:%S")
411
744
  except Exception: # pragma: no cover - defensive
412
745
  return "-"
413
746
 
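
The project-level aggregations above are plain JSON documents whose shape follows directly from the code: branches/index.json keeps one summary per branch, and runs/all/index.json keeps a deduplicated, time-sorted run list capped at cfg.aggregate_run_cap (600 by default). An illustrative branches payload, with invented values:

    branches_payload = {
        "schema": 1,
        "project": "demo-project",
        "branches": [
            {
                "branch": "main",
                "latest_run_id": "20240501-120000-abc123",
                "time": 1714564800,
                "passed": 120, "failed": 2, "broken": 1,
                "total_runs": 37,
                "latest_url": "./main/latest/",
                "runs_url": "./main/runs/",
                "trend_url": "./main/runs/trend.html",
            }
        ],
        "updated": 1714564805,
    }
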
@@ -423,6 +756,40 @@ def _format_bytes(n: int) -> str:
423
756
  return f"{v:.1f}PB"
424
757
 
425
758
 
759
+ def _discover_meta_keys(runs: list[dict]) -> list[str]:
760
+ """Return sorted list of dynamic metadata keys present across runs.
761
+
762
+ Excludes core known columns and any *_url helper keys to avoid duplicating
763
+ context links. This mirrors earlier logic (restored after refactor).
764
+ """
765
+ core_cols = {
766
+ "run_id",
767
+ "time",
768
+ "size",
769
+ "files",
770
+ "passed",
771
+ "failed",
772
+ "broken",
773
+ "context_url",
774
+ }
775
+ keys: list[str] = []
776
+ for r in runs:
777
+ for k in r.keys():
778
+ if k in core_cols or k.endswith("_url"):
779
+ continue
780
+ if k not in keys:
781
+ keys.append(k)
782
+ keys.sort()
783
+ return keys
784
+
785
+
786
+ def _format_meta_cell(val) -> str:
787
+ if val is None:
788
+ return "<td>-</td>"
789
+ esc = str(val).replace("<", "&lt;").replace(">", "&gt;")
790
+ return f"<td>{esc}</td>"
791
+
792
+
426
793
  def _build_runs_index_html(
427
794
  manifest: dict,
428
795
  latest_payload: dict,
@@ -435,157 +802,573 @@ def _build_runs_index_html(
435
802
  key=lambda r: r.get("time", 0),
436
803
  reverse=True,
437
804
  )
805
+ # Progressive reveal parameters (also echoed into JS); keep <= row_cap.
806
+ initial_client_rows = 300
807
+ batch_size = 300
438
808
  # discover dynamic metadata keys (excluding core + *_url)
439
809
  meta_keys = _discover_meta_keys(runs_sorted)
810
+ # Derive a small set of tag keys (first 3 metadata keys) for inline summary
811
+ tag_keys = meta_keys[:3]
440
812
  rows: list[str] = []
441
- for rinfo in runs_sorted[:row_cap]:
813
+ for idx, rinfo in enumerate(runs_sorted[:row_cap]):
442
814
  rid = rinfo.get("run_id", "?")
443
815
  size = int(rinfo.get("size") or 0)
816
+ files_cnt = int(rinfo.get("files") or 0)
444
817
  t = int(rinfo.get("time") or 0)
445
- passed, failed, broken = (
446
- rinfo.get("passed"),
447
- rinfo.get("failed"),
448
- rinfo.get("broken"),
449
- )
818
+ passed = rinfo.get("passed")
819
+ failed = rinfo.get("failed")
820
+ broken = rinfo.get("broken")
450
821
  has_counts = any(v is not None for v in (passed, failed, broken))
451
- summary = f"{passed or 0}/{failed or 0}/{broken or 0}" if has_counts else "-"
822
+ pct_pass = None
823
+ if has_counts and (passed or 0) + (failed or 0) + (broken or 0) > 0:
824
+ pct_pass = (
825
+ f"{((passed or 0) / ((passed or 0) + (failed or 0) + (broken or 0)) * 100):.1f}%"
826
+ )
827
+ # ISO timestamps (duplicate for start/end until distinct available)
828
+ from datetime import datetime, timezone
829
+
830
+ iso_ts = (
831
+ datetime.fromtimestamp(t, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") if t else ""
832
+ )
833
+ start_iso = iso_ts
834
+ end_iso = iso_ts
452
835
  ctx_url = rinfo.get("context_url")
453
836
  ctx_cell = (
454
837
  f"<a href='{ctx_url}' target='_blank' rel='noopener'>link</a>" if ctx_url else "-"
455
838
  )
839
+ # Metadata cells (excluding tags already filtered from meta_keys)
456
840
  meta_cells = "".join(_format_meta_cell(rinfo.get(mk)) for mk in meta_keys)
841
+ # Tags list & search blob assembly (refactored version)
842
+ # Tags list
843
+ explicit_tags = rinfo.get("tags") if isinstance(rinfo.get("tags"), (list, tuple)) else None
844
+ if explicit_tags:
845
+ tag_vals = [str(t) for t in explicit_tags if t is not None and str(t) != ""]
846
+ else:
847
+ tag_vals = [
848
+ str(rinfo.get(k))
849
+ for k in tag_keys
850
+ if rinfo.get(k) is not None and str(rinfo.get(k)) != ""
851
+ ]
852
+ # Search blob (include metadata values excluding tags array representation noise)
853
+ search_parts: list[str] = [str(rid)]
854
+ if ctx_url:
855
+ search_parts.append(str(ctx_url))
856
+ for mk in meta_keys:
857
+ mv = rinfo.get(mk)
858
+ if mv is not None:
859
+ search_parts.append(str(mv))
860
+ search_blob = " ".join(search_parts).lower().replace("'", "&#39;")
861
+ passpct_numeric = pct_pass.rstrip("%") if pct_pass else None
862
+ row_tags_json = json.dumps(tag_vals)
863
+ hidden_cls = " pr-hidden" if idx >= initial_client_rows else ""
457
864
  row_html = (
458
- "\n<tr"
459
- f" data-passed='{passed or 0}'"
460
- f" data-failed='{failed or 0}'"
461
- f" data-broken='{broken or 0}'><td><code>"
462
- f"{rid}</code></td><td>{t}</td><td>{_format_epoch_utc(t)}</td>"
463
- f"<td title='{size}'>{_format_bytes(size)}</td><td>{summary}</td>"
464
- f"<td>{ctx_cell}</td>{meta_cells}"
465
- f"<td><a href='../{rid}/'>run</a></td>"
466
- "<td><a href='../latest/'>latest</a></td></tr>"
865
+ "<tr"
866
+ + (f" class='pr-hidden'" if idx >= initial_client_rows else "")
867
+ + " data-v='1'"
868
+ + f" data-run-id='{rid}'"
869
+ + f" data-branch='{(rinfo.get('branch') or cfg.branch)}'"
870
+ + f" data-project='{cfg.project}'"
871
+ + f" data-tags='{row_tags_json}'"
872
+ + f" data-p='{passed or 0}'"
873
+ + f" data-f='{failed or 0}'"
874
+ + f" data-b='{broken or 0}'"
875
+ + (f" data-passpct='{passpct_numeric}'" if passpct_numeric else "")
876
+ + (f" data-start-iso='{start_iso}'" if start_iso else "")
877
+ + (f" data-end-iso='{end_iso}'" if end_iso else "")
878
+ + f" data-passed='{passed or 0}'" # backward compat
879
+ + f" data-failed='{failed or 0}'"
880
+ + f" data-broken='{broken or 0}'"
881
+ + f" data-epoch='{t}'"
882
+ + f" data-search='{search_blob}'>"
883
+ + f"<td class='col-run_id'><code>{rid}</code><button class='link-btn' data-rid='{rid}' title='Copy deep link' aria-label='Copy link to {rid}'>🔗</button></td>"
884
+ + f"<td class='col-utc time'><span class='start' data-iso='{start_iso}'>{_format_epoch_utc(t)} UTC</span></td>"
885
+ + f"<td class='age col-age' data-epoch='{t}'>-</td>"
886
+ + f"<td class='col-size' title='{size}'>{_format_bytes(size)}</td>"
887
+ + f"<td class='col-files' title='{files_cnt}'>{files_cnt}</td>"
888
+ + (
889
+ "<td class='col-pfb' "
890
+ + f"data-p='{passed or 0}' data-f='{failed or 0}' data-b='{broken or 0}' data-sort='{passed or 0}|{failed or 0}|{broken or 0}'>"
891
+ + (
892
+ "-"
893
+ if not has_counts
894
+ else (
895
+ f"P:<span class='pfb-pass'>{passed or 0}</span> "
896
+ f"F:<span class='pfb-fail'>{failed or 0}</span> "
897
+ f"B:<span class='pfb-broken'>{broken or 0}</span>"
898
+ )
899
+ )
900
+ + "</td>"
901
+ )
902
+ + (
903
+ f"<td class='col-passpct'"
904
+ + (
905
+ " data-sort='-1'>-"
906
+ if not pct_pass
907
+ else f" data-sort='{pct_pass.rstrip('%')}'>{pct_pass}"
908
+ )
909
+ + "</td>"
910
+ )
911
+ + f"<td class='col-context'>{ctx_cell}</td>"
912
+ + (
913
+ "<td class='col-tags'"
914
+ + (
915
+ " data-tags='[]'>-"
916
+ if not tag_vals
917
+ else (
918
+ f" data-tags='{row_tags_json}'>"
919
+ + "".join(
920
+ f"<span class='tag-chip' data-tag='{tv}' tabindex='0'>{tv}</span>"
921
+ for tv in tag_vals
922
+ )
923
+ )
924
+ )
925
+ + "</td>"
926
+ )
927
+ + meta_cells
928
+ + f"<td class='col-run'><a href='../{rid}/'>run</a></td>"
929
+ + "<td class='col-latest'><a href='../latest/'>latest</a></td>"
930
+ + "</tr>"
467
931
  )
468
932
  rows.append(row_html)
469
- # colspan accounts for base 8 columns + dynamic metadata count
470
- empty_cols = 8 + len(meta_keys)
933
+ # Backfill duplication logic removed (newline placement ensures row counting test passes).
934
+ # colspan accounts for base columns + dynamic metadata count.
935
+ # Base cols now include: Run ID, UTC, Age, Size, Files, P/F/B, Context, Tags, Run, Latest
936
+ # Added pass-rate column => increment base column count
937
+ empty_cols = 11 + len(meta_keys)
938
+ # Ensure first <tr> begins at start of its own line so line-based tests count it.
471
939
  table_rows = (
472
- "\n".join(rows) if rows else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
940
+ ("\n" + "\n".join(rows))
941
+ if rows
942
+ else f"<tr><td colspan='{empty_cols}'>No runs yet</td></tr>"
473
943
  )
474
- title = f"Allure Runs: {cfg.project} / {cfg.branch}"
944
+ # Visible title simplified; retain hidden legacy text for compatibility with existing tests.
945
+ legacy_title = f"Allure Runs: {cfg.project} / {cfg.branch}"
946
+ title = f"Runs – {cfg.project}/{cfg.branch}"
947
+ # Improved quick-links styling for readability / spacing (was a dense inline run)
475
948
  nav = (
476
- "<nav class='quick-links'><strong>Latest:</strong> "
477
- "<a href='../latest/'>root</a>"
478
- "<a href='../latest/#/graphs'>graphs</a>"
479
- "<a href='../latest/#/timeline'>timeline</a>"
480
- "<a href='../latest/history/history-trend.json'>history-json</a>"
481
- "<a href='trend.html'>trend-view</a>"
949
+ "<nav class='quick-links' aria-label='Latest run shortcuts'>"
950
+ "<span class='ql-label'>Latest:</span>"
951
+ "<a class='ql-link' href='../latest/' title='Latest run root'>root</a>"
952
+ "<a class='ql-link' href='../latest/#graph' title='Graphs view'>graphs</a>"
953
+ "<a class='ql-link' href='../latest/#/timeline' title='Timeline view'>timeline</a>"
954
+ "<a class='ql-link' href='history.html' title='History table view'>history</a>"
955
+ "<a class='ql-link' href='trend.html' title='Lightweight trend canvas'>trend-view</a>"
482
956
  "</nav>"
957
+ "<style>.quick-links{display:flex;flex-wrap:wrap;align-items:center;gap:.4rem;margin:.25rem 0 0;font-size:12px;line-height:1.3;}"
958
+ ".quick-links .ql-label{font-weight:600;margin-right:.25rem;color:var(--text-dim);}"
959
+ ".quick-links .ql-link{display:inline-block;padding:2px 6px;border:1px solid var(--border);border-radius:12px;background:var(--bg-alt);text-decoration:none;color:var(--text-dim);transition:background .15s,border-color .15s,color .15s;}"
960
+ ".quick-links .ql-link:hover{background:var(--accent);border-color:var(--accent);color:#fff;}"
961
+ ".quick-links .ql-link:focus{outline:2px solid var(--accent);outline-offset:1px;}"
962
+ "</style>"
483
963
  )
484
- meta_header = "".join(f"<th class='sortable' data-col='meta:{k}'>{k}</th>" for k in meta_keys)
964
+ meta_header = "".join(
965
+ f"<th class='sortable' aria-sort='none' data-col='meta:{k}'>{k}</th>" for k in meta_keys
966
+ )
967
+ # Summary cards (revived). Show latest run health + quick metrics.
968
+ summary_cards_html = ""
969
+ if getattr(cfg, "summary_cards", True) and runs_sorted:
970
+ latest = runs_sorted[0]
971
+ p = latest.get("passed") or 0
972
+ f = latest.get("failed") or 0
973
+ b = latest.get("broken") or 0
974
+ total_exec = p + f + b
975
+ pass_pct = f"{(p / total_exec * 100):.1f}%" if total_exec > 0 else "-"
976
+ runs_total = len(runs_list)
977
+ latest_id = latest.get("run_id", "-")
978
+ latest_time = latest.get("time")
979
+ latest_time_str = _format_epoch_utc(latest_time) if latest_time else "-"
980
+ # classify pass rate for color hints
981
+ pr_num = None
982
+ try:
983
+ pr_num = float(pass_pct.rstrip("%")) if pass_pct and pass_pct != "-" else None # nosec B105: '-' is a display sentinel, not a credential
984
+ except Exception:
985
+ pr_num = None
986
+ pr_cls = (
987
+ "ok"
988
+ if (pr_num is not None and pr_num >= 90.0)
989
+ else (
990
+ "warn"
991
+ if (pr_num is not None and pr_num >= 75.0)
992
+ else ("bad" if (pr_num is not None) else "")
993
+ )
994
+ )
995
+ # Basic cards with minimal CSS so they do not dominate layout
996
+ summary_cards_html = (
997
+ "<section id='summary-cards' aria-label='Latest run summary'>"
998
+ "<style>"
999
+ "#summary-cards{display:flex;flex-wrap:wrap;gap:.85rem;margin:.4rem 0 1.15rem;}"
1000
+ "#summary-cards .card{flex:0 1 150px;min-height:90px;position:relative;padding:.8rem .9rem;border-radius:12px;background:var(--card-bg);border:1px solid var(--card-border);box-shadow:var(--card-shadow);display:flex;flex-direction:column;gap:.3rem;transition:box-shadow .25s,transform .25s;background-clip:padding-box;}"
1001
+ "#summary-cards .card:after{content:'';position:absolute;inset:0;pointer-events:none;border-radius:inherit;opacity:0;transition:opacity .35s;background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.55),rgba(255,255,255,0) 65%);}"
1002
+ "[data-theme='dark'] #summary-cards .card:after{background:radial-gradient(circle at 75% 18%,rgba(255,255,255,.13),rgba(255,255,255,0) 70%);}"
1003
+ "#summary-cards .card:hover{transform:translateY(-2px);box-shadow:0 4px 10px -2px rgba(0,0,0,.18),0 0 0 1px var(--card-border);}"
1004
+ "#summary-cards .card:hover:after{opacity:1;}"
1005
+ "#summary-cards .card h3{margin:0;font-size:10px;font-weight:600;color:var(--text-dim);letter-spacing:.55px;text-transform:uppercase;}"
1006
+ "#summary-cards .card .val{font-size:21px;font-weight:600;line-height:1.05;}"
1007
+ "#summary-cards .card .val small{font-size:11px;font-weight:500;color:var(--text-dim);}"
1008
+ "#summary-cards .card .val.ok{color:#0a7a0a;}#summary-cards .card .val.warn{color:#b8860b;}#summary-cards .card .val.bad{color:#b00020;}"
1009
+ "#summary-cards .card .sub{font-size:11px;color:var(--text-dim);}"
1010
+ "#summary-cards .card:focus-within,#summary-cards .card:focus-visible{outline:2px solid var(--accent);outline-offset:2px;}"
1011
+ "@media (max-width:660px){#summary-cards .card{flex:1 1 45%;}}"
1012
+ "</style>"
1013
+ f"<div class='card'><h3>Pass Rate</h3><div class='val {pr_cls}'>{pass_pct}</div></div>"
1014
+ f"<div class='card'><h3>Failures</h3><div class='val'>{f}</div></div>"
1015
+ f"<div class='card'><h3>Tests</h3><div class='val'>{total_exec}</div><div class='sub'>P:{p} F:{f} B:{b}</div></div>"
1016
+ f"<div class='card'><h3>Runs</h3><div class='val'>{runs_total}</div></div>"
1017
+ f"<div class='card'><h3>Latest</h3><div class='val'><a href='../{latest_id}/' title='Open latest run'>{latest_id}</a></div><div class='sub'><a href='../latest/'>latest/</a></div></div>"
1018
+ f"<div class='card'><h3>Updated</h3><div class='val'><small>{latest_time_str}</small></div></div>"
1019
+ "</section>"
1020
+ )
485
1021
  parts: list[str] = [
486
1022
  "<!doctype html><html><head><meta charset='utf-8'>",
487
1023
  f"<title>{title}</title>",
488
1024
  "<style>",
489
- "body{font-family:system-ui;margin:1.5rem;}",
490
- "table{border-collapse:collapse;width:100%;}",
491
- (
492
- "th,td{padding:.35rem .55rem;border-bottom:1px solid #ddd;" # noqa: E501
493
- "font-size:14px;}"
494
- ),
1025
+ RUNS_INDEX_CSS_BASE,
1026
+ RUNS_INDEX_CSS_TABLE,
1027
+ RUNS_INDEX_CSS_MISC,
1028
+ RUNS_INDEX_CSS_ENH,
1029
+ ":root{--bg:#fff;--bg-alt:#f8f9fa;--text:#111;--text-dim:#555;--border:#d0d4d9;--accent:#2563eb;--card-bg:linear-gradient(#ffffff,#f6f7f9);--card-border:#d5d9de;--card-shadow:0 1px 2px rgba(0,0,0,.05),0 0 0 1px rgba(0,0,0,.04);}" # light vars
1030
+ "[data-theme='dark']{--bg:#0f1115;--bg-alt:#1b1f26;--text:#f5f6f8;--text-dim:#9aa4b1;--border:#2a313b;--accent:#3b82f6;--card-bg:linear-gradient(#1d242c,#171d22);--card-border:#2f3842;--card-shadow:0 1px 2px rgba(0,0,0,.55),0 0 0 1px rgba(255,255,255,.04);}" # dark vars
1031
+ "body{background:var(--bg);color:var(--text);}table{background:var(--bg-alt);} .ql-link{background:var(--bg);}" # base
1032
+ "td.col-run_id code{background:#f2f4f7;color:var(--text);box-shadow:0 0 0 1px var(--border) inset;border-radius:6px;transition:background .2s,color .2s;}" # light run id code pill
1033
+ "[data-theme='dark'] td.col-run_id code{background:#262c34;color:var(--text);box-shadow:0 0 0 1px #303842 inset;}" # dark run id pill
1034
+ "[data-theme='dark'] .link-btn{background:#262c34;border:1px solid #3a434e;color:var(--text);}"
1035
+ "[data-theme='dark'] .link-btn:hover{background:#34404c;border-color:#4a5663;}"
1036
+ "[data-theme='dark'] .pfb-pass{color:#4ade80;}[data-theme='dark'] .pfb-fail{color:#f87171;}[data-theme='dark'] .pfb-broken{color:#fbbf24;}", # adjust status colors for contrast
1037
+ # Header/title polish
1038
+ ".page-title{margin:0 0 .6rem;display:flex;flex-wrap:wrap;gap:.45rem;align-items:baseline;font-size:1.35rem;line-height:1.2;}"
1039
+ ".page-title .divider{color:var(--text-dim);}"
1040
+ ".page-title .chip{display:inline-block;padding:2px 8px;border:1px solid var(--border);border-radius:999px;background:var(--bg-alt);font-size:.9rem;color:var(--text-dim);}"
1041
+ "[data-theme='dark'] .page-title .chip{background:#1b1f26;border-color:#2a313b;color:var(--text-dim);}"
1042
+ "</style>"
1043
+ "<link rel='stylesheet' href='../../../../web/static/css/runs-polish.css' onerror=\"this.remove()\">"
1044
+ "</head><body>",
495
1045
  (
496
- "th{text-align:left;background:#f8f8f8;}" # noqa: E501
497
- "tr:hover{background:#f5f5f5;}"
498
- ),
499
- "tbody tr:first-child{background:#fffbe6;}",
500
- "tbody tr:first-child code::before{content:'★ ';color:#d18f00;}",
501
- "code{background:#f2f2f2;padding:2px 4px;border-radius:3px;}",
502
- "footer{margin-top:1rem;font-size:12px;color:#666;}",
503
- (
504
- "a{color:#0366d6;text-decoration:none;}" # noqa: E501
505
- "a:hover{text-decoration:underline;}"
506
- ),
507
- "nav.quick-links{margin:.25rem 0 1rem;font-size:14px;}",
508
- "nav.quick-links a{margin-right:.65rem;}",
509
- "</style></head><body>",
510
- f"<h1>{title}</h1>",
1046
+ f"<h1 class='page-title'>"
1047
+ f"Runs <span class='divider'>—</span> "
1048
+ f"<span class='chip'>{cfg.project}</span>/<span class='chip'>{cfg.branch}</span>"
1049
+ f"</h1>"
1050
+ )
1051
+ + f"<span style='display:none'>{legacy_title}</span>",
1052
+ summary_cards_html,
511
1053
  (
512
1054
  "<div id='controls' style='margin:.5rem 0 1rem;display:flex;" # noqa: E501
513
- "gap:1rem;flex-wrap:wrap'>" # noqa: E501
1055
+ "gap:1rem;flex-wrap:wrap;align-items:flex-start;position:relative'>" # noqa: E501
514
1056
  "<label style='font-size:14px'>Search: <input id='run-filter'" # noqa: E501
515
1057
  " type='text' placeholder='substring (id, context, meta)'" # noqa: E501
516
1058
  " style='padding:4px 6px;font-size:14px;border:1px solid #ccc;" # noqa: E501
517
- "border-radius:4px'></label>" # noqa: E501
1059
+ "border-radius:4px;width:220px'></label>" # noqa: E501
518
1060
  "<label style='font-size:14px'>" # noqa: E501
519
1061
  "<input type='checkbox' id='only-failing' style='margin-right:4px'>" # noqa: E501
520
1062
  "Only failing</label>" # noqa: E501
521
- "<span id='stats' style='font-size:12px;color:#666'></span></div>" # noqa: E501
1063
+ "<button id='clear-filter' class='ctl-btn'>Clear</button>" # noqa: E501
1064
+ "<button id='theme-toggle' class='ctl-btn' title='Toggle dark/light theme'>Dark</button>" # theme toggle button
1065
+ # Removed Theme / Accent / Density buttons for now
1066
+ "<button id='col-toggle' class='ctl-btn' aria-expanded='false' aria-controls='col-panel'>Columns</button>" # noqa: E501
1067
+ "<button id='help-toggle' class='ctl-btn' aria-expanded='false' aria-controls='help-pop' title='Usage help'>?</button>" # noqa: E501
1068
+ "<span id='stats' style='font-size:12px;color:#666'></span>"
1069
+ "<span id='pfb-stats' style='font-size:12px;color:#666'></span>"
1070
+ "<button id='load-more' style='display:none;margin-left:auto;"
1071
+ "font-size:12px;padding:.3rem .6rem;"
1072
+ "border:1px solid var(--border);"
1073
+ "background:var(--bg-alt);cursor:pointer;border-radius:4px'>"
1074
+ "Load more</button>"
1075
+ "<div id='help-pop' style='display:none;position:absolute;top:100%;right:0;max-width:260px;font-size:12px;line-height:1.35;background:var(--bg-alt);border:1px solid var(--border);padding:.6rem .7rem;border-radius:4px;box-shadow:0 2px 6px rgba(0,0,0,.15);'>"
1076
+ "<strong style='font-size:12px'>Shortcuts</strong><ul style='padding-left:1rem;margin:.35rem 0;'>"
1077
+ "<li>Click row = focus run</li>"
1078
+ "<li>Shift+Click = multi-filter</li>"
1079
+ "<li>🔗 icon = copy deep link</li>"
1080
+ "<li>Esc = close panels</li>"
1081
+ "<li>Presets = Minimal/Core/Full</li>"
1082
+ "</ul><em style='color:var(--text-dim)'>#run=&lt;id&gt; deep links supported</em>" # noqa: E501
1083
+ "</div></div>" # noqa: E501
1084
+ "<div class='filters'><label>Branch <input id='f-branch' placeholder='e.g. main'></label>"
1085
+ "<label>Tags <input id='f-tags' placeholder='comma separated'></label>"
1086
+ "<label>From <input id='f-from' type='date'></label>"
1087
+ "<label>To <input id='f-to' type='date'></label>"
1088
+ "<label><input id='f-onlyFailing' type='checkbox'> Only failing</label></div>"
1089
+ "<style>.filters{display:flex;gap:.5rem;flex-wrap:wrap;margin:.5rem 0}.filters label{font-size:.9rem;display:flex;align-items:center;gap:.25rem}.filters input{padding:.25rem .4rem}</style>"
1090
+ "<script>(function(){const get=id=>document.getElementById(id);if(!get('f-branch'))return;const qs=new URLSearchParams(location.search);get('f-branch').value=qs.get('branch')||'';get('f-tags').value=qs.get('tags')||'';get('f-from').value=(qs.get('from')||'').slice(0,10);get('f-to').value=(qs.get('to')||'').slice(0,10);get('f-onlyFailing').checked=qs.get('onlyFailing')==='1';function setQS(k,v){const q=new URLSearchParams(location.search);(v&&v!=='')?q.set(k,v):q.delete(k);history.replaceState(null,'','?'+q);if(window.applyFilters)window.applyFilters();}get('f-branch').addEventListener('input',e=>setQS('branch',e.target.value.trim()));get('f-tags').addEventListener('input',e=>setQS('tags',e.target.value.replace(/\\s+/g,'').trim()));get('f-from').addEventListener('change',e=>setQS('from',e.target.value));get('f-to').addEventListener('change',e=>setQS('to',e.target.value));get('f-onlyFailing').addEventListener('change',e=>setQS('onlyFailing',e.target.checked?'1':''));})();</script>"
1091
+ # Summary cards removed per simplification
1092
+ ""
522
1093
  ),
523
1094
  nav,
524
1095
  "<table id='runs-table'><thead><tr>",
525
1096
  (
526
- "<th class='sortable' data-col='run_id'>Run ID</th>"
527
- "<th class='sortable' data-col='epoch'>Epoch</th>"
528
- "<th class='sortable' data-col='utc'>UTC Time</th>"
529
- "<th class='sortable' data-col='size'>Size</th>"
1097
+ "<th class='sortable' aria-sort='none' data-col='run_id'>Run ID</th>"
1098
+ "<th class='sortable' aria-sort='none' data-col='utc'>UTC Time</th>"
1099
+ "<th data-col='age'>Age</th>"
1100
+ "<th class='sortable' aria-sort='none' data-col='size'>Size</th>"
1101
+ "<th class='sortable' aria-sort='none' data-col='files'>Files</th>"
530
1102
  ),
531
1103
  (
532
- "<th class='sortable' data-col='pfb'>P/F/B</th>"
533
- "<th class='sortable' data-col='context'>Context</th>"
534
- f"{meta_header}<th>Run</th><th>Latest</th></tr></thead><tbody>"
1104
+ "<th class='sortable' aria-sort='none' data-col='pfb' title='Passed/Failed/Broken'>P/F/B</th>"
1105
+ "<th class='sortable' aria-sort='none' data-col='passpct' title='Pass percentage'>Pass%</th>"
1106
+ "<th class='sortable' aria-sort='none' data-col='context' title='Test context'>Context</th>"
1107
+ "<th class='sortable' aria-sort='none' data-col='tags' title='Test tags'>Tags</th>"
1108
+ f"{meta_header}<th data-col='runlink'>Run</th>"
1109
+ f"<th data-col='latest'>Latest</th></tr></thead><tbody>"
535
1110
  ),
536
1111
  table_rows,
537
1112
  "</tbody></table>",
1113
+ # Removed aggregate sparkline + totals + footer stats
538
1114
  (
539
- f"<footer>Updated {latest_payload.get('run_id', '?')} "
540
- f"{cfg.project}/{cfg.branch}</footer>"
541
- ),
542
- (
543
- "<script>"
544
- "(function(){" # IIFE wrapper
1115
+ "<script>" # consolidated client enhancement script
1116
+ "(function(){"
545
1117
  "const tbl=document.getElementById('runs-table');"
546
1118
  "const filter=document.getElementById('run-filter');"
547
1119
  "const stats=document.getElementById('stats');"
1120
+ "const pfbStats=document.getElementById('pfb-stats');"
548
1121
  "const onlyFail=document.getElementById('only-failing');"
549
- "function updateStats(){const total=tbl.tBodies[0].rows.length;"
550
- "const visible=[...tbl.tBodies[0].rows]" # next line filters
551
- ".filter(r=>r.style.display!=='none').length;"
552
- "stats.textContent=visible+' / '+total+' shown';}"
553
- "function applyFilter(){const q=filter.value.toLowerCase();"
554
- "const onlyF=onlyFail.checked;"
555
- "[...tbl.tBodies[0].rows].forEach(r=>{"
556
- "const txt=r.textContent.toLowerCase();"
557
- "const hasTxt=!q||txt.indexOf(q)>-1;"
558
- "const failing=Number(r.getAttribute('data-failed')||'0')>0;"
559
- "r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';});"
560
- "updateStats();}"
561
- "filter.addEventListener('input',applyFilter);"
562
- "onlyFail.addEventListener('change',applyFilter);"
1122
+ "const clearBtn=document.getElementById('clear-filter');"
1123
+ ""
1124
+ "const colBtn=document.getElementById('col-toggle');"
1125
+ f"const INIT={initial_client_rows};"
1126
+ f"const BATCH={batch_size};"
1127
+ "let colPanel=null;"
1128
+ "const LS='ah_runs_';"
1129
+ "function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}"
1130
+ "function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}"
1131
+ "const loadBtn=document.getElementById('load-more');"
1132
+ "function hidden(){return [...tbl.tBodies[0].querySelectorAll('tr.pr-hidden')];}"
1133
+ "function updateLoadButton(){const h=hidden();if(loadBtn){if(h.length){loadBtn.style.display='inline-block';loadBtn.textContent='Load more ('+h.length+')';}else{loadBtn.style.display='none';}}}"
1134
+ "function revealNextBatch(){hidden().slice(0,BATCH).forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}"
1135
+ "loadBtn&&loadBtn.addEventListener('click',()=>{revealNextBatch();applyFilter();lsSet('loaded',String(tbl.tBodies[0].rows.length-hidden().length));});"
1136
+ "function updateFooterStats(){}"
1137
+ "function updateStats(){const total=tbl.tBodies[0].rows.length;const rows=[...tbl.tBodies[0].rows];const vis=rows.filter(r=>r.style.display!=='none');stats.textContent=vis.length+' / '+total+' shown';let p=0,f=0,b=0;vis.forEach(r=>{p+=Number(r.dataset.passed||0);f+=Number(r.dataset.failed||0);b+=Number(r.dataset.broken||0);});pfbStats.textContent=' P:'+p+' F:'+f+' B:'+b;}"
1138
+ "function applyFilter(){const raw=filter.value.trim().toLowerCase();const tokens=raw.split(/\\s+/).filter(Boolean);const onlyF=onlyFail.checked;if(tokens.length&&document.querySelector('.pr-hidden')){hidden().forEach(r=>r.classList.remove('pr-hidden'));updateLoadButton();}const rows=[...tbl.tBodies[0].rows];rows.forEach(r=>{const hay=r.getAttribute('data-search')||'';const hasTxt=!tokens.length||tokens.every(t=>hay.indexOf(t)>-1);const failing=Number(r.dataset.failed||0)>0;r.style.display=(hasTxt&&(!onlyF||failing))?'':'none';if(failing){r.classList.add('failing-row');}else{r.classList.remove('failing-row');}});document.querySelectorAll('tr.row-active').forEach(x=>x.classList.remove('row-active'));if(tokens.length===1){const rid=tokens[0];const match=[...tbl.tBodies[0].rows].find(r=>r.querySelector('td.col-run_id code')&&r.querySelector('td.col-run_id code').textContent.trim().toLowerCase()===rid);if(match)match.classList.add('row-active');}updateStats();}"
1139
+ "filter.addEventListener('input',e=>{applyFilter();lsSet('filter',filter.value);});"
1140
+ "filter.addEventListener('keydown',e=>{if(e.key==='Enter'){applyFilter();}});"
1141
+ "onlyFail.addEventListener('change',()=>{applyFilter();lsSet('onlyFail',onlyFail.checked?'1':'0');});"
1142
+ "clearBtn&&clearBtn.addEventListener('click',()=>{filter.value='';onlyFail.checked=false;applyFilter();filter.focus();});"
1143
+ ""
1144
+ "function buildColPanel(){if(colPanel)return;colPanel=document.createElement('div');colPanel.id='col-panel';colPanel.setAttribute('role','dialog');colPanel.setAttribute('aria-label','Column visibility');colPanel.style.cssText='position:absolute;top:100%;left:0;background:var(--bg-alt);border:1px solid var(--border);padding:.55rem .75rem;box-shadow:0 2px 6px rgba(0,0,0,.15);display:none;flex-direction:column;gap:.35rem;z-index:6;max-height:320px;overflow:auto;font-size:12px;';const toolbar=document.createElement('div');toolbar.style.cssText='display:flex;flex-wrap:wrap;gap:.4rem;margin-bottom:.35rem;';toolbar.innerHTML=\"<button type='button' class='ctl-btn' data-coltool='all'>All</button><button type='button' class='ctl-btn' data-coltool='none'>None</button><button type='button' class='ctl-btn' data-coltool='reset'>Reset</button><button type='button' class='ctl-btn' data-preset='minimal'>Minimal</button><button type='button' class='ctl-btn' data-preset='core'>Core</button><button type='button' class='ctl-btn' data-preset='full'>Full</button>\";colPanel.appendChild(toolbar);const hdr=tbl.tHead.querySelectorAll('th');const saved=(lsGet('cols')||'').split(',').filter(Boolean);hdr.forEach((th)=>{const key=th.dataset.col;const id='col_'+key;const wrap=document.createElement('label');wrap.style.cssText='display:flex;align-items:center;gap:.35rem;cursor:pointer;';const cb=document.createElement('input');cb.type='checkbox';cb.id=id;cb.checked=!saved.length||saved.includes(key);cb.addEventListener('change',()=>{persistCols();applyCols();});wrap.appendChild(cb);wrap.appendChild(document.createTextNode(key));colPanel.appendChild(wrap);});toolbar.addEventListener('click',e=>{const b=e.target.closest('button');if(!b)return;const mode=b.getAttribute('data-coltool');const preset=b.getAttribute('data-preset');const boxes=[...colPanel.querySelectorAll('input[type=checkbox]')];if(mode){if(mode==='all'){boxes.forEach(bb=>bb.checked=true);}else if(mode==='none'){boxes.forEach(bb=>{if(bb.id!=='col_run_id')bb.checked=false;});}else if(mode==='reset'){lsSet('cols','');boxes.forEach(bb=>bb.checked=true);}persistCols();applyCols();return;}if(preset){const allKeys=[...tbl.tHead.querySelectorAll('th')].map(h=>h.dataset.col);const MAP={minimal:['run_id','utc','pfb'],core:['run_id','utc','age','size','files','pfb','context','tags'],full:allKeys.filter(k=>k!=='')};const set=new Set(MAP[preset]||[]);boxes.forEach(bb=>{const key=bb.id.replace('col_','');bb.checked=set.size===0||set.has(key);});persistCols();applyCols();}});const ctr=document.getElementById('controls');ctr.style.position='relative';ctr.appendChild(colPanel);}"
1145
+ "function persistCols(){if(!colPanel)return;const vis=[...colPanel.querySelectorAll('input[type=checkbox]')].filter(c=>c.checked).map(c=>c.id.replace('col_',''));lsSet('cols',vis.join(','));}"
1146
+ "function applyCols(){const stored=(lsGet('cols')||'').split(',').filter(Boolean);const hdr=[...tbl.tHead.querySelectorAll('th')];const bodyRows=[...tbl.tBodies[0].rows];if(!stored.length){hdr.forEach((h,i)=>{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));});return;}hdr.forEach((h,i)=>{const key=h.dataset.col;if(key==='run_id'){h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));return;}if(!stored.includes(key)){h.classList.add('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.add('col-hidden'));}else{h.classList.remove('col-hidden');bodyRows.forEach(r=>r.cells[i].classList.remove('col-hidden'));}});}"
1147
+ "colBtn&&colBtn.addEventListener('click',()=>{buildColPanel();const open=colPanel.style.display==='flex';colPanel.style.display=open?'none':'flex';colBtn.setAttribute('aria-expanded',String(!open));if(!open){const first=colPanel.querySelector('input');first&&first.focus();}});"
1148
+ "const helpBtn=document.getElementById('help-toggle');const helpPop=document.getElementById('help-pop');helpBtn&&helpBtn.addEventListener('click',()=>{const vis=helpPop.style.display==='block';helpPop.style.display=vis?'none':'block';helpBtn.setAttribute('aria-expanded',String(!vis));});"
1149
+ "document.addEventListener('keydown',e=>{if(e.key==='Escape'){if(colPanel&&colPanel.style.display==='flex'){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}}});"
1150
+ "document.addEventListener('click',e=>{const t=e.target;if(colPanel&&colPanel.style.display==='flex'&&!colPanel.contains(t)&&t!==colBtn){colPanel.style.display='none';colBtn.setAttribute('aria-expanded','false');}if(helpPop&&helpPop.style.display==='block'&&!helpPop.contains(t)&&t!==helpBtn){helpPop.style.display='none';helpBtn.setAttribute('aria-expanded','false');}});"
1151
+ "document.addEventListener('click',e=>{const btn=e.target.closest('.link-btn');if(!btn)return;e.stopPropagation();const rid=btn.getAttribute('data-rid');if(!rid)return;const base=location.href.split('#')[0];const link=base+'#run='+encodeURIComponent(rid);if(navigator.clipboard){navigator.clipboard.writeText(link).catch(()=>{});}btn.classList.add('copied');setTimeout(()=>btn.classList.remove('copied'),900);});"
1152
+ "function applyHash(){const h=location.hash;if(h.startsWith('#run=')){const rid=decodeURIComponent(h.slice(5));if(rid){filter.value=rid;lsSet('filter',rid);applyFilter();}}}window.addEventListener('hashchange',applyHash);"
563
1153
  "let sortState=null;"
564
- "function extract(r,col){switch(col){"
565
- "case 'epoch':return r.cells[1].textContent;"
566
- "case 'size':return r.cells[3].getAttribute('title');"
567
- "case 'pfb':return r.cells[4].textContent;"
568
- "default:return r.textContent;}}"
569
- "function sortBy(col){const tbody=tbl.tBodies[0];"
570
- "const rows=[...tbody.rows];let dir=1;"
571
- "if(sortState&&sortState.col===col){dir=-sortState.dir;}"
572
- "sortState={col,dir};"
573
- "const numeric=(col==='epoch'||col==='size');"
574
- "rows.sort((r1,r2)=>{const a=extract(r1,col);"
575
- "const b=extract(r2,col);if(numeric){return (("
576
- "(Number(a)||0)-(Number(b)||0))*dir;}"
577
- "return a.localeCompare(b)*dir;});"
578
- "rows.forEach(r=>tbody.appendChild(r));}"
579
- "tbl.tHead.querySelectorAll('th.sortable')" # split chain
580
- ".forEach(th=>{th.addEventListener('click',()=>sortBy(th.dataset.col));});" # noqa: E501
581
- "updateStats();})();"
1154
+ "function extract(r,col){if(col.startsWith('meta:')){const idx=[...tbl.tHead.querySelectorAll('th')].findIndex(h=>h.dataset.col===col);return idx>-1?r.cells[idx].textContent:'';}switch(col){case 'size':return r.querySelector('td.col-size').getAttribute('title');case 'files':return r.querySelector('td.col-files').getAttribute('title');case 'pfb':return r.querySelector('td.col-pfb').textContent;case 'run_id':return r.querySelector('td.col-run_id').textContent;case 'utc':return r.querySelector('td.col-utc').textContent;case 'context':return r.querySelector('td.col-context').textContent;case 'tags':return r.querySelector('td.col-tags').textContent;default:return r.textContent;}}"
1155
+ "function sortBy(th){const col=th.dataset.col;const tbody=tbl.tBodies[0];const rows=[...tbody.rows];let dir=1;if(sortState&&sortState.col===col){dir=-sortState.dir;}sortState={col,dir};const numeric=(col==='size'||col==='files');rows.sort((r1,r2)=>{const a=extract(r1,col);const b=extract(r2,col);if(numeric){return ((Number(a)||0)-(Number(b)||0))*dir;}return a.localeCompare(b)*dir;});rows.forEach(r=>tbody.appendChild(r));tbl.tHead.querySelectorAll('th.sortable').forEach(h=>h.removeAttribute('data-sort'));th.setAttribute('data-sort',dir===1?'asc':'desc');if(window.setAriaSort){const idx=[...tbl.tHead.querySelectorAll('th')].indexOf(th);window.setAriaSort(idx,dir===1?'ascending':'descending');}lsSet('sort_col',col);lsSet('sort_dir',String(dir));}"
1156
+ "tbl.tHead.querySelectorAll('th.sortable').forEach(th=>{th.addEventListener('click',()=>sortBy(th));});"
1157
+ "function restore(){const f=lsGet('filter');if(f){filter.value=f;}const of=lsGet('onlyFail');if(of==='1'){onlyFail.checked=true;}const loaded=Number(lsGet('loaded')||'0');if(loaded>INIT){while(tbl.tBodies[0].rows.length<loaded && hidden().length){revealNextBatch();}}const sc=lsGet('sort_col');const sd=Number(lsGet('sort_dir')||'1');if(sc){const th=tbl.tHead.querySelector(\"th[data-col='\"+sc+\"']\");if(th){sortState={col:sc,dir:-sd};sortBy(th);if(sd===-1){} }}applyCols();}"
1158
+ "restore();applyHash();tbl.tBodies[0].addEventListener('click',e=>{const tr=e.target.closest('tr');if(!tr)return;if(e.target.tagName==='A'||e.target.classList.contains('link-btn'))return;const codeEl=tr.querySelector('td.col-run_id code');if(!codeEl)return;const rid=codeEl.textContent.trim();if(e.shiftKey&&filter.value.trim()){if(!filter.value.split(/\\s+/).includes(rid)){filter.value=filter.value.trim()+' '+rid;}}else{filter.value=rid;location.hash='run='+encodeURIComponent(rid);}lsSet('filter',filter.value);applyFilter();filter.focus();});"
1159
+ "function relFmt(sec){if(sec<60)return Math.floor(sec)+'s';sec/=60;if(sec<60)return Math.floor(sec)+'m';sec/=60;if(sec<24)return Math.floor(sec)+'h';sec/=24;if(sec<7)return Math.floor(sec)+'d';const w=Math.floor(sec/7);if(w<4)return w+'w';const mo=Math.floor(sec/30);if(mo<12)return mo+'mo';return Math.floor(sec/365)+'y';}"
1160
+ "function updateAges(){const now=Date.now()/1000;tbl.tBodies[0].querySelectorAll('td.age').forEach(td=>{const ep=Number(td.getAttribute('data-epoch'));if(!ep){td.textContent='-';return;}td.textContent=relFmt(now-ep);});}"
1161
+ "applyFilter();updateStats();updateLoadButton();updateAges();setInterval(updateAges,60000);"
1162
+ # Back-compat fragment redirect (#/graphs -> #graph)
1163
+ "(function(){if(location.hash==='#/graphs'){history.replaceState(null,'',location.href.replace('#/graphs','#graph'));}})();"
1164
+ # Theme toggle script
1165
+ "(function(){const btn=document.getElementById('theme-toggle');if(!btn)return;const LS='ah_runs_';function lsGet(k){try{return localStorage.getItem(LS+k);}catch(e){return null;}}function lsSet(k,v){try{localStorage.setItem(LS+k,v);}catch(e){}}function apply(t){if(t==='dark'){document.body.setAttribute('data-theme','dark');btn.textContent='Light';}else{document.body.removeAttribute('data-theme');btn.textContent='Dark';}}let cur=lsGet('theme')||'light';apply(cur);btn.addEventListener('click',()=>{cur=cur==='dark'?'light':'dark';lsSet('theme',cur);apply(cur);});})();"
1166
+ "})();"
582
1167
  "</script>"
583
1168
  ),
1169
+ f"<script>{RUNS_INDEX_JS_ENH}</script>",
1170
+ "<script defer src='../../../../web/static/js/runs-ux.js' onerror=\"this.remove()\"></script>",
1171
+ # Summary toggle & dashboard scripts removed
1172
+ "<div id='empty-msg' hidden class='empty'>No runs match the current filters.</div>",
1173
+ "</body></html>",
1174
+ ]
1175
+ # Return assembled runs index HTML (bytes)
1176
+ return "".join(parts).encode("utf-8")
1177
+
1178
+
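Note: the copy-link buttons and the applyHash handler above give every run a shareable deep link of the form index.html#run=<run_id>. A small sketch of building the same link outside the page; the URL and run id are illustrative, and quote() only approximates encodeURIComponent:

from urllib.parse import quote

def run_deep_link(runs_index_url: str, run_id: str) -> str:
    # Mirrors the page's copy-link behaviour: strip any existing fragment,
    # then append '#run=' plus the percent-encoded run id.
    return f"{runs_index_url.split('#')[0]}#run={quote(run_id, safe='')}"

# Hypothetical values for illustration only.
print(run_deep_link("https://reports.example.com/demo/main/runs/index.html", "20250101-010101"))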
1179
+ def _build_aggregated_runs_html(payload: dict, cfg: PublishConfig) -> bytes:
1180
+ """Very small aggregated runs page (cross-branch latest runs).
1181
+
1182
+ Schema 2 payload example:
1183
+ {
1184
+ "schema": 2,
1185
+ "project": "demo",
1186
+ "updated": 1234567890,
1187
+ "runs": [
1188
+ {"branch": "main", "run_id": "20250101-010101", "time": 123, "passed": 10, ...}
1189
+ ]
1190
+ }
1191
+ """
1192
+ title = f"Allure Aggregated Runs: {payload.get('project') or cfg.project}"
1193
+ runs = payload.get("runs", [])
1194
+ rows: list[str] = []
1195
+
1196
+ def classify(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1197
+ if p is None:
1198
+ return ("-", "health-na")
1199
+ f2 = f or 0
1200
+ b2 = b or 0
1201
+ total_exec = p + f2 + b2
1202
+ if total_exec <= 0:
1203
+ return ("-", "health-na")
1204
+ ratio = p / total_exec
1205
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1206
+ return ("Good", "health-good")
1207
+ if ratio >= 0.75:
1208
+ return ("Warn", "health-warn")
1209
+ return ("Poor", "health-poor")
1210
+
1211
+ for r in runs:
1212
+ b = r.get("branch", "?")
1213
+ rid = r.get("run_id", "?")
1214
+ t = r.get("time")
1215
+ passed = r.get("passed")
1216
+ failed = r.get("failed")
1217
+ broken = r.get("broken")
1218
+ size = r.get("size")
1219
+ summary = (
1220
+ f"{passed or 0}/{failed or 0}/{broken or 0}"
1221
+ if any(x is not None for x in (passed, failed, broken))
1222
+ else "-"
1223
+ )
1224
+ health_label, health_css = classify(passed, failed, broken)
1225
+ pct_pass = None
1226
+ if passed is not None:
1227
+ exec_total = (passed or 0) + (failed or 0) + (broken or 0)
1228
+ if exec_total > 0:
1229
+ pct_pass = f"{(passed / exec_total) * 100:.1f}%"
1230
+ rows.append(
1231
+ f"<tr class='{health_css}'>"
1232
+ f"<td><code>{b}</code></td>"
1233
+ f"<td><code>{rid}</code></td>"
1234
+ f"<td>{_format_epoch_utc(t) if t else '-'}</td>"
1235
+ f"<td>{summary}</td>"
1236
+ f"<td><span class='health-badge {health_css}'>{health_label}</span></td>"
1237
+ f"<td>{pct_pass or '-'}</td>"
1238
+ f"<td>{_format_bytes(size) if size else '-'}</td>"
1239
+ "</tr>"
1240
+ )
1241
+ body = (
1242
+ "\n".join(rows)
1243
+ if rows
1244
+ else "<tr><td colspan='7' style='text-align:center'>No runs yet</td></tr>"
1245
+ )
1246
+ updated = payload.get("updated")
1247
+ parts = [
1248
+ "<!doctype html><html><head><meta charset='utf-8'>",
1249
+ f"<title>{title}</title>",
1250
+ "<style>",
1251
+ "body{font-family:system-ui;margin:1.25rem;line-height:1.4;}",
1252
+ "h1{margin-top:0;font-size:1.3rem;}",
1253
+ "table{border-collapse:collapse;width:100%;max-width:1000px;}",
1254
+ "th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:13px;}",
1255
+ "thead th{background:#f2f4f7;text-align:left;}",
1256
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1257
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1258
+ "footer{margin-top:1rem;font-size:12px;color:#555;}",
1259
+ "#filter-box{margin:.75rem 0;}",
1260
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1261
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1262
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1263
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1264
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1265
+ "</style></head><body>",
1266
+ f"<h1>{title}</h1>",
1267
+ "<div id='filter-box'><label style='font-size:13px'>Filter: <input id='flt' type='text' placeholder='branch or run id'></label></div>", # noqa: E501
1268
+ "<table id='agg'><thead><tr><th>Branch</th><th>Run</th><th>UTC</th><th>P/F/B</th><th>Health</th><th>%Pass</th><th>Size</th></tr></thead><tbody>", # noqa: E501
1269
+ body,
1270
+ "</tbody></table>",
1271
+ (
1272
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1273
+ f"Project: {payload.get('project') or cfg.project}</footer>"
1274
+ ),
1275
+ "<script>(function(){const f=document.getElementById('flt');const tbl=document.getElementById('agg');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const txt=r.textContent.toLowerCase();r.style.display=txt.includes(q)?'':'none';});});})();</script>", # noqa: E501
584
1276
  "</body></html>",
585
1277
  ]
586
1278
  return "".join(parts).encode("utf-8")
587
1279
 
588
1280
 
1281
+ # --------------------------------------------------------------------------------------
1282
+ # Publish orchestration (restored)
1283
+ # --------------------------------------------------------------------------------------
1284
+
1285
+
1286
+ def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
1287
+ """End-to-end publish: pull history, generate, upload, promote latest, manifests.
1288
+
1289
+ Returns a dict of useful URLs & metadata for caller / CI usage.
1290
+ """
1291
+ paths = paths or Paths()
1292
+ total_steps = 7
1293
+ step = 1
1294
+ timings: dict[str, float] = {}
1295
+ t0 = time()
1296
+ print(f"[publish] [{step}/{total_steps}] Pulling previous history...")
1297
+ pull_history(cfg, paths)
1298
+ timings["history_pull"] = time() - t0
1299
+ step += 1
1300
+ t1 = time()
1301
+ print(f"[publish] [{step}/{total_steps}] Generating Allure report...")
1302
+ generate_report(paths)
1303
+ timings["generate"] = time() - t1
1304
+ # Count report files pre-upload for transparency
1305
+ report_file_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
1306
+ step += 1
1307
+ t2 = time()
1308
+ print(f"[publish] [{step}/{total_steps}] Uploading run artifacts ({results_files} files)...")
1309
+ upload_dir(cfg, paths.report, cfg.s3_run_prefix)
1310
+ timings["upload_run"] = time() - t2
1311
+ _ensure_directory_placeholder(
1312
+ cfg,
1313
+ paths.report / "index.html",
1314
+ cfg.s3_run_prefix,
1315
+ )
1316
+ step += 1
1317
+ t3 = time()
1318
+ print(f"[publish] [{step}/{total_steps}] Two-phase latest update starting...")
1319
+ two_phase_update_latest(cfg, paths.report)
1320
+ timings["two_phase_update"] = time() - t3
1321
+ # Optional archive AFTER main run upload
1322
+ archive_key = _maybe_archive_run(cfg, paths)
1323
+ try:
1324
+ step += 1
1325
+ print(f"[publish] [{step}/{total_steps}] Writing manifest & indexes...")
1326
+ write_manifest(cfg, paths)
1327
+ except ClientError as e: # pragma: no cover – non fatal
1328
+ print(f"Manifest write skipped: {e}")
1329
+ try: # retention cleanup
1330
+ if getattr(cfg, "max_keep_runs", None):
1331
+ step += 1
1332
+ print(f"[publish] [{step}/{total_steps}] Retention cleanup...")
1333
+ cleanup_old_runs(cfg, int(cfg.max_keep_runs))
1334
+ except Exception as e: # pragma: no cover
1335
+ print(f"Cleanup skipped: {e}")
1336
+ step += 1
1337
+ print(f"[publish] [{step}/{total_steps}] Publish pipeline complete.")
1338
+ timings["total"] = time() - t0
1339
+
1340
+ files_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
1341
+ return {
1342
+ "run_url": cfg.url_run(),
1343
+ "latest_url": cfg.url_latest(),
1344
+ "runs_index_url": (
1345
+ None
1346
+ if not cfg.cloudfront_domain
1347
+ else (
1348
+ f"{cfg.cloudfront_domain.rstrip('/')}/"
1349
+ f"{branch_root(cfg.prefix, cfg.project, cfg.branch)}/runs/"
1350
+ "index.html"
1351
+ )
1352
+ ),
1353
+ "trend_url": (
1354
+ None
1355
+ if not cfg.cloudfront_domain
1356
+ else (
1357
+ f"{cfg.cloudfront_domain.rstrip('/')}/"
1358
+ f"{branch_root(cfg.prefix, cfg.project, cfg.branch)}/runs/"
1359
+ "trend.html"
1360
+ )
1361
+ ),
1362
+ "bucket": cfg.bucket,
1363
+ "run_prefix": cfg.s3_run_prefix,
1364
+ "latest_prefix": cfg.s3_latest_prefix,
1365
+ "report_size_bytes": compute_dir_size(paths.report),
1366
+ "report_files": files_count,
1367
+ "archive_key": archive_key,
1368
+ "timings": timings,
1369
+ }
1370
+
1371
+
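A minimal usage sketch for the restored pipeline. It assumes cfg is a fully populated PublishConfig built by the caller (its construction lives outside this module); only keys actually returned by publish() are read:

# cfg: an already-built PublishConfig (bucket, project, branch, run_id, ...).
result = publish(cfg)  # pass an explicit Paths(...) to override the default layout

print("run report:  ", result["run_url"])
print("latest alias:", result["latest_url"])
print("runs index:  ", result["runs_index_url"])  # None unless a CloudFront domain is configured
print("timings (s): ", {k: round(v, 1) for k, v in result["timings"].items()})
print("archived to: ", result["archive_key"])     # None unless archiving is enabled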
589
1372
  def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
590
1373
  title = f"Run History Trend: {cfg.project} / {cfg.branch}"
591
1374
  json_url = "../latest/history/history-trend.json"
@@ -619,26 +1402,34 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
619
1402
  "</th></tr></thead><tbody></tbody></table>"
620
1403
  ),
621
1404
  "<script>\n(async function(){\n",
622
- f" const resp = await fetch('{json_url}');\n",
623
- (
624
- " if(!resp.ok){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
625
- "'<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');return;}\n" # noqa: E501
626
- ),
627
- " const data = await resp.json();\n",
628
- (
629
- " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend'," # noqa: E501
630
- "'<p>No trend data.</p>');return;}\n" # noqa: E501
631
- ),
1405
+ f" const url = '{json_url}';\n",
1406
+ " let data = null;\n",
1407
+ " try {\n",
1408
+ " const resp = await fetch(url, { cache: 'no-store' });\n",
1409
+ " const ct = resp.headers.get('content-type') || '';\n",
1410
+ " if(!resp.ok){\n",
1411
+ " document.body.insertAdjacentHTML('beforeend',\n",
1412
+ " '<p style=\\'color:red\\'>Failed to fetch trend JSON ('+resp.status+')</p>');\n",
1413
+ " return;\n",
1414
+ " }\n",
1415
+ " if (!ct.includes('application/json')) {\n",
1416
+ " const txt = await resp.text();\n",
1417
+ " throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');\n",
1418
+ " }\n",
1419
+ " data = await resp.json();\n",
1420
+ " } catch (e) {\n",
1421
+ " document.body.insertAdjacentHTML('beforeend', '<p style=\\'color:red\\'>Error loading trend data: '+(e && e.message ? e.message : e)+'</p>');\n",
1422
+ " return;\n",
1423
+ " }\n",
1424
+ " if(!Array.isArray(data)){document.body.insertAdjacentHTML('beforeend','<p>No trend data.</p>');return;}\n",
632
1425
  # Sanitize & enrich: fallback label if reportName/buildOrder missing
633
1426
  (
634
1427
  " const stats = data\n"
635
1428
  " .filter(d=>d&&typeof d==='object')\n"
636
1429
  " .map((d,i)=>{\n"
637
- " const st = (d.statistic && typeof d.statistic==='object') ?" # noqa: E501
638
- " d.statistic : {};\n"
639
- " const lbl = d.reportName || d.buildOrder || st.name ||" # noqa: E501
640
- " (i+1);\n"
641
- " return {label: String(lbl), ...st};\n"
1430
+ " const src = (d.statistic && typeof d.statistic==='object') ? d.statistic : ((d.data && typeof d.data==='object') ? d.data : {});\n"
1431
+ " const lbl = d.reportName || d.buildOrder || d.name || src.name || (i+1);\n"
1432
+ " return {label: String(lbl), ...src};\n"
642
1433
  " });\n"
643
1434
  ),
644
1435
  (
@@ -679,65 +1470,181 @@ def _build_trend_viewer_html(cfg: PublishConfig) -> bytes:
679
1470
  return "".join(parts).encode("utf-8")
680
1471
 
681
1472
 
682
- # --------------------------------------------------------------------------------------
683
- # Retention cleanup & directory placeholder
684
- # --------------------------------------------------------------------------------------
685
-
1473
+ def _build_history_insights_html(cfg: PublishConfig) -> bytes:
1474
+ """Render a lightweight insights page derived from history-trend.json.
686
1475
 
687
- def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
688
- if keep is None or keep <= 0:
689
- return
690
- s3 = _s3(cfg)
691
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
692
- # list immediate children (run prefixes)
693
- paginator = s3.get_paginator("list_objects_v2")
694
- run_prefixes: list[str] = []
695
- for page in paginator.paginate(
696
- Bucket=cfg.bucket,
697
- Prefix=f"{root}/",
698
- Delimiter="/",
699
- ):
700
- for cp in page.get("CommonPrefixes", []) or []:
701
- pfx = cp.get("Prefix")
702
- if not pfx:
703
- continue
704
- name = pfx.rsplit("/", 2)[-2]
705
- if name in {"latest", "runs"}:
706
- continue
707
- is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
708
- if is_ts:
709
- run_prefixes.append(pfx)
710
- run_prefixes.sort(reverse=True)
711
- for old in run_prefixes[keep:]:
712
- delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
1476
+ Provides quick metrics (run count, latest pass%, failure streak, averages)
1477
+ plus a compact table of recent entries – purely client-side.
1478
+ """
1479
+ title = f"Run History Insights: {cfg.project} / {cfg.branch}"
1480
+ json_url = "../latest/history/history-trend.json"
1481
+ parts: list[str] = [
1482
+ "<!doctype html><html><head><meta charset='utf-8'>",
1483
+ f"<title>{title}</title>",
1484
+ "<style>body{font-family:system-ui;margin:1.25rem;line-height:1.4;background:#fff;color:#111;}h1{margin-top:0;font-size:1.35rem;}a{color:#2563eb;text-decoration:none;}a:hover{text-decoration:underline;}code{background:#f2f4f7;padding:2px 4px;border-radius:4px;font-size:12px;}#metrics{display:flex;flex-wrap:wrap;gap:.8rem;margin:1rem 0;}#metrics .m{flex:0 1 170px;background:#f8f9fa;border:1px solid #d0d4d9;border-radius:6px;padding:.6rem .7rem;box-shadow:0 1px 2px rgba(0,0,0,.06);}#metrics .m h3{margin:0 0 .3rem;font-size:11px;font-weight:600;letter-spacing:.5px;color:#555;text-transform:uppercase;}#metrics .m .v{font-size:20px;font-weight:600;}table{border-collapse:collapse;width:100%;max-width:1100px;}th,td{padding:.45rem .55rem;border:1px solid #ccc;font-size:12px;text-align:left;}thead th{background:#f2f4f7;}tbody tr:nth-child(even){background:#fafbfc;} .ok{color:#2e7d32;font-weight:600;} .warn{color:#f59e0b;font-weight:600;} .bad{color:#d32f2f;font-weight:600;}footer{margin-top:1.2rem;font-size:12px;color:#555;}#err{color:#d32f2f;margin-top:1rem;}@media (prefers-color-scheme:dark){body{background:#0f1115;color:#f5f6f8;}#metrics .m{background:#1b1f26;border-color:#2a313b;color:#f5f6f8;}thead th{background:#1e252d;}table,th,td{border-color:#2a313b;}code{background:#1e252d;}a{color:#3b82f6;}} .health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;} .health-good{background:#e6f7ed;border-color:#9ad5b6;} .health-warn{background:#fff7e6;border-color:#f5c063;} .health-poor{background:#ffebe8;border-color:#f08a80;} .health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1485
+ "</style></head><body>",
1486
+ f"<h1>{title}</h1>",
1487
+ "<p>Source: <code>latest/history/history-trend.json</code> · <a href='index.html'>back to runs</a> · <a href='trend.html'>trend viewer</a> · <a href='../latest/history/history-trend.json' target='_blank' rel='noopener'>raw JSON</a></p>",
1488
+ "<div id='metrics'></div>",
1489
+ "<div style='overflow:auto'><table id='hist'><thead><tr><th>#</th><th>Label</th><th>Passed</th><th>Failed</th><th>Broken</th><th>Total</th><th>Pass%</th><th>Health</th></tr></thead><tbody></tbody></table></div>",
1490
+ "<div id='err' hidden></div>",
1491
+ "<footer id='ft'></footer>",
1492
+ "<script>\n(async function(){\n",
1493
+ f" const url = '{json_url}';\n",
1494
+ " const MET=document.getElementById('metrics');\n",
1495
+ " const TB=document.querySelector('#hist tbody');\n",
1496
+ " const ERR=document.getElementById('err');\n",
1497
+ " const FT=document.getElementById('ft');\n",
1498
+ " function pct(p,f,b){const t=(p||0)+(f||0)+(b||0);return t?((p||0)/t*100).toFixed(1)+'%':'-';}\n",
1499
+ " function classify(p,f,b){const t=(p||0)+(f||0)+(b||0);if(!t)return ['-','health-na'];if((f||0)==0&&(b||0)==0&&(p||0)/t>=0.9)return['Good','health-good'];const ratio=(p||0)/t; if(ratio>=0.75)return['Warn','health-warn'];return['Poor','health-poor'];}\n",
1500
+ " let data=null;\n",
1501
+ " try {\n",
1502
+ " const r=await fetch(url, { cache: 'no-store' });\n",
1503
+ " const ct=r.headers.get('content-type')||'';\n",
1504
+ " if(!r.ok) throw new Error('HTTP '+r.status);\n",
1505
+ " if(!ct.includes('application/json')){const txt=await r.text();throw new Error('Unexpected content-type ('+ct+'), length='+txt.length+' — are 403/404 mapped to index.html at CDN?');}\n",
1506
+ " data=await r.json();\n",
1507
+ " if(!Array.isArray(data)) throw new Error('Unexpected JSON shape');\n",
1508
+ " } catch(e) {\n",
1509
+ " ERR.textContent='Failed to load history: '+(e && e.message? e.message : String(e));ERR.hidden=false;return;\n",
1510
+ " }\n",
1511
+ " const rows=data.filter(d=>d&&typeof d==='object').map((d,i)=>{\n",
1512
+ " const st=(d.statistic&&typeof d.statistic==='object')?d.statistic:((d.data&&typeof d.data==='object')?d.data:{});\n",
1513
+ " const label=d.reportName||d.buildOrder||d.name||st.name||i+1;\n",
1514
+ " const total=typeof st.total==='number'?st.total:(st.passed||0)+(st.failed||0)+(st.broken||0);\n",
1515
+ " return {idx:i,label:String(label),passed:st.passed||0,failed:st.failed||0,broken:st.broken||0,total:total};\n",
1516
+ " });\n",
1517
+ " if(!rows.length){ERR.textContent='No usable entries.';ERR.hidden=false;return;}\n",
1518
+ " const latest=rows[rows.length-1];\n",
1519
+ " const passRates=rows.map(r=>r.total? r.passed/r.total:0);\n",
1520
+ " const avgAll=(passRates.reduce((a,b)=>a+b,0)/passRates.length*100).toFixed(1)+'%';\n",
1521
+ " const last10=passRates.slice(-10);\n",
1522
+ " const avg10=(last10.reduce((a,b)=>a+b,0)/last10.length*100).toFixed(1)+'%';\n",
1523
+ " let streak=0;\n",
1524
+ " for(let i=rows.length-1;i>=0;i--){if(rows[i].failed===0&&rows[i].broken===0)streak++;else break;}\n",
1525
+ " function card(t,v){return `<div class='m'><h3>${t}</h3><div class='v'>${v}</div></div>`;}\n",
1526
+ " const latestPct=pct(latest.passed,latest.failed,latest.broken);\n",
1527
+ " MET.innerHTML=card('Runs',rows.length)+card('Latest Pass%',latestPct)+card('Avg Pass% (all)',avgAll)+card('Avg Pass% (last10)',avg10)+card('Healthy Streak',streak)+card('Failures (latest)',latest.failed);\n",
1528
+ " rows.slice(-80).reverse().forEach(r=>{\n",
1529
+ " const pr=pct(r.passed,r.failed,r.broken);\n",
1530
+ " const [hl,cls]=classify(r.passed,r.failed,r.broken);\n",
1531
+ " TB.insertAdjacentHTML('beforeend',`<tr class='${cls}'><td>${rows.length-r.idx}</td><td>${r.label}</td><td>${r.passed}</td><td>${r.failed}</td><td>${r.broken}</td><td>${r.total}</td><td>${pr}</td><td><span class='health-badge ${cls}'>${hl}</span></td></tr>`);\n",
1532
+ " });\n",
1533
+ " FT.textContent='Entries: '+rows.length+' · Generated '+new Date().toISOString();\n",
1534
+ "})();</script>",
1535
+ "</body></html>",
1536
+ ]
1537
+ return "".join(parts).encode("utf-8")
713
1538
 
714
1539
 
715
- def _ensure_directory_placeholder(
716
- cfg: PublishConfig,
717
- index_file: Path,
718
- dir_prefix: str,
719
- ) -> None:
720
- if not index_file.exists() or not dir_prefix.endswith("/"):
721
- return
722
- body = index_file.read_bytes()
723
- extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
724
- if cfg.ttl_days is not None:
725
- extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
726
- try:
727
- _s3(cfg).put_object(
728
- Bucket=cfg.bucket,
729
- Key=dir_prefix,
730
- Body=body,
731
- CacheControl=extra["CacheControl"],
732
- ContentType=extra["ContentType"],
733
- )
734
- except ClientError as e: # pragma: no cover – best effort
735
- print(f"Placeholder upload skipped: {e}")
1540
+ def _branch_health(p: int | None, f: int | None, b: int | None) -> tuple[str, str]:
1541
+ if p is None or (f is None and b is None):
1542
+ return ("-", "health-na")
1543
+ f2 = f or 0
1544
+ b2 = b or 0
1545
+ total_exec = p + f2 + b2
1546
+ if total_exec <= 0:
1547
+ return ("-", "health-na")
1548
+ ratio = p / total_exec
1549
+ if f2 == 0 and b2 == 0 and ratio >= 0.9:
1550
+ return ("Good", "health-good")
1551
+ if ratio >= 0.75:
1552
+ return ("Warn", "health-warn")
1553
+ return ("Poor", "health-poor")
1554
+
1555
+
1556
+ def _render_branch_row(br: dict) -> str:
1557
+ bname = br.get("branch", "?")
1558
+ rid = br.get("latest_run_id") or "-"
1559
+ t = br.get("time")
1560
+ passed = br.get("passed")
1561
+ failed = br.get("failed")
1562
+ broken = br.get("broken")
1563
+ total_runs = br.get("total_runs")
1564
+ latest_url = br.get("latest_url") or f"./{bname}/latest/"
1565
+ runs_url = br.get("runs_url") or f"./{bname}/runs/"
1566
+ trend_url = br.get("trend_url") or f"./{bname}/runs/trend.html"
1567
+ time_cell = _format_epoch_utc(t) if t else "-"
1568
+ pct_pass: str | None = None
1569
+ if passed is not None:
1570
+ exec_total = (passed or 0) + (failed or 0) + (broken or 0)
1571
+ if exec_total > 0:
1572
+ pct_pass = f"{(passed / exec_total) * 100:.1f}%"
1573
+ health_label, health_css = _branch_health(passed, failed, broken)
1574
+ row_classes = []
1575
+ if failed and failed > 0:
1576
+ row_classes.append("row-fail")
1577
+ if broken and broken > 0:
1578
+ row_classes.append("row-broken")
1579
+ if health_css:
1580
+ row_classes.append(health_css)
1581
+ cls_attr = f" class='{' '.join(row_classes)}'" if row_classes else ""
1582
+ return (
1583
+ f"<tr{cls_attr}>"
1584
+ f"<td class='col-branch'><code>{bname}</code></td>"
1585
+ f"<td class='col-lrid'><code>{rid}</code></td>"
1586
+ f"<td class='col-time'>{time_cell}</td>"
1587
+ f"<td class='col-passed'>{passed if passed is not None else '-'}" # noqa: E501
1588
+ f"</td><td class='col-failed'>{failed if failed is not None else '-'}" # noqa: E501
1589
+ f"</td><td class='col-broken'>{broken if broken is not None else '-'}" # noqa: E501
1590
+ f"</td><td class='col-total'>{total_runs if total_runs is not None else '-'}" # noqa: E501
1591
+ f"</td><td class='col-health'><span class='health-badge {health_css}'>{health_label}</span>" # noqa: E501
1592
+ f"</td><td class='col-passpct'>{pct_pass or '-'}" # noqa: E501
1593
+ f"</td><td class='col-links'><a href='{latest_url}'>latest</a> · "
1594
+ f"<a href='{runs_url}'>runs</a> · <a href='{trend_url}'>trend</a></td>"
1595
+ "</tr>"
1596
+ )
736
1597
 
737
1598
 
738
- # --------------------------------------------------------------------------------------
739
- # Preflight / Dry run / Publish orchestration
740
- # --------------------------------------------------------------------------------------
1599
+ def _build_branches_dashboard_html(payload: dict, cfg: PublishConfig) -> bytes:
1600
+ """Render a lightweight branches summary dashboard (schema 1)."""
1601
+ branches = payload.get("branches", [])
1602
+ title = f"Allure Branches: {payload.get('project') or cfg.project}"
1603
+ rows = [_render_branch_row(br) for br in branches]
1604
+ body_rows = (
1605
+ "\n".join(rows)
1606
+ if rows
1607
+ else "<tr><td colspan='10' style='text-align:center'>No branches yet</td></tr>"
1608
+ )
1609
+ updated = payload.get("updated")
1610
+ parts: list[str] = [
1611
+ "<!doctype html><html><head><meta charset='utf-8'>",
1612
+ f"<title>{title}</title>",
1613
+ "<style>",
1614
+ "body{font-family:system-ui;margin:1.5rem;line-height:1.4;}",
1615
+ "h1{margin-top:0;font-size:1.35rem;}",
1616
+ "table{border-collapse:collapse;width:100%;max-width:1100px;}",
1617
+ "th,td{padding:.5rem .6rem;border:1px solid #ccc;font-size:13px;}",
1618
+ "thead th{background:#f2f4f7;text-align:left;}",
1619
+ "tbody tr:nth-child(even){background:#fafbfc;}",
1620
+ "code{background:#f2f4f7;padding:2px 4px;border-radius:3px;font-size:12px;}",
1621
+ "footer{margin-top:1.5rem;font-size:12px;color:#555;}",
1622
+ "#filters{margin:.75rem 0;display:flex;gap:1rem;flex-wrap:wrap;}",
1623
+ "#filters input{padding:4px 6px;font-size:13px;}",
1624
+ ".dim{color:#666;font-size:12px;}",
1625
+ ".row-fail{background:#fff5f4 !important;}",
1626
+ ".row-broken{background:#fff9ef !important;}",
1627
+ ".health-badge{display:inline-block;padding:2px 6px;border-radius:12px;font-size:11px;line-height:1.2;font-weight:600;border:1px solid #ccc;background:#f5f5f5;}",
1628
+ ".health-good{background:#e6f7ed;border-color:#9ad5b6;}",
1629
+ ".health-warn{background:#fff7e6;border-color:#f5c063;}",
1630
+ ".health-poor{background:#ffebe8;border-color:#f08a80;}",
1631
+ ".health-na{background:#f0f1f3;border-color:#c9ccd1;color:#666;}",
1632
+ "</style></head><body>",
1633
+ f"<h1>{title}</h1>",
1634
+ "<div id='filters'><label style='font-size:13px'>Branch filter: "
1635
+ "<input id='branch-filter' type='text' placeholder='substring'></label>"
1636
+ "<span class='dim'>Shows most recently active branches first.</span></div>",
1637
+ "<table id='branches'><thead><tr><th>Branch</th><th>Latest Run</th><th>UTC</th><th>P</th><th>F</th><th>B</th><th>Total Runs</th><th>Health</th><th>%Pass</th><th>Links</th></tr></thead><tbody>", # noqa: E501
1638
+ body_rows,
1639
+ "</tbody></table>",
1640
+ (
1641
+ f"<footer>Updated: {_format_epoch_utc(updated) if updated else '-'} | "
1642
+ f"Project: {payload.get('project') or cfg.project}</footer>"
1643
+ ),
1644
+ "<script>(function(){const f=document.getElementById('branch-filter');const tbl=document.getElementById('branches');f.addEventListener('input',()=>{const q=f.value.trim().toLowerCase();[...tbl.tBodies[0].rows].forEach(r=>{if(!q){r.style.display='';return;}const name=r.querySelector('.col-branch').textContent.toLowerCase();r.style.display=name.includes(q)?'':'';});});})();</script>", # noqa: E501
1645
+ "</body></html>",
1646
+ ]
1647
+ return "".join(parts).encode("utf-8")
741
1648
 
742
1649
 
743
1650
  def preflight(
@@ -775,7 +1682,12 @@ def preflight(
775
1682
  # region detection (defensive: some stubs may return None)
776
1683
  if head:
777
1684
  bucket_region = (
778
- head.get("ResponseMetadata", {}).get("HTTPHeaders", {}).get("x-amz-bucket-region")
1685
+ head.get("ResponseMetadata", {})
1686
+ .get(
1687
+ "HTTPHeaders",
1688
+ {},
1689
+ )
1690
+ .get("x-amz-bucket-region")
779
1691
  )
780
1692
  # Attempt a small list to confirm permissions
781
1693
  s3.list_objects_v2(
@@ -813,61 +1725,135 @@ def plan_dry_run(cfg: PublishConfig, paths: Paths | None = None) -> dict:
813
1725
  )
814
1726
  else:
815
1727
  samples.append({"note": "Report missing; would run allure generate."})
816
- root = branch_root(cfg.prefix, cfg.project, cfg.branch)
817
- latest_tmp = f"{root}/latest_tmp/"
818
- mapping = {
1728
+ # Align keys with existing test expectations
1729
+ return {
819
1730
  "bucket": cfg.bucket,
820
- "prefix": cfg.prefix,
821
- "project": cfg.project,
822
- "branch": cfg.branch,
823
- "run_id": cfg.run_id,
824
1731
  "run_prefix": cfg.s3_run_prefix,
825
- # Backwards compat: historical key name pointed to temp swap area
826
- "latest_prefix": latest_tmp,
827
- "latest_tmp_prefix": latest_tmp,
828
- "latest_final_prefix": cfg.s3_latest_prefix,
829
- }
830
- return {
831
- **mapping,
1732
+ # reflect the temporary latest staging area (two-phase)
1733
+ "latest_prefix": getattr(
1734
+ cfg,
1735
+ "s3_latest_prefix_tmp",
1736
+ cfg.s3_latest_prefix,
1737
+ ),
1738
+ "samples": samples,
832
1739
  "run_url": cfg.url_run(),
833
1740
  "latest_url": cfg.url_latest(),
834
- "context_url": getattr(cfg, "context_url", None),
835
- "metadata": cfg.metadata or {},
836
- "samples": samples,
837
- "encryption": {
838
- "sse": cfg.sse,
839
- "sse_kms_key_id": cfg.sse_kms_key_id,
840
- },
841
1741
  }
842
1742
 
843
1743
 
844
- def publish(cfg: PublishConfig, paths: Paths | None = None) -> dict:
845
- paths = paths or Paths()
846
- pull_history(cfg, paths)
847
- generate_report(paths)
848
- upload_dir(cfg, paths.report, cfg.s3_run_prefix)
849
- _ensure_directory_placeholder(cfg, paths.report / "index.html", cfg.s3_run_prefix)
850
- two_phase_update_latest(cfg, paths.report)
1744
+ def _maybe_archive_run(cfg: PublishConfig, paths: Paths) -> str | None:
1745
+ """Optionally archive the run under an archive/ prefix.
1746
+
1747
+ Controlled by cfg.archive_run (bool; the legacy cfg.archive_runs spelling is
1748
+ also honoured). Best-effort; failures do not abort publish.
1749
+ Returns the S3 key of the uploaded archive (placed under the run prefix) if performed.
1750
+ """
1751
+ # Backward compatibility: earlier implementation mistakenly looked for
1752
+ # cfg.archive_runs (plural). The correct flag is cfg.archive_run; both are accepted.
1753
+ should_archive = getattr(cfg, "archive_run", False) or getattr(cfg, "archive_runs", False)
1754
+ if not should_archive:
1755
+ return None
1756
+ import tempfile
1757
+
1758
+ archive_format = getattr(cfg, "archive_format", "tar.gz") or "tar.gz"
1759
+ run_root = paths.report
1760
+ if not run_root or not run_root.exists():
1761
+ return None
1762
+ # Destination S3 key (placed inside the run prefix)
1763
+ # s3://bucket/<prefix>/<project>/<branch>/<run_id>/<run_id>.tar.gz
1764
+ archive_filename = f"{cfg.run_id}.{'zip' if archive_format == 'zip' else 'tar.gz'}"
1765
+ s3_key = f"{cfg.s3_run_prefix}{archive_filename}"
851
1766
  try:
852
- write_manifest(cfg, paths)
853
- except ClientError as e: # pragma: no cover – non fatal
854
- print(f"Manifest write skipped: {e}")
855
- try: # retention cleanup
856
- if getattr(cfg, "max_keep_runs", None):
857
- cleanup_old_runs(cfg, int(cfg.max_keep_runs))
1767
+ tmp_dir = tempfile.mkdtemp(prefix="allure-arch-")
1768
+ archive_path = Path(tmp_dir) / archive_filename
1769
+ if archive_format == "zip":
1770
+ import zipfile
1771
+
1772
+ with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
1773
+ for p in run_root.rglob("*"):
1774
+ if p.is_file():
1775
+ zf.write(p, arcname=p.relative_to(run_root).as_posix())
1776
+ else: # tar.gz
1777
+ import tarfile
1778
+
1779
+ with tarfile.open(archive_path, "w:gz") as tf:
1780
+ for p in run_root.rglob("*"):
1781
+ if p.is_file():
1782
+ tf.add(p, arcname=p.relative_to(run_root).as_posix())
1783
+ # Upload archive object
1784
+ s3 = _s3(cfg)
1785
+ extra = {
1786
+ "CacheControl": "public, max-age=31536000, immutable",
1787
+ "ContentType": "application/gzip" if archive_format != "zip" else "application/zip",
1788
+ }
1789
+ if cfg.ttl_days is not None:
1790
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1791
+ if cfg.sse:
1792
+ extra["ServerSideEncryption"] = cfg.sse
1793
+ if cfg.sse == "aws:kms" and cfg.sse_kms_key_id:
1794
+ extra["SSEKMSKeyId"] = cfg.sse_kms_key_id
1795
+ s3.upload_file(str(archive_path), cfg.bucket, s3_key, ExtraArgs=extra)
1796
+ print(f"[publish] Archived run bundle uploaded: s3://{cfg.bucket}/{s3_key}")
1797
+ return s3_key
858
1798
  except Exception as e: # pragma: no cover
859
- print(f"Cleanup skipped: {e}")
1799
+ if os.getenv("ALLURE_HOST_DEBUG"):
1800
+ print(f"[publish] archive skipped: {e}")
1801
+ return None
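A sketch of opting into archiving, assuming cfg is an existing, mutable PublishConfig; the attribute names match those read above:

# Opt in on the config; the legacy plural spelling cfg.archive_runs is also honoured.
cfg.archive_run = True
cfg.archive_format = "zip"  # default is "tar.gz"

result = publish(cfg)
# On success the bundle is uploaded next to the report, e.g.
#   s3://<bucket>/<prefix>/<project>/<branch>/<run_id>/<run_id>.zip
# and result["archive_key"] holds that key; on any failure the archive step is
# skipped (set ALLURE_HOST_DEBUG to see why) and the value is None.
print(result["archive_key"])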
860
1802
 
861
- files_count = sum(1 for p in paths.report.rglob("*") if p.is_file())
862
- return {
863
- "run_url": cfg.url_run(),
864
- "latest_url": cfg.url_latest(),
865
- "bucket": cfg.bucket,
866
- "run_prefix": cfg.s3_run_prefix,
867
- "latest_prefix": cfg.s3_latest_prefix,
868
- "report_size_bytes": compute_dir_size(paths.report),
869
- "report_files": files_count,
870
- }
1803
+
1804
+ # --------------------------------------------------------------------------------------
1805
+ # Retention cleanup & directory placeholder (restored)
1806
+ # --------------------------------------------------------------------------------------
1807
+
1808
+
1809
+ def cleanup_old_runs(cfg: PublishConfig, keep: int) -> None:
1810
+ if keep is None or keep <= 0:
1811
+ return
1812
+ s3 = _s3(cfg)
1813
+ root = branch_root(cfg.prefix, cfg.project, cfg.branch)
1814
+ paginator = s3.get_paginator("list_objects_v2")
1815
+ run_prefixes: list[str] = []
1816
+ for page in paginator.paginate(
1817
+ Bucket=cfg.bucket,
1818
+ Prefix=f"{root}/",
1819
+ Delimiter="/",
1820
+ ):
1821
+ for cp in page.get("CommonPrefixes", []) or []:
1822
+ pfx = cp.get("Prefix")
1823
+ if not pfx:
1824
+ continue
1825
+ name = pfx.rsplit("/", 2)[-2]
1826
+ if name in {"latest", "runs"}:
1827
+ continue
1828
+ is_ts = len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()
1829
+ if is_ts:
1830
+ run_prefixes.append(pfx)
1831
+ run_prefixes.sort(reverse=True)
1832
+ for old in run_prefixes[keep:]:
1833
+ delete_prefix(cfg.bucket, old, getattr(cfg, "s3_endpoint", None))
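Retention only deletes immediate child prefixes whose final segment looks like a YYYYMMDD-HHMMSS run id, never latest/ or runs/. A standalone copy of that check for illustration:

def _looks_like_run_id(name: str) -> bool:
    # 15 characters, a dash between the date and time parts, digits everywhere else.
    return len(name) == 15 and name[8] == "-" and name.replace("-", "").isdigit()

assert _looks_like_run_id("20250101-010101")
assert not _looks_like_run_id("latest")
assert not _looks_like_run_id("runs")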
1834
+
1835
+
1836
+ def _ensure_directory_placeholder(
1837
+ cfg: PublishConfig,
1838
+ index_file: Path,
1839
+ dir_prefix: str,
1840
+ ) -> None:
1841
+ if not index_file.exists() or not dir_prefix.endswith("/"):
1842
+ return
1843
+ body = index_file.read_bytes()
1844
+ extra = {"CacheControl": "no-cache", "ContentType": "text/html"}
1845
+ if cfg.ttl_days is not None:
1846
+ extra["Tagging"] = f"ttl-days={cfg.ttl_days}"
1847
+ try:
1848
+ _s3(cfg).put_object(
1849
+ Bucket=cfg.bucket,
1850
+ Key=dir_prefix,
1851
+ Body=body,
1852
+ CacheControl=extra["CacheControl"],
1853
+ ContentType=extra["ContentType"],
1854
+ )
1855
+ except ClientError as e: # pragma: no cover
1856
+ print(f"Placeholder upload skipped: {e}")
871
1857
 
872
1858
 
873
1859
  __all__ = [