@1mancompany/onemancompany 0.7.20 → 0.7.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/pyproject.toml +1 -1
- package/src/onemancompany/agents/product_tools.py +273 -1
- package/src/onemancompany/api/routes.py +230 -0
- package/src/onemancompany/core/config.py +2 -0
- package/src/onemancompany/core/models.py +18 -0
- package/src/onemancompany/core/product.py +482 -4
- package/src/onemancompany/core/product_triggers.py +123 -1
package/package.json
CHANGED
package/pyproject.toml
CHANGED
|
@@ -11,7 +11,7 @@ from langchain_core.tools import tool
|
|
|
11
11
|
from loguru import logger
|
|
12
12
|
|
|
13
13
|
from onemancompany.core import product as prod
|
|
14
|
-
from onemancompany.core.models import IssueResolution, IssuePriority, IssueStatus
|
|
14
|
+
from onemancompany.core.models import IssueRelation, IssueResolution, IssuePriority, IssueStatus
|
|
15
15
|
|
|
16
16
|
|
|
17
17
|
# ---------------------------------------------------------------------------
|
|
@@ -21,6 +21,7 @@ from onemancompany.core.models import IssueResolution, IssuePriority, IssueStatu
|
|
|
21
21
|
_RESOLUTION_MAP = {r.value: r for r in IssueResolution}
|
|
22
22
|
_PRIORITY_MAP = {p.value: p for p in IssuePriority}
|
|
23
23
|
_STATUS_MAP = {s.value: s for s in IssueStatus}
|
|
24
|
+
_RELATION_MAP = {r.value: r for r in IssueRelation}
|
|
24
25
|
|
|
25
26
|
|
|
26
27
|
def _resolve_caller_id() -> str:
|
|
@@ -297,6 +298,270 @@ async def update_kr_progress_tool(
|
|
|
297
298
|
return f"Error: {e}"
|
|
298
299
|
|
|
299
300
|
|
|
301
|
+
# ---------------------------------------------------------------------------
|
|
302
|
+
# Sprint tools
|
|
303
|
+
# ---------------------------------------------------------------------------
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
@tool
|
|
307
|
+
async def create_sprint_tool(
|
|
308
|
+
product_slug: str,
|
|
309
|
+
name: str,
|
|
310
|
+
start_date: str,
|
|
311
|
+
end_date: str,
|
|
312
|
+
goal: str = "",
|
|
313
|
+
capacity: str = "",
|
|
314
|
+
) -> str:
|
|
315
|
+
"""Create a new sprint for a product.
|
|
316
|
+
|
|
317
|
+
Args:
|
|
318
|
+
product_slug: The product slug
|
|
319
|
+
name: Sprint name (e.g. "Sprint 3")
|
|
320
|
+
start_date: Start date in YYYY-MM-DD format
|
|
321
|
+
end_date: End date in YYYY-MM-DD format
|
|
322
|
+
goal: Sprint goal description
|
|
323
|
+
capacity: Optional capacity in story points
|
|
324
|
+
"""
|
|
325
|
+
try:
|
|
326
|
+
cap = int(capacity) if capacity else None
|
|
327
|
+
sprint = prod.create_sprint(
|
|
328
|
+
slug=product_slug,
|
|
329
|
+
name=name,
|
|
330
|
+
start_date=start_date,
|
|
331
|
+
end_date=end_date,
|
|
332
|
+
goal=goal,
|
|
333
|
+
capacity=cap,
|
|
334
|
+
)
|
|
335
|
+
logger.debug("create_sprint_tool: {} in {}", sprint["id"], product_slug)
|
|
336
|
+
return f"Created sprint '{name}' ({sprint['id']}) for {product_slug}: {start_date} → {end_date}"
|
|
337
|
+
except (ValueError, FileNotFoundError) as e:
|
|
338
|
+
return f"Error: {e}"
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
@tool
|
|
342
|
+
async def close_sprint_tool(
|
|
343
|
+
product_slug: str,
|
|
344
|
+
sprint_id: str = "",
|
|
345
|
+
) -> str:
|
|
346
|
+
"""Close the active sprint for a product. Calculates velocity, carries over unfinished issues, generates retrospective.
|
|
347
|
+
|
|
348
|
+
Args:
|
|
349
|
+
product_slug: The product slug
|
|
350
|
+
sprint_id: Sprint ID to close. If empty, closes the active sprint.
|
|
351
|
+
"""
|
|
352
|
+
try:
|
|
353
|
+
if not sprint_id:
|
|
354
|
+
active = prod.get_active_sprint(product_slug)
|
|
355
|
+
if not active:
|
|
356
|
+
return f"No active sprint found for {product_slug}"
|
|
357
|
+
sprint_id = active["id"]
|
|
358
|
+
result = prod.close_sprint(product_slug, sprint_id)
|
|
359
|
+
vel = result.get("velocity", 0)
|
|
360
|
+
rate = result.get("completion_rate", 0)
|
|
361
|
+
carry = result.get("carry_over_count", 0)
|
|
362
|
+
logger.debug("close_sprint_tool: {} closed — vel={}", sprint_id, vel)
|
|
363
|
+
return (
|
|
364
|
+
f"Sprint closed: velocity={vel} pts, completion={rate}%, "
|
|
365
|
+
f"carry_over={carry} issues\n\n{result.get('retrospective', '')}"
|
|
366
|
+
)
|
|
367
|
+
except (ValueError, FileNotFoundError) as e:
|
|
368
|
+
return f"Error: {e}"
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
@tool
|
|
372
|
+
async def get_sprint_info_tool(
|
|
373
|
+
product_slug: str,
|
|
374
|
+
sprint_id: str = "",
|
|
375
|
+
) -> str:
|
|
376
|
+
"""Get sprint information. Defaults to the active sprint if no ID given.
|
|
377
|
+
|
|
378
|
+
Args:
|
|
379
|
+
product_slug: The product slug
|
|
380
|
+
sprint_id: Sprint ID. If empty, returns the active sprint.
|
|
381
|
+
"""
|
|
382
|
+
try:
|
|
383
|
+
if sprint_id:
|
|
384
|
+
sprint = prod.load_sprint(product_slug, sprint_id)
|
|
385
|
+
else:
|
|
386
|
+
sprint = prod.get_active_sprint(product_slug)
|
|
387
|
+
|
|
388
|
+
if not sprint:
|
|
389
|
+
# List all sprints as fallback
|
|
390
|
+
all_sprints = prod.list_sprints(product_slug)
|
|
391
|
+
if not all_sprints:
|
|
392
|
+
return f"No sprints found for {product_slug}"
|
|
393
|
+
lines = [f"No active sprint. All sprints for {product_slug}:"]
|
|
394
|
+
for s in all_sprints:
|
|
395
|
+
lines.append(f"- [{s['status']}] {s['name']} ({s['id']}) {s['start_date']}→{s['end_date']}")
|
|
396
|
+
return "\n".join(lines)
|
|
397
|
+
|
|
398
|
+
# Show sprint details
|
|
399
|
+
issues = prod.list_issues(product_slug, sprint=sprint["id"])
|
|
400
|
+
done = [i for i in issues if i.get("status") in ("done", "released")]
|
|
401
|
+
vel = sum(i.get("story_points") or 0 for i in done)
|
|
402
|
+
total_pts = sum(i.get("story_points") or 0 for i in issues)
|
|
403
|
+
|
|
404
|
+
lines = [
|
|
405
|
+
f"**{sprint['name']}** ({sprint['id']})",
|
|
406
|
+
f"Status: {sprint['status']}",
|
|
407
|
+
f"Goal: {sprint.get('goal') or 'N/A'}",
|
|
408
|
+
f"Period: {sprint['start_date']} → {sprint['end_date']}",
|
|
409
|
+
f"Issues: {len(done)}/{len(issues)} done",
|
|
410
|
+
f"Points: {vel}/{total_pts}",
|
|
411
|
+
]
|
|
412
|
+
if sprint.get("capacity"):
|
|
413
|
+
lines.append(f"Capacity: {sprint['capacity']} pts")
|
|
414
|
+
|
|
415
|
+
suggestion = prod.suggest_capacity(product_slug)
|
|
416
|
+
if suggestion is not None:
|
|
417
|
+
lines.append(f"Suggested capacity (avg last 3): {suggestion} pts")
|
|
418
|
+
|
|
419
|
+
return "\n".join(lines)
|
|
420
|
+
except (ValueError, FileNotFoundError) as e:
|
|
421
|
+
return f"Error: {e}"
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
# ---------------------------------------------------------------------------
|
|
425
|
+
# Issue link tools
|
|
426
|
+
# ---------------------------------------------------------------------------
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
@tool
|
|
430
|
+
async def link_issues_tool(
|
|
431
|
+
product_slug: str,
|
|
432
|
+
issue_id: str,
|
|
433
|
+
target_id: str,
|
|
434
|
+
relation: str,
|
|
435
|
+
) -> str:
|
|
436
|
+
"""Link two issues with a dependency or relation.
|
|
437
|
+
|
|
438
|
+
Args:
|
|
439
|
+
product_slug: The product slug
|
|
440
|
+
issue_id: Source issue ID
|
|
441
|
+
target_id: Target issue ID
|
|
442
|
+
relation: blocks, blocked_by, or relates_to
|
|
443
|
+
"""
|
|
444
|
+
rel = _RELATION_MAP.get(relation)
|
|
445
|
+
if rel is None:
|
|
446
|
+
return f"Error: invalid relation '{relation}'. Must be one of: {', '.join(_RELATION_MAP)}"
|
|
447
|
+
try:
|
|
448
|
+
prod.add_issue_link(product_slug, issue_id, target_id, rel)
|
|
449
|
+
logger.debug("link_issues_tool: {} —{}→ {}", issue_id, relation, target_id)
|
|
450
|
+
return f"Linked {issue_id} —{relation}→ {target_id}"
|
|
451
|
+
except ValueError as e:
|
|
452
|
+
return f"Error: {e}"
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
@tool
|
|
456
|
+
async def unlink_issues_tool(
|
|
457
|
+
product_slug: str,
|
|
458
|
+
issue_id: str,
|
|
459
|
+
target_id: str,
|
|
460
|
+
) -> str:
|
|
461
|
+
"""Remove all links between two issues.
|
|
462
|
+
|
|
463
|
+
Args:
|
|
464
|
+
product_slug: The product slug
|
|
465
|
+
issue_id: First issue ID
|
|
466
|
+
target_id: Second issue ID
|
|
467
|
+
"""
|
|
468
|
+
prod.remove_issue_link(product_slug, issue_id, target_id)
|
|
469
|
+
logger.debug("unlink_issues_tool: {} ↔ {}", issue_id, target_id)
|
|
470
|
+
return f"Unlinked {issue_id} ↔ {target_id}"
|
|
471
|
+
|
|
472
|
+
|
|
473
|
+
@tool
|
|
474
|
+
async def check_blocked_issues_tool(
|
|
475
|
+
product_slug: str,
|
|
476
|
+
) -> str:
|
|
477
|
+
"""List all issues that are currently blocked by unfinished dependencies.
|
|
478
|
+
|
|
479
|
+
Args:
|
|
480
|
+
product_slug: The product slug
|
|
481
|
+
"""
|
|
482
|
+
all_issues = prod.list_issues(product_slug)
|
|
483
|
+
blocked = []
|
|
484
|
+
for issue in all_issues:
|
|
485
|
+
if issue.get("status") in (IssueStatus.DONE.value, IssueStatus.RELEASED.value):
|
|
486
|
+
continue
|
|
487
|
+
if prod.is_blocked(product_slug, issue["id"]):
|
|
488
|
+
blockers = [
|
|
489
|
+
l["issue_id"] for l in issue.get("issue_links", [])
|
|
490
|
+
if l["relation"] == IssueRelation.BLOCKED_BY.value
|
|
491
|
+
]
|
|
492
|
+
blocked.append(f"- [{issue.get('priority', '?')}] {issue['title']} ({issue['id']}) blocked by: {', '.join(blockers)}")
|
|
493
|
+
if not blocked:
|
|
494
|
+
return "No blocked issues found"
|
|
495
|
+
return f"Blocked issues ({len(blocked)}):\n" + "\n".join(blocked)
|
|
496
|
+
|
|
497
|
+
|
|
498
|
+
@tool
|
|
499
|
+
async def manage_review_tool(
|
|
500
|
+
product_slug: str,
|
|
501
|
+
action: str,
|
|
502
|
+
review_id: str = "",
|
|
503
|
+
item_key: str = "",
|
|
504
|
+
checked: str = "",
|
|
505
|
+
) -> str:
|
|
506
|
+
"""Manage product review checklists: list, view, check items, or complete.
|
|
507
|
+
|
|
508
|
+
Args:
|
|
509
|
+
product_slug: The product slug
|
|
510
|
+
action: list, view, check, uncheck, or complete
|
|
511
|
+
review_id: Review ID (required for view/check/uncheck/complete)
|
|
512
|
+
item_key: Checklist item key (required for check/uncheck)
|
|
513
|
+
checked: 'true' or 'false' (for check/uncheck, overrides action)
|
|
514
|
+
"""
|
|
515
|
+
try:
|
|
516
|
+
if action == "list":
|
|
517
|
+
reviews = prod.list_reviews(product_slug)
|
|
518
|
+
if not reviews:
|
|
519
|
+
return "No reviews found"
|
|
520
|
+
lines = []
|
|
521
|
+
for r in reviews:
|
|
522
|
+
checked_count = sum(1 for i in r.get("items", []) if i.get("checked"))
|
|
523
|
+
total = len(r.get("items", []))
|
|
524
|
+
lines.append(f"- [{r['status']}] {r['id']} ({r['trigger']}) {checked_count}/{total} items checked")
|
|
525
|
+
return "\n".join(lines)
|
|
526
|
+
|
|
527
|
+
if not review_id:
|
|
528
|
+
return "Error: review_id is required for this action"
|
|
529
|
+
|
|
530
|
+
if action == "view":
|
|
531
|
+
review = prod.load_review(product_slug, review_id)
|
|
532
|
+
if not review:
|
|
533
|
+
return f"Review '{review_id}' not found"
|
|
534
|
+
lines = [
|
|
535
|
+
f"**Review {review['id']}**",
|
|
536
|
+
f"Status: {review['status']}",
|
|
537
|
+
f"Trigger: {review['trigger']} ({review.get('trigger_ref', '')})",
|
|
538
|
+
f"Owner: {review['owner']}",
|
|
539
|
+
"",
|
|
540
|
+
"Checklist:",
|
|
541
|
+
]
|
|
542
|
+
for item in review.get("items", []):
|
|
543
|
+
mark = "✓" if item.get("checked") else "○"
|
|
544
|
+
lines.append(f" {mark} [{item['key']}] {item['label']}")
|
|
545
|
+
return "\n".join(lines)
|
|
546
|
+
|
|
547
|
+
if action in ("check", "uncheck"):
|
|
548
|
+
if not item_key:
|
|
549
|
+
return "Error: item_key is required for check/uncheck"
|
|
550
|
+
is_checked = action == "check"
|
|
551
|
+
if checked:
|
|
552
|
+
is_checked = checked.lower() == "true"
|
|
553
|
+
prod.update_review_item(product_slug, review_id, item_key, checked=is_checked)
|
|
554
|
+
return f"{'Checked' if is_checked else 'Unchecked'} item '{item_key}' in review {review_id}"
|
|
555
|
+
|
|
556
|
+
if action == "complete":
|
|
557
|
+
review = prod.complete_review(product_slug, review_id)
|
|
558
|
+
return f"Review {review_id} completed at {review['completed_at']}"
|
|
559
|
+
|
|
560
|
+
return f"Error: unknown action '{action}'. Use: list, view, check, uncheck, complete"
|
|
561
|
+
except ValueError as e:
|
|
562
|
+
return f"Error: {e}"
|
|
563
|
+
|
|
564
|
+
|
|
300
565
|
# ---------------------------------------------------------------------------
|
|
301
566
|
# Export
|
|
302
567
|
# ---------------------------------------------------------------------------
|
|
@@ -309,4 +574,11 @@ PRODUCT_TOOLS = [
|
|
|
309
574
|
get_product_context_tool,
|
|
310
575
|
list_product_issues_tool,
|
|
311
576
|
update_kr_progress_tool,
|
|
577
|
+
create_sprint_tool,
|
|
578
|
+
close_sprint_tool,
|
|
579
|
+
get_sprint_info_tool,
|
|
580
|
+
link_issues_tool,
|
|
581
|
+
unlink_issues_tool,
|
|
582
|
+
check_blocked_issues_tool,
|
|
583
|
+
manage_review_tool,
|
|
312
584
|
]
|
|
@@ -7331,11 +7331,27 @@ async def api_product_detail(slug: str) -> dict:
|
|
|
7331
7331
|
all_projects = list_projects()
|
|
7332
7332
|
linked_projects = [p for p in all_projects if p.get("product_id") == product.get("id")]
|
|
7333
7333
|
|
|
7334
|
+
# Sprints
|
|
7335
|
+
sprints = prod.list_sprints(slug)
|
|
7336
|
+
active_sprint = prod.get_active_sprint(slug)
|
|
7337
|
+
suggested_capacity = prod.suggest_capacity(slug)
|
|
7338
|
+
|
|
7339
|
+
# Reviews
|
|
7340
|
+
reviews = prod.list_reviews(slug)
|
|
7341
|
+
|
|
7342
|
+
# Blocked issues count
|
|
7343
|
+
blocked_count = sum(1 for i in issues if prod.is_blocked(slug, i["id"]))
|
|
7344
|
+
|
|
7334
7345
|
return {
|
|
7335
7346
|
"product": product,
|
|
7336
7347
|
"issues": issues,
|
|
7337
7348
|
"versions": versions,
|
|
7338
7349
|
"projects": linked_projects,
|
|
7350
|
+
"sprints": sprints,
|
|
7351
|
+
"active_sprint": active_sprint,
|
|
7352
|
+
"suggested_capacity": suggested_capacity,
|
|
7353
|
+
"reviews": reviews,
|
|
7354
|
+
"blocked_issues_count": blocked_count,
|
|
7339
7355
|
}
|
|
7340
7356
|
|
|
7341
7357
|
|
|
@@ -7415,3 +7431,217 @@ async def api_start_product_planning(slug: str) -> dict:
|
|
|
7415
7431
|
product_id=product["id"],
|
|
7416
7432
|
)
|
|
7417
7433
|
return {"conversation_id": conv.id, "existing": False}
|
|
7434
|
+
|
|
7435
|
+
|
|
7436
|
+
# ── Sprints ──────────────────────────────────────────────────────────────────
|
|
7437
|
+
|
|
7438
|
+
|
|
7439
|
+
@router.post("/api/product/{slug}/sprint")
|
|
7440
|
+
async def api_create_sprint(slug: str, request: Request) -> dict:
|
|
7441
|
+
"""Create a sprint for a product."""
|
|
7442
|
+
from onemancompany.core import product as prod
|
|
7443
|
+
|
|
7444
|
+
body = await request.json()
|
|
7445
|
+
name = body.get("name")
|
|
7446
|
+
start_date = body.get("start_date")
|
|
7447
|
+
end_date = body.get("end_date")
|
|
7448
|
+
if not name or not start_date or not end_date:
|
|
7449
|
+
raise HTTPException(status_code=400, detail="Missing required fields: name, start_date, end_date")
|
|
7450
|
+
try:
|
|
7451
|
+
result = prod.create_sprint(
|
|
7452
|
+
slug=slug,
|
|
7453
|
+
name=name,
|
|
7454
|
+
start_date=start_date,
|
|
7455
|
+
end_date=end_date,
|
|
7456
|
+
goal=body.get("goal", ""),
|
|
7457
|
+
capacity=int(body["capacity"]) if body.get("capacity") else None,
|
|
7458
|
+
)
|
|
7459
|
+
except ValueError as exc:
|
|
7460
|
+
raise HTTPException(status_code=404, detail=str(exc))
|
|
7461
|
+
return result
|
|
7462
|
+
|
|
7463
|
+
|
|
7464
|
+
@router.get("/api/product/{slug}/sprints")
|
|
7465
|
+
async def api_list_sprints(slug: str, status: str = "") -> list[dict]:
|
|
7466
|
+
"""List sprints for a product, optionally filtered by status."""
|
|
7467
|
+
from onemancompany.core import product as prod
|
|
7468
|
+
|
|
7469
|
+
return prod.list_sprints(slug, status=status or None)
|
|
7470
|
+
|
|
7471
|
+
|
|
7472
|
+
@router.get("/api/product/{slug}/sprint/{sprint_id}")
|
|
7473
|
+
async def api_get_sprint(slug: str, sprint_id: str) -> dict:
|
|
7474
|
+
"""Get a single sprint by ID."""
|
|
7475
|
+
from onemancompany.core import product as prod
|
|
7476
|
+
|
|
7477
|
+
sprint = prod.load_sprint(slug, sprint_id)
|
|
7478
|
+
if not sprint:
|
|
7479
|
+
raise HTTPException(status_code=404, detail=f"Sprint '{sprint_id}' not found")
|
|
7480
|
+
return sprint
|
|
7481
|
+
|
|
7482
|
+
|
|
7483
|
+
@router.put("/api/product/{slug}/sprint/{sprint_id}")
|
|
7484
|
+
async def api_update_sprint(slug: str, sprint_id: str, request: Request) -> dict:
|
|
7485
|
+
"""Update sprint fields (name, goal, start_date, end_date, capacity, status)."""
|
|
7486
|
+
from onemancompany.core import product as prod
|
|
7487
|
+
|
|
7488
|
+
body = await request.json()
|
|
7489
|
+
SPRINT_MUTABLE_FIELDS = {"name", "goal", "start_date", "end_date", "capacity", "status"}
|
|
7490
|
+
filtered = {k: v for k, v in body.items() if k in SPRINT_MUTABLE_FIELDS}
|
|
7491
|
+
if "capacity" in filtered and filtered["capacity"] is not None:
|
|
7492
|
+
filtered["capacity"] = int(filtered["capacity"])
|
|
7493
|
+
try:
|
|
7494
|
+
result = prod.update_sprint(slug, sprint_id, **filtered)
|
|
7495
|
+
except ValueError as exc:
|
|
7496
|
+
raise HTTPException(status_code=400, detail=str(exc))
|
|
7497
|
+
return result
|
|
7498
|
+
|
|
7499
|
+
|
|
7500
|
+
@router.post("/api/product/{slug}/sprint/{sprint_id}/close")
|
|
7501
|
+
async def api_close_sprint(slug: str, sprint_id: str) -> dict:
|
|
7502
|
+
"""Close a sprint: calculate velocity, carry over unfinished issues, generate retrospective."""
|
|
7503
|
+
from onemancompany.core import product as prod
|
|
7504
|
+
|
|
7505
|
+
try:
|
|
7506
|
+
result = prod.close_sprint(slug, sprint_id)
|
|
7507
|
+
except ValueError as exc:
|
|
7508
|
+
raise HTTPException(status_code=400, detail=str(exc))
|
|
7509
|
+
return result
|
|
7510
|
+
|
|
7511
|
+
|
|
7512
|
+
@router.get("/api/product/{slug}/sprint/suggest-capacity")
|
|
7513
|
+
async def api_suggest_sprint_capacity(slug: str) -> dict:
|
|
7514
|
+
"""Suggest sprint capacity based on historical velocity (sliding average of last 3)."""
|
|
7515
|
+
from onemancompany.core import product as prod
|
|
7516
|
+
|
|
7517
|
+
suggestion = prod.suggest_capacity(slug)
|
|
7518
|
+
return {"suggested_capacity": suggestion}
|
|
7519
|
+
|
|
7520
|
+
|
|
7521
|
+
# ---------------------------------------------------------------------------
|
|
7522
|
+
# Issue Links
|
|
7523
|
+
# ---------------------------------------------------------------------------
|
|
7524
|
+
|
|
7525
|
+
|
|
7526
|
+
@router.post("/api/product/{slug}/issue/{issue_id}/link")
|
|
7527
|
+
async def api_add_issue_link(slug: str, issue_id: str, request: Request) -> dict:
|
|
7528
|
+
"""Add a link between two issues."""
|
|
7529
|
+
from onemancompany.core import product as prod
|
|
7530
|
+
from onemancompany.core.models import IssueRelation
|
|
7531
|
+
|
|
7532
|
+
body = await request.json()
|
|
7533
|
+
target_id = body.get("target_id", "")
|
|
7534
|
+
relation = body.get("relation", "")
|
|
7535
|
+
|
|
7536
|
+
if not target_id or not relation:
|
|
7537
|
+
raise HTTPException(status_code=400, detail="target_id and relation are required")
|
|
7538
|
+
|
|
7539
|
+
rel_map = {r.value: r for r in IssueRelation}
|
|
7540
|
+
rel = rel_map.get(relation)
|
|
7541
|
+
if not rel:
|
|
7542
|
+
raise HTTPException(status_code=400, detail=f"Invalid relation. Must be one of: {', '.join(rel_map)}")
|
|
7543
|
+
|
|
7544
|
+
try:
|
|
7545
|
+
prod.add_issue_link(slug, issue_id, target_id, rel)
|
|
7546
|
+
except ValueError as exc:
|
|
7547
|
+
raise HTTPException(status_code=400, detail=str(exc))
|
|
7548
|
+
|
|
7549
|
+
return {"linked": True, "issue_id": issue_id, "target_id": target_id, "relation": relation}
|
|
7550
|
+
|
|
7551
|
+
|
|
7552
|
+
@router.delete("/api/product/{slug}/issue/{issue_id}/link/{target_id}")
|
|
7553
|
+
async def api_remove_issue_link(slug: str, issue_id: str, target_id: str) -> dict:
|
|
7554
|
+
"""Remove all links between two issues."""
|
|
7555
|
+
from onemancompany.core import product as prod
|
|
7556
|
+
|
|
7557
|
+
prod.remove_issue_link(slug, issue_id, target_id)
|
|
7558
|
+
return {"unlinked": True, "issue_id": issue_id, "target_id": target_id}
|
|
7559
|
+
|
|
7560
|
+
|
|
7561
|
+
@router.get("/api/product/{slug}/issue/{issue_id}/links")
|
|
7562
|
+
async def api_get_issue_links(slug: str, issue_id: str) -> list[dict]:
|
|
7563
|
+
"""Get all links for an issue."""
|
|
7564
|
+
from onemancompany.core import product as prod
|
|
7565
|
+
|
|
7566
|
+
return prod.get_issue_links(slug, issue_id)
|
|
7567
|
+
|
|
7568
|
+
|
|
7569
|
+
@router.get("/api/product/{slug}/blocked-issues")
|
|
7570
|
+
async def api_blocked_issues(slug: str) -> list[dict]:
|
|
7571
|
+
"""List all blocked issues for a product."""
|
|
7572
|
+
from onemancompany.core import product as prod
|
|
7573
|
+
|
|
7574
|
+
all_issues = prod.list_issues(slug)
|
|
7575
|
+
blocked = []
|
|
7576
|
+
for issue in all_issues:
|
|
7577
|
+
if prod.is_blocked(slug, issue["id"]):
|
|
7578
|
+
blocked.append(issue)
|
|
7579
|
+
return blocked
|
|
7580
|
+
|
|
7581
|
+
|
|
7582
|
+
# ---------------------------------------------------------------------------
|
|
7583
|
+
# Reviews
|
|
7584
|
+
# ---------------------------------------------------------------------------
|
|
7585
|
+
|
|
7586
|
+
|
|
7587
|
+
@router.post("/api/product/{slug}/review")
|
|
7588
|
+
async def api_create_review(slug: str, request: Request) -> dict:
|
|
7589
|
+
"""Create a review checklist."""
|
|
7590
|
+
from onemancompany.core import product as prod
|
|
7591
|
+
|
|
7592
|
+
body = await request.json()
|
|
7593
|
+
trigger = body.get("trigger", "manual")
|
|
7594
|
+
trigger_ref = body.get("trigger_ref", "")
|
|
7595
|
+
owner = body.get("owner", "")
|
|
7596
|
+
|
|
7597
|
+
review = prod.create_review(
|
|
7598
|
+
slug=slug,
|
|
7599
|
+
trigger=trigger,
|
|
7600
|
+
trigger_ref=trigger_ref,
|
|
7601
|
+
owner=owner,
|
|
7602
|
+
)
|
|
7603
|
+
return review
|
|
7604
|
+
|
|
7605
|
+
|
|
7606
|
+
@router.get("/api/product/{slug}/reviews")
|
|
7607
|
+
async def api_list_reviews(slug: str, status: str = "") -> list[dict]:
|
|
7608
|
+
"""List reviews for a product, optionally filtered by status."""
|
|
7609
|
+
from onemancompany.core import product as prod
|
|
7610
|
+
|
|
7611
|
+
return prod.list_reviews(slug, status=status or None)
|
|
7612
|
+
|
|
7613
|
+
|
|
7614
|
+
@router.get("/api/product/{slug}/review/{review_id}")
|
|
7615
|
+
async def api_get_review(slug: str, review_id: str) -> dict:
|
|
7616
|
+
"""Get a single review."""
|
|
7617
|
+
from onemancompany.core import product as prod
|
|
7618
|
+
|
|
7619
|
+
review = prod.load_review(slug, review_id)
|
|
7620
|
+
if not review:
|
|
7621
|
+
raise HTTPException(status_code=404, detail=f"Review '{review_id}' not found")
|
|
7622
|
+
return review
|
|
7623
|
+
|
|
7624
|
+
|
|
7625
|
+
@router.put("/api/product/{slug}/review/{review_id}/item/{item_key}")
|
|
7626
|
+
async def api_update_review_item(slug: str, review_id: str, item_key: str, request: Request) -> dict:
|
|
7627
|
+
"""Check or uncheck a review checklist item."""
|
|
7628
|
+
from onemancompany.core import product as prod
|
|
7629
|
+
|
|
7630
|
+
body = await request.json()
|
|
7631
|
+
checked = body.get("checked", False)
|
|
7632
|
+
|
|
7633
|
+
try:
|
|
7634
|
+
return prod.update_review_item(slug, review_id, item_key, checked=checked)
|
|
7635
|
+
except ValueError as exc:
|
|
7636
|
+
raise HTTPException(status_code=400, detail=str(exc))
|
|
7637
|
+
|
|
7638
|
+
|
|
7639
|
+
@router.post("/api/product/{slug}/review/{review_id}/complete")
|
|
7640
|
+
async def api_complete_review(slug: str, review_id: str) -> dict:
|
|
7641
|
+
"""Complete a review (all items must be checked)."""
|
|
7642
|
+
from onemancompany.core import product as prod
|
|
7643
|
+
|
|
7644
|
+
try:
|
|
7645
|
+
return prod.complete_review(slug, review_id)
|
|
7646
|
+
except ValueError as exc:
|
|
7647
|
+
raise HTTPException(status_code=400, detail=str(exc))
|
|
@@ -61,6 +61,8 @@ VESSEL_YAML_FILENAME = "vessel.yaml"
|
|
|
61
61
|
PRODUCT_YAML_FILENAME = "product.yaml"
|
|
62
62
|
ISSUES_DIR_NAME = "issues"
|
|
63
63
|
VERSIONS_DIR_NAME = "versions"
|
|
64
|
+
SPRINTS_DIR_NAME = "sprints"
|
|
65
|
+
REVIEWS_DIR_NAME = "reviews"
|
|
64
66
|
TALENT_PERSONA_FILENAME = "talent_persona.md"
|
|
65
67
|
MCP_CONFIG_FILENAME = "mcp_config.json"
|
|
66
68
|
CONVERSATIONS_DIR_NAME = "conversations"
|
|
@@ -162,6 +162,10 @@ class EventType(str, Enum):
|
|
|
162
162
|
ISSUE_ASSIGNED = "issue_assigned"
|
|
163
163
|
KR_UPDATED = "kr_updated"
|
|
164
164
|
VERSION_RELEASED = "version_released"
|
|
165
|
+
SPRINT_CREATED = "sprint_created"
|
|
166
|
+
SPRINT_CLOSED = "sprint_closed"
|
|
167
|
+
REVIEW_CREATED = "review_created"
|
|
168
|
+
REVIEW_COMPLETED = "review_completed"
|
|
165
169
|
|
|
166
170
|
|
|
167
171
|
class ProductStatus(str, Enum):
|
|
@@ -197,6 +201,20 @@ class IssueResolution(str, Enum):
|
|
|
197
201
|
BY_DESIGN = "by_design"
|
|
198
202
|
|
|
199
203
|
|
|
204
|
+
class IssueRelation(str, Enum):
|
|
205
|
+
"""Relationship type between two issues."""
|
|
206
|
+
BLOCKS = "blocks"
|
|
207
|
+
BLOCKED_BY = "blocked_by"
|
|
208
|
+
RELATES_TO = "relates_to"
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
class SprintStatus(str, Enum):
|
|
212
|
+
"""Sprint lifecycle status."""
|
|
213
|
+
PLANNING = "planning"
|
|
214
|
+
ACTIVE = "active"
|
|
215
|
+
CLOSED = "closed"
|
|
216
|
+
|
|
217
|
+
|
|
200
218
|
# ---------------------------------------------------------------------------
|
|
201
219
|
# Performance
|
|
202
220
|
# ---------------------------------------------------------------------------
|
|
@@ -1,7 +1,8 @@
|
|
|
1
|
-
"""Product management — product records, key results, and
|
|
1
|
+
"""Product management — product records, key results, issues, and sprints.
|
|
2
2
|
|
|
3
3
|
Products stored at: PRODUCTS_DIR/{slug}/product.yaml
|
|
4
4
|
Issues stored at: PRODUCTS_DIR/{slug}/issues/{issue_id}.yaml
|
|
5
|
+
Sprints stored at: PRODUCTS_DIR/{slug}/sprints/{sprint_id}.yaml
|
|
5
6
|
|
|
6
7
|
All YAML I/O through store._read_yaml / _write_yaml.
|
|
7
8
|
Disk is the single source of truth — no in-memory caching.
|
|
@@ -20,14 +21,18 @@ from onemancompany.core.config import (
|
|
|
20
21
|
ISSUES_DIR_NAME,
|
|
21
22
|
PRODUCT_YAML_FILENAME,
|
|
22
23
|
PRODUCTS_DIR,
|
|
24
|
+
REVIEWS_DIR_NAME,
|
|
25
|
+
SPRINTS_DIR_NAME,
|
|
23
26
|
VERSIONS_DIR_NAME,
|
|
24
27
|
DirtyCategory,
|
|
25
28
|
)
|
|
26
29
|
from onemancompany.core.models import (
|
|
30
|
+
IssueRelation,
|
|
27
31
|
IssueResolution,
|
|
28
32
|
IssuePriority,
|
|
29
33
|
IssueStatus,
|
|
30
34
|
ProductStatus,
|
|
35
|
+
SprintStatus,
|
|
31
36
|
)
|
|
32
37
|
from onemancompany.core.store import _read_yaml, _write_yaml, mark_dirty
|
|
33
38
|
|
|
@@ -296,7 +301,7 @@ def create_issue(
|
|
|
296
301
|
"labels": labels or [],
|
|
297
302
|
"assignee_id": assignee_id,
|
|
298
303
|
"linked_task_ids": [],
|
|
299
|
-
"
|
|
304
|
+
"issue_links": [],
|
|
300
305
|
"milestone_version": milestone_version,
|
|
301
306
|
"created_at": now,
|
|
302
307
|
"created_by": created_by,
|
|
@@ -318,10 +323,26 @@ def create_issue(
|
|
|
318
323
|
|
|
319
324
|
|
|
320
325
|
def load_issue(slug: str, issue_id: str) -> dict | None:
|
|
321
|
-
"""Load a single issue by ID. Returns None if not found.
|
|
326
|
+
"""Load a single issue by ID. Returns None if not found.
|
|
327
|
+
|
|
328
|
+
Auto-migrates old ``linked_issue_ids`` format to ``issue_links``.
|
|
329
|
+
"""
|
|
322
330
|
path = _issues_dir(slug) / f"{issue_id}.yaml"
|
|
323
331
|
data = _read_yaml(path)
|
|
324
|
-
|
|
332
|
+
if not data:
|
|
333
|
+
return None
|
|
334
|
+
|
|
335
|
+
# Auto-migrate: linked_issue_ids → issue_links
|
|
336
|
+
if "linked_issue_ids" in data and "issue_links" not in data:
|
|
337
|
+
old_ids = data.pop("linked_issue_ids", [])
|
|
338
|
+
data["issue_links"] = [
|
|
339
|
+
{"issue_id": iid, "relation": IssueRelation.RELATES_TO.value}
|
|
340
|
+
for iid in old_ids
|
|
341
|
+
]
|
|
342
|
+
_write_yaml(path, data)
|
|
343
|
+
logger.debug("Migrated linked_issue_ids → issue_links for {}", issue_id)
|
|
344
|
+
|
|
345
|
+
return data
|
|
325
346
|
|
|
326
347
|
|
|
327
348
|
def list_issues(
|
|
@@ -330,6 +351,7 @@ def list_issues(
|
|
|
330
351
|
status: IssueStatus | None = None,
|
|
331
352
|
priority: IssuePriority | None = None,
|
|
332
353
|
labels: list[str] | None = None,
|
|
354
|
+
sprint: str | None = None,
|
|
333
355
|
) -> list[dict]:
|
|
334
356
|
"""List issues for a product, optionally filtered."""
|
|
335
357
|
issues_path = _issues_dir(slug)
|
|
@@ -351,6 +373,8 @@ def list_issues(
|
|
|
351
373
|
issue_labels = set(data.get("labels", []))
|
|
352
374
|
if not set(labels).intersection(issue_labels):
|
|
353
375
|
continue
|
|
376
|
+
if sprint is not None and data.get("sprint") != sprint:
|
|
377
|
+
continue
|
|
354
378
|
results.append(data)
|
|
355
379
|
return results
|
|
356
380
|
|
|
@@ -418,6 +442,226 @@ def reopen_issue(slug: str, issue_id: str) -> dict | None:
|
|
|
418
442
|
return data
|
|
419
443
|
|
|
420
444
|
|
|
445
|
+
# ---------------------------------------------------------------------------
|
|
446
|
+
# Issue Links
|
|
447
|
+
# ---------------------------------------------------------------------------
|
|
448
|
+
|
|
449
|
+
_REVERSE_RELATION = {
|
|
450
|
+
IssueRelation.BLOCKS.value: IssueRelation.BLOCKED_BY.value,
|
|
451
|
+
IssueRelation.BLOCKED_BY.value: IssueRelation.BLOCKS.value,
|
|
452
|
+
IssueRelation.RELATES_TO.value: IssueRelation.RELATES_TO.value,
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def add_issue_link(
|
|
457
|
+
slug: str,
|
|
458
|
+
issue_id: str,
|
|
459
|
+
target_id: str,
|
|
460
|
+
relation: IssueRelation,
|
|
461
|
+
) -> None:
|
|
462
|
+
"""Add a bidirectional link between two issues.
|
|
463
|
+
|
|
464
|
+
Raises ValueError on self-reference or if either issue is not found.
|
|
465
|
+
Idempotent — re-adding the same link is a no-op.
|
|
466
|
+
"""
|
|
467
|
+
if issue_id == target_id:
|
|
468
|
+
raise ValueError("Cannot link an issue to itself (self-reference)")
|
|
469
|
+
|
|
470
|
+
issue = load_issue(slug, issue_id)
|
|
471
|
+
if not issue:
|
|
472
|
+
raise ValueError(f"Issue '{issue_id}' not found in '{slug}'")
|
|
473
|
+
target = load_issue(slug, target_id)
|
|
474
|
+
if not target:
|
|
475
|
+
raise ValueError(f"Issue '{target_id}' not found in '{slug}'")
|
|
476
|
+
|
|
477
|
+
rel_value = relation.value if hasattr(relation, "value") else relation
|
|
478
|
+
reverse_rel = _REVERSE_RELATION[rel_value]
|
|
479
|
+
|
|
480
|
+
# Add forward link (idempotent)
|
|
481
|
+
_add_link_entry(slug, issue_id, target_id, rel_value)
|
|
482
|
+
# Add reverse link
|
|
483
|
+
_add_link_entry(slug, target_id, issue_id, reverse_rel)
|
|
484
|
+
|
|
485
|
+
mark_dirty(DirtyCategory.PRODUCTS)
|
|
486
|
+
logger.debug("Linked {} —{}→ {}", issue_id, rel_value, target_id)
|
|
487
|
+
|
|
488
|
+
|
|
489
|
+
def _add_link_entry(slug: str, issue_id: str, target_id: str, relation: str) -> None:
|
|
490
|
+
"""Add a single link entry to an issue (idempotent)."""
|
|
491
|
+
with _get_slug_lock(slug):
|
|
492
|
+
path = _issues_dir(slug) / f"{issue_id}.yaml"
|
|
493
|
+
data = _read_yaml(path)
|
|
494
|
+
if not data:
|
|
495
|
+
return
|
|
496
|
+
links = data.setdefault("issue_links", [])
|
|
497
|
+
# Idempotent check
|
|
498
|
+
if any(l["issue_id"] == target_id and l["relation"] == relation for l in links):
|
|
499
|
+
return
|
|
500
|
+
links.append({
|
|
501
|
+
"issue_id": target_id,
|
|
502
|
+
"relation": relation,
|
|
503
|
+
"created_at": datetime.now().isoformat(),
|
|
504
|
+
})
|
|
505
|
+
_write_yaml(path, data)
|
|
506
|
+
|
|
507
|
+
|
|
508
|
+
def remove_issue_link(slug: str, issue_id: str, target_id: str) -> None:
    """Remove every link between two issues, in both directions.

    Missing issues or absent links are ignored silently.
    """
    for src, dst in ((issue_id, target_id), (target_id, issue_id)):
        _remove_link_entry(slug, src, dst)
    mark_dirty(DirtyCategory.PRODUCTS)
    logger.debug("Unlinked {} ↔ {}", issue_id, target_id)
|
|
514
|
+
|
|
515
|
+
|
|
516
|
+
def _remove_link_entry(slug: str, issue_id: str, target_id: str) -> None:
    """Drop every link entry pointing from ``issue_id`` to ``target_id``."""
    with _get_slug_lock(slug):
        issue_path = _issues_dir(slug) / f"{issue_id}.yaml"
        payload = _read_yaml(issue_path)
        if not payload:
            return
        kept = [
            entry
            for entry in payload.get("issue_links", [])
            if entry["issue_id"] != target_id
        ]
        payload["issue_links"] = kept
        _write_yaml(issue_path, payload)
|
|
526
|
+
|
|
527
|
+
|
|
528
|
+
def get_issue_links(slug: str, issue_id: str) -> list[dict]:
    """Return the ``issue_links`` list of an issue.

    An unknown issue, or one without links, yields an empty list.
    """
    issue = load_issue(slug, issue_id)
    return issue.get("issue_links", []) if issue else []
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
def is_blocked(slug: str, issue_id: str) -> bool:
    """Return True when any ``blocked_by`` link points at an unfinished issue.

    A blocker that no longer exists does not count as blocking.
    """
    issue = load_issue(slug, issue_id)
    if not issue:
        return False

    def _unfinished(blocker_id: str) -> bool:
        # Missing blocker file → treat as not blocking.
        blocker = load_issue(slug, blocker_id)
        return bool(blocker) and blocker.get("status") not in _DONE_STATUSES

    return any(
        _unfinished(link["issue_id"])
        for link in issue.get("issue_links", [])
        if link["relation"] == IssueRelation.BLOCKED_BY.value
    )
|
|
549
|
+
|
|
550
|
+
|
|
551
|
+
# ---------------------------------------------------------------------------
|
|
552
|
+
# Review Checklist
|
|
553
|
+
# ---------------------------------------------------------------------------
|
|
554
|
+
|
|
555
|
+
# Default checklist seeded into every new review (see create_review()).
# NOTE(review): labels mix Chinese and English as authored — they are
# user-facing runtime strings and are kept verbatim.
_DEFAULT_REVIEW_ITEMS = [
    {"key": "update_kr", "label": "更新 KR 进度", "checked": False},
    {"key": "review_issues", "label": "Review open issues", "checked": False},
    {"key": "assign_backlog", "label": "安排 backlog 优先级", "checked": False},
    {"key": "create_issues", "label": "创建新 issues", "checked": False},
]
|
|
561
|
+
|
|
562
|
+
|
|
563
|
+
def _reviews_dir(slug: str) -> Path:
    """Directory holding the review YAML files for product ``slug``."""
    return _product_dir(slug) / REVIEWS_DIR_NAME
|
|
565
|
+
|
|
566
|
+
|
|
567
|
+
def create_review(
    slug: str,
    *,
    trigger: str,
    trigger_ref: str = "",
    owner: str,
    items: list[dict] | None = None,
) -> dict:
    """Create a review checklist for a product and persist it.

    When ``items`` is omitted, a copy of the default checklist is used
    (copied so the module-level template is never mutated). Returns the
    freshly written review dict.
    """
    if items is None:
        checklist = [dict(item) for item in _DEFAULT_REVIEW_ITEMS]
    else:
        checklist = items

    review_id = _gen_id("rev_")
    record = {
        "id": review_id,
        "product_slug": slug,
        "trigger": trigger,
        "trigger_ref": trigger_ref,
        "created_at": datetime.now().isoformat(),
        "owner": owner,
        "status": "open",
        "items": checklist,
        "completed_at": None,
    }

    reviews_dir = _reviews_dir(slug)
    reviews_dir.mkdir(parents=True, exist_ok=True)
    with _get_slug_lock(slug):
        _write_yaml(reviews_dir / f"{review_id}.yaml", record)
    mark_dirty(DirtyCategory.PRODUCTS)
    logger.debug("[PRODUCT] Review created: {} in {}", review_id, slug)
    return record
|
|
598
|
+
|
|
599
|
+
|
|
600
|
+
def load_review(slug: str, review_id: str) -> dict | None:
    """Return the review with the given ID, or None when no file exists."""
    review_path = _reviews_dir(slug) / f"{review_id}.yaml"
    return _read_yaml(review_path) if review_path.exists() else None
|
|
606
|
+
|
|
607
|
+
|
|
608
|
+
def list_reviews(slug: str, status: str | None = None) -> list[dict]:
    """Return every review for a product, optionally filtered by status.

    Reviews are yielded in sorted filename order; unreadable/empty files
    are skipped.
    """
    reviews_dir = _reviews_dir(slug)
    if not reviews_dir.exists():
        return []
    collected: list[dict] = []
    for entry in sorted(reviews_dir.iterdir()):
        if entry.suffix != ".yaml":
            continue
        record = _read_yaml(entry)
        if not record:
            continue
        if status is not None and record.get("status") != status:
            continue
        collected.append(record)
    return collected
|
|
620
|
+
|
|
621
|
+
|
|
622
|
+
def update_review_item(slug: str, review_id: str, item_key: str, *, checked: bool) -> dict:
    """Set the ``checked`` flag on one review item and persist the review.

    Returns the updated review dict.

    Raises:
        ValueError: when the review or the item key does not exist.
    """
    with _get_slug_lock(slug):
        review_path = _reviews_dir(slug) / f"{review_id}.yaml"
        record = _read_yaml(review_path)
        if not record:
            raise ValueError(f"Review '{review_id}' not found in '{slug}'")
        target = next(
            (item for item in record.get("items", []) if item["key"] == item_key),
            None,
        )
        if target is None:
            raise ValueError(f"Item key '{item_key}' not found in review '{review_id}'")
        target["checked"] = checked
        _write_yaml(review_path, record)
        mark_dirty(DirtyCategory.PRODUCTS)
        return record
|
|
639
|
+
|
|
640
|
+
|
|
641
|
+
def complete_review(slug: str, review_id: str) -> dict:
    """Mark a review as completed once every checklist item is checked.

    Returns the updated review dict.

    Raises:
        ValueError: review missing, already completed, or items unchecked.
    """
    with _get_slug_lock(slug):
        review_path = _reviews_dir(slug) / f"{review_id}.yaml"
        record = _read_yaml(review_path)
        if not record:
            raise ValueError(f"Review '{review_id}' not found in '{slug}'")
        if record.get("status") == "completed":
            raise ValueError(f"Review '{review_id}' is already completed")
        pending = [item for item in record.get("items", []) if not item.get("checked")]
        if pending:
            keys = ", ".join(item["key"] for item in pending)
            raise ValueError(f"Cannot complete review: unchecked items: {keys}")
        record["status"] = "completed"
        record["completed_at"] = datetime.now().isoformat()
        _write_yaml(review_path, record)
        mark_dirty(DirtyCategory.PRODUCTS)
        logger.debug("[PRODUCT] Review completed: {}", review_id)
        return record
|
|
663
|
+
|
|
664
|
+
|
|
421
665
|
# ---------------------------------------------------------------------------
|
|
422
666
|
# Product Versioning
|
|
423
667
|
# ---------------------------------------------------------------------------
|
|
@@ -806,3 +1050,237 @@ def sync_issue_statuses(slug: str) -> list[dict]:
|
|
|
806
1050
|
logger.debug("[PRODUCT] Issue {} status derived: {} → {}", issue["id"], current, derived.value)
|
|
807
1051
|
|
|
808
1052
|
return changed
|
|
1053
|
+
|
|
1054
|
+
|
|
1055
|
+
# ---------------------------------------------------------------------------
|
|
1056
|
+
# Sprint management
|
|
1057
|
+
# ---------------------------------------------------------------------------
|
|
1058
|
+
|
|
1059
|
+
# Issue statuses considered "finished" for velocity, blocking and
# carry-over calculations.
_DONE_STATUSES = {IssueStatus.DONE.value, IssueStatus.RELEASED.value}
|
|
1060
|
+
|
|
1061
|
+
|
|
1062
|
+
def _sprints_dir(slug: str) -> Path:
    """Directory holding the sprint YAML files for product ``slug``."""
    return PRODUCTS_DIR / slug / SPRINTS_DIR_NAME
|
|
1064
|
+
|
|
1065
|
+
|
|
1066
|
+
def create_sprint(
    *,
    slug: str,
    name: str,
    start_date: str,
    end_date: str,
    goal: str = "",
    capacity: int | None = None,
) -> dict:
    """Create a sprint (status ``planning``) for a product and persist it.

    Returns the freshly written sprint dict.

    Raises:
        ValueError: when the product slug is unknown.
    """
    product = load_product(slug)
    if not product:
        raise ValueError(f"Product '{slug}' not found")

    sprint_id = _gen_id("sprint_")
    record = {
        "id": sprint_id,
        "product_id": product["id"],
        "name": name,
        "goal": goal,
        "status": SprintStatus.PLANNING.value,
        "start_date": start_date,
        "end_date": end_date,
        "capacity": capacity,
        # Metric fields below are filled in by close_sprint().
        "velocity": None,
        "carry_over_count": 0,
        "completion_rate": None,
        "retrospective": None,
        "created_at": datetime.now().isoformat(),
        "closed_at": None,
    }

    sprints_dir = _sprints_dir(slug)
    sprints_dir.mkdir(parents=True, exist_ok=True)
    with _get_slug_lock(slug):
        _write_yaml(sprints_dir / f"{sprint_id}.yaml", record)
    mark_dirty(DirtyCategory.PRODUCTS)
    logger.debug("[PRODUCT] Sprint created: {} in {}", sprint_id, slug)
    return record
|
|
1107
|
+
|
|
1108
|
+
|
|
1109
|
+
def load_sprint(slug: str, sprint_id: str) -> dict | None:
    """Return the sprint with the given ID, or None when no file exists."""
    sprint_path = _sprints_dir(slug) / f"{sprint_id}.yaml"
    return _read_yaml(sprint_path) if sprint_path.exists() else None
|
|
1115
|
+
|
|
1116
|
+
|
|
1117
|
+
def list_sprints(slug: str, status: str | None = None) -> list[dict]:
    """Return every sprint for a product, optionally filtered by status.

    Sprints are yielded in sorted filename order; unreadable/empty files
    are skipped.
    """
    sprints_dir = _sprints_dir(slug)
    if not sprints_dir.exists():
        return []
    collected: list[dict] = []
    for entry in sorted(sprints_dir.iterdir()):
        if entry.suffix != ".yaml":
            continue
        record = _read_yaml(entry)
        if not record:
            continue
        if status is not None and record.get("status") != status:
            continue
        collected.append(record)
    return collected
|
|
1129
|
+
|
|
1130
|
+
|
|
1131
|
+
def update_sprint(slug: str, sprint_id: str, **fields) -> dict:
    """Merge ``fields`` into a sprint record and persist it.

    Only one sprint per product may be active at a time: attempting to
    activate this sprint while a different one is active raises.

    Raises:
        ValueError: sprint not found, or another sprint is already active.
    """
    sprint = load_sprint(slug, sprint_id)
    if not sprint:
        raise ValueError(f"Sprint '{sprint_id}' not found in '{slug}'")

    # Single-active-sprint constraint, checked only on activation.
    if fields.get("status") == SprintStatus.ACTIVE.value:
        current_active = get_active_sprint(slug)
        if current_active and current_active["id"] != sprint_id:
            raise ValueError(f"Product '{slug}' already has an active sprint: {current_active['id']}")

    sprint.update(fields)
    with _get_slug_lock(slug):
        _write_yaml(_sprints_dir(slug) / f"{sprint_id}.yaml", sprint)
    mark_dirty(DirtyCategory.PRODUCTS)
    return sprint
|
|
1149
|
+
|
|
1150
|
+
|
|
1151
|
+
def get_active_sprint(slug: str) -> dict | None:
    """Return the product's active sprint, or None when none is active."""
    candidates = list_sprints(slug, status=SprintStatus.ACTIVE.value)
    return next(iter(candidates), None)
|
|
1155
|
+
|
|
1156
|
+
|
|
1157
|
+
def get_sprint_velocity(slug: str, sprint_id: str) -> int:
    """Sum the story points of done/released issues in a sprint.

    Issues without story points contribute 0.
    """
    return sum(
        issue.get("story_points") or 0
        for issue in list_issues(slug, sprint=sprint_id)
        if issue.get("status") in _DONE_STATUSES
    )
|
|
1165
|
+
|
|
1166
|
+
|
|
1167
|
+
def close_sprint(slug: str, sprint_id: str) -> dict:
    """Close an active sprint: record velocity, completion rate and carry-over.

    Unfinished issues are moved to the next planning sprint when one
    exists, otherwise returned to the backlog; either way they are
    flagged ``carried_over``. Returns the updated sprint dict.

    Raises:
        ValueError: sprint not found or not currently active.
    """
    sprint = load_sprint(slug, sprint_id)
    if not sprint:
        raise ValueError(f"Sprint '{sprint_id}' not found in '{slug}'")
    if sprint.get("status") != SprintStatus.ACTIVE.value:
        raise ValueError(f"Sprint '{sprint_id}' is not active")

    # 1. Velocity: story points of done/released issues.
    velocity = get_sprint_velocity(slug, sprint_id)

    # 2. Completion stats, captured while issues still belong to this sprint.
    all_issues = list_issues(slug, sprint=sprint_id)
    done_count = sum(1 for i in all_issues if i.get("status") in _DONE_STATUSES)
    total_count = len(all_issues)
    unfinished = [i for i in all_issues if i.get("status") not in _DONE_STATUSES]
    completion_rate = round((done_count / total_count) * 100, 2) if total_count > 0 else 0.0

    # 3. Retrospective — must be built BEFORE the carry-over below:
    #    build_sprint_retrospective() re-queries issues by sprint, and the
    #    carry-over reassigns unfinished issues off this sprint, which would
    #    otherwise make the report miss them and overstate completion.
    retrospective = build_sprint_retrospective(slug, sprint_id)

    # 4. Carry-over: move unfinished issues to the next planning sprint,
    #    or back to the backlog when no planning sprint exists.
    planning_sprints = list_sprints(slug, status=SprintStatus.PLANNING.value)
    next_sprint = planning_sprints[0] if planning_sprints else None
    for issue in unfinished:
        if next_sprint:
            update_issue(slug, issue["id"], sprint=next_sprint["id"], carried_over=True)
        else:
            update_issue(slug, issue["id"], sprint="", status=IssueStatus.BACKLOG.value, carried_over=True)

    # 5. Persist the closed sprint record with its final metrics.
    updated = update_sprint(
        slug, sprint_id,
        status=SprintStatus.CLOSED.value,
        velocity=velocity,
        carry_over_count=len(unfinished),
        completion_rate=completion_rate,
        retrospective=retrospective,
        closed_at=datetime.now().isoformat(),
    )

    logger.debug(
        "[PRODUCT] Sprint {} closed — velocity={}, completion={}%, carry_over={}",
        sprint_id, velocity, completion_rate, len(unfinished),
    )
    return updated
|
|
1217
|
+
|
|
1218
|
+
|
|
1219
|
+
def suggest_capacity(slug: str) -> int | None:
    """Suggest capacity as the rounded mean velocity of the last 3 closed sprints.

    Returns None while fewer than three closed sprints exist.
    """
    closed = list_sprints(slug, status=SprintStatus.CLOSED.value)
    if len(closed) < 3:
        return None
    # Most recent three, ordered by closed_at (missing timestamps sort first).
    by_close_time = sorted(closed, key=lambda sprint: sprint.get("closed_at") or "")
    last_three = by_close_time[-3:]
    total = sum(sprint.get("velocity") or 0 for sprint in last_three)
    return round(total / 3)
|
|
1231
|
+
|
|
1232
|
+
|
|
1233
|
+
def build_sprint_retrospective(slug: str, sprint_id: str) -> str:
    """Generate a sprint retrospective report as a Markdown string.

    The report lists metrics (velocity, completion, carry-over), a
    velocity comparison with the previously closed sprint, and the
    completed / carried-over issues. Returns "" when the sprint does not
    exist. The output is persisted verbatim on the sprint record, so its
    exact formatting matters.

    NOTE(review): assumes it runs while the sprint's issues are still
    assigned to ``sprint_id`` — it re-queries issues by that sprint.
    """
    sprint = load_sprint(slug, sprint_id)
    if not sprint:
        return ""

    issues = list_issues(slug, sprint=sprint_id)
    done = [i for i in issues if i.get("status") in _DONE_STATUSES]
    unfinished = [i for i in issues if i.get("status") not in _DONE_STATUSES]
    # Missing story_points count as 0 in all point totals.
    velocity = sum(i.get("story_points") or 0 for i in done)
    total_points = sum(i.get("story_points") or 0 for i in issues)
    total_count = len(issues)
    done_count = len(done)

    # Compare with previous sprint velocity: the loop keeps the velocity
    # of the most recently closed sprint (by closed_at) other than this one.
    closed = list_sprints(slug, status=SprintStatus.CLOSED.value)
    closed_sorted = sorted(closed, key=lambda s: s.get("closed_at") or "")
    prev_velocity = None
    for cs in closed_sorted:
        if cs["id"] != sprint_id and cs.get("velocity") is not None:
            prev_velocity = cs["velocity"]

    lines = [
        f"## Sprint Retrospective: {sprint['name']}",
        f"**Goal**: {sprint.get('goal') or 'N/A'}",
        f"**Period**: {sprint.get('start_date')} → {sprint.get('end_date')}",
        "",
        f"### Metrics",
        f"- **Velocity**: {velocity} story points",
        f"- **Completion**: {done_count}/{total_count} issues ({round(done_count / total_count * 100, 1) if total_count else 0}%)",
        f"- **Story points completed**: {velocity}/{total_points}",
        f"- **Carry-over**: {len(unfinished)} issues",
    ]

    if prev_velocity is not None:
        delta = velocity - prev_velocity
        direction = "↑" if delta > 0 else ("↓" if delta < 0 else "→")
        lines.append(f"- **vs Previous Sprint**: {direction} {abs(delta)} points ({prev_velocity} → {velocity})")

    if done:
        lines.append("")
        lines.append("### Completed")
        for i in done:
            # Points suffix only when the issue actually has story points.
            pts = f" ({i.get('story_points') or 0}pts)" if i.get("story_points") else ""
            lines.append(f"- ✓ {i['title']}{pts}")

    if unfinished:
        lines.append("")
        lines.append("### Carried Over")
        for i in unfinished:
            pts = f" ({i.get('story_points') or 0}pts)" if i.get("story_points") else ""
            lines.append(f"- ○ {i['title']}{pts}")

    return "\n".join(lines)
|
|
@@ -12,6 +12,7 @@ from loguru import logger
|
|
|
12
12
|
from onemancompany.core.events import CompanyEvent, event_bus
|
|
13
13
|
from onemancompany.core.models import (
|
|
14
14
|
EventType,
|
|
15
|
+
IssueRelation,
|
|
15
16
|
IssuePriority,
|
|
16
17
|
IssueResolution,
|
|
17
18
|
IssueStatus,
|
|
@@ -469,7 +470,74 @@ async def run_product_check(product_slug: str) -> dict:
|
|
|
469
470
|
actions_taken.append(f"Created issue for KR: {kr_title}")
|
|
470
471
|
all_issues.append(issue) # prevent duplicate creation in same cycle
|
|
471
472
|
|
|
472
|
-
# --- Step 3:
|
|
473
|
+
# --- Step 3: Sprint expiry check ---
|
|
474
|
+
from datetime import date as _date
|
|
475
|
+
|
|
476
|
+
active_sprint = prod.get_active_sprint(product_slug)
|
|
477
|
+
if active_sprint:
|
|
478
|
+
end_date_str = active_sprint.get("end_date", "")
|
|
479
|
+
try:
|
|
480
|
+
end_date = _date.fromisoformat(end_date_str)
|
|
481
|
+
if _date.today() > end_date:
|
|
482
|
+
actions_taken.append(f"Sprint '{active_sprint['name']}' expired on {end_date_str}")
|
|
483
|
+
except (ValueError, TypeError):
|
|
484
|
+
logger.debug("[PRODUCT_CHECK] Invalid end_date '{}' on sprint {}", end_date_str, active_sprint.get("id"))
|
|
485
|
+
|
|
486
|
+
# --- Step 4: Backlog grooming reminder ---
|
|
487
|
+
_BACKLOG_GROOMING_THRESHOLD = 5
|
|
488
|
+
unscheduled_low = [
|
|
489
|
+
i for i in all_issues
|
|
490
|
+
if i.get("priority") in (IssuePriority.P2.value, IssuePriority.P3.value)
|
|
491
|
+
and not i.get("sprint")
|
|
492
|
+
and i.get("status") not in (IssueStatus.DONE.value, IssueStatus.RELEASED.value)
|
|
493
|
+
]
|
|
494
|
+
if len(unscheduled_low) >= _BACKLOG_GROOMING_THRESHOLD:
|
|
495
|
+
actions_taken.append(f"{len(unscheduled_low)} P2/P3 issues unscheduled — backlog grooming needed")
|
|
496
|
+
|
|
497
|
+
# --- Step 5: Stale review check (open > 24h) ---
|
|
498
|
+
from datetime import datetime as _datetime, timedelta as _timedelta
|
|
499
|
+
|
|
500
|
+
open_reviews = prod.list_reviews(product_slug, status="open")
|
|
501
|
+
_STALE_REVIEW_HOURS = 24
|
|
502
|
+
stale_reviews = []
|
|
503
|
+
for rev in open_reviews:
|
|
504
|
+
try:
|
|
505
|
+
created = _datetime.fromisoformat(rev.get("created_at", ""))
|
|
506
|
+
if _datetime.now() - created > _timedelta(hours=_STALE_REVIEW_HOURS):
|
|
507
|
+
stale_reviews.append(rev)
|
|
508
|
+
except (ValueError, TypeError):
|
|
509
|
+
logger.debug("[PRODUCT_CHECK] Invalid created_at on review {}", rev.get("id"))
|
|
510
|
+
if stale_reviews:
|
|
511
|
+
actions_taken.append(f"{len(stale_reviews)} stale review(s) open > {_STALE_REVIEW_HOURS}h")
|
|
512
|
+
|
|
513
|
+
# --- Step 6: Blocked issue check (blocked > 7 days) ---
|
|
514
|
+
_BLOCKED_DAYS_THRESHOLD = 7
|
|
515
|
+
for issue in all_issues:
|
|
516
|
+
if issue.get("status") in (IssueStatus.DONE.value, IssueStatus.RELEASED.value):
|
|
517
|
+
continue
|
|
518
|
+
links = issue.get("issue_links", [])
|
|
519
|
+
blocked_links = [
|
|
520
|
+
link for link in links
|
|
521
|
+
if link["relation"] == IssueRelation.BLOCKED_BY.value
|
|
522
|
+
and _is_blocker_unresolved(product_slug, link["issue_id"])
|
|
523
|
+
]
|
|
524
|
+
if not blocked_links:
|
|
525
|
+
continue
|
|
526
|
+
# Use the oldest blocked_by link's created_at to determine how long blocked
|
|
527
|
+
oldest_blocked_at = None
|
|
528
|
+
for link in blocked_links:
|
|
529
|
+
try:
|
|
530
|
+
link_created = _datetime.fromisoformat(link.get("created_at", ""))
|
|
531
|
+
if oldest_blocked_at is None or link_created < oldest_blocked_at:
|
|
532
|
+
oldest_blocked_at = link_created
|
|
533
|
+
except (ValueError, TypeError):
|
|
534
|
+
logger.debug("[PRODUCT_CHECK] Invalid created_at on link in issue {}", issue.get("id"))
|
|
535
|
+
if oldest_blocked_at and _datetime.now() - oldest_blocked_at > _timedelta(days=_BLOCKED_DAYS_THRESHOLD):
|
|
536
|
+
actions_taken.append(
|
|
537
|
+
f"Issue '{issue['title']}' blocked for >{_BLOCKED_DAYS_THRESHOLD} days"
|
|
538
|
+
)
|
|
539
|
+
|
|
540
|
+
# --- Step 7: Check if owner review is needed ---
|
|
473
541
|
# Conditions: backlog issues with no one working, or KRs at 0% with completed projects
|
|
474
542
|
needs_review = False
|
|
475
543
|
review_reasons = []
|
|
@@ -491,6 +559,26 @@ async def run_product_check(product_slug: str) -> dict:
|
|
|
491
559
|
needs_review = True
|
|
492
560
|
review_reasons.append(f"{len(stale_krs)} KRs at 0% despite {len(completed_projects)} completed projects")
|
|
493
561
|
|
|
562
|
+
# Sprint expired → needs owner review
|
|
563
|
+
if active_sprint:
|
|
564
|
+
try:
|
|
565
|
+
end_date = _date.fromisoformat(active_sprint.get("end_date", ""))
|
|
566
|
+
if _date.today() > end_date:
|
|
567
|
+
needs_review = True
|
|
568
|
+
review_reasons.append(f"Sprint '{active_sprint['name']}' expired")
|
|
569
|
+
except (ValueError, TypeError):
|
|
570
|
+
logger.debug("[PRODUCT_CHECK] Invalid end_date on sprint {} for review check", active_sprint.get("id"))
|
|
571
|
+
|
|
572
|
+
# Backlog grooming threshold → needs owner review
|
|
573
|
+
if len(unscheduled_low) >= _BACKLOG_GROOMING_THRESHOLD:
|
|
574
|
+
needs_review = True
|
|
575
|
+
review_reasons.append(f"{len(unscheduled_low)} P2/P3 issues need sprint assignment")
|
|
576
|
+
|
|
577
|
+
# Stale reviews → needs owner review
|
|
578
|
+
if stale_reviews:
|
|
579
|
+
needs_review = True
|
|
580
|
+
review_reasons.append(f"{len(stale_reviews)} stale review(s) pending")
|
|
581
|
+
|
|
494
582
|
if needs_review:
|
|
495
583
|
reason = "; ".join(review_reasons)
|
|
496
584
|
notified = await notify_owner(product_slug, reason=reason)
|
|
@@ -581,6 +669,38 @@ async def handle_issue_assigned(event: CompanyEvent) -> None:
|
|
|
581
669
|
)
|
|
582
670
|
|
|
583
671
|
|
|
672
|
+
def _is_blocker_unresolved(slug: str, issue_id: str) -> bool:
    """Return True when the blocker issue exists and is not done/released."""
    blocker = prod.load_issue(slug, issue_id)
    return bool(blocker) and blocker.get("status") not in (
        IssueStatus.DONE.value,
        IssueStatus.RELEASED.value,
    )
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
async def handle_sprint_closed(event: CompanyEvent) -> None:
    """Auto-create an owner review checklist whenever a sprint closes."""
    payload = event.payload
    slug = payload.get("product_slug", "")
    sprint_id = payload.get("sprint_id", "")

    if not slug:
        logger.debug("[PRODUCT_TRIGGER] handle_sprint_closed: no product_slug, skip")
        return

    product = prod.load_product(slug)
    if not product:
        logger.warning("[PRODUCT_TRIGGER] handle_sprint_closed: product '{}' not found", slug)
        return

    prod.create_review(
        slug=slug,
        trigger="sprint_closed",
        trigger_ref=sprint_id,
        owner=product.get("owner_id", ""),
    )
    logger.info("[PRODUCT_TRIGGER] Auto-created review for sprint {} in {}", sprint_id, slug)
|
|
702
|
+
|
|
703
|
+
|
|
584
704
|
# ---------------------------------------------------------------------------
|
|
585
705
|
# Registration
|
|
586
706
|
# ---------------------------------------------------------------------------
|
|
@@ -610,6 +730,8 @@ def register_product_triggers() -> "asyncio.Task":
|
|
|
610
730
|
# Only handle if it has product context
|
|
611
731
|
if event.payload.get("product_slug"):
|
|
612
732
|
await handle_project_complete(event)
|
|
733
|
+
elif event.type == EventType.SPRINT_CLOSED:
|
|
734
|
+
await handle_sprint_closed(event)
|
|
613
735
|
except Exception:
|
|
614
736
|
logger.exception(
|
|
615
737
|
"[PRODUCT_TRIGGER] Error handling event {}", event.type
|