@pennyfarthing/core 10.0.3 → 10.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -7
- package/package.json +7 -1
- package/packages/core/dist/cli/commands/cyclist.d.ts +5 -1
- package/packages/core/dist/cli/commands/cyclist.d.ts.map +1 -1
- package/packages/core/dist/cli/commands/cyclist.js +4 -4
- package/packages/core/dist/cli/commands/cyclist.js.map +1 -1
- package/packages/core/dist/cli/commands/cyclist.test.js +2 -2
- package/packages/core/dist/cli/commands/cyclist.test.js.map +1 -1
- package/packages/core/dist/cli/commands/doctor-legacy.test.js +17 -16
- package/packages/core/dist/cli/commands/doctor-legacy.test.js.map +1 -1
- package/packages/core/dist/cli/commands/doctor.d.ts.map +1 -1
- package/packages/core/dist/cli/commands/doctor.js +251 -4
- package/packages/core/dist/cli/commands/doctor.js.map +1 -1
- package/packages/core/dist/cli/commands/init.d.ts +7 -0
- package/packages/core/dist/cli/commands/init.d.ts.map +1 -1
- package/packages/core/dist/cli/commands/init.js +43 -7
- package/packages/core/dist/cli/commands/init.js.map +1 -1
- package/packages/core/dist/cli/commands/update.d.ts.map +1 -1
- package/packages/core/dist/cli/commands/update.js +26 -0
- package/packages/core/dist/cli/commands/update.js.map +1 -1
- package/packages/core/dist/cli/index.js +1 -1
- package/packages/core/dist/cli/index.js.map +1 -1
- package/packages/core/dist/cli/ocean-profiles.test.js +1 -1
- package/packages/core/dist/cli/ocean-profiles.test.js.map +1 -1
- package/packages/core/dist/cli/utils/files.d.ts +10 -0
- package/packages/core/dist/cli/utils/files.d.ts.map +1 -1
- package/packages/core/dist/cli/utils/files.js +35 -0
- package/packages/core/dist/cli/utils/files.js.map +1 -1
- package/packages/core/dist/cli/utils/python.d.ts +22 -0
- package/packages/core/dist/cli/utils/python.d.ts.map +1 -0
- package/packages/core/dist/cli/utils/python.js +102 -0
- package/packages/core/dist/cli/utils/python.js.map +1 -0
- package/packages/core/dist/cli/utils/settings.d.ts.map +1 -1
- package/packages/core/dist/cli/utils/settings.js +10 -0
- package/packages/core/dist/cli/utils/settings.js.map +1 -1
- package/packages/core/dist/scripts/generate-report.d.ts.map +1 -1
- package/packages/core/dist/scripts/generate-report.js +11 -7
- package/packages/core/dist/scripts/generate-report.js.map +1 -1
- package/packages/core/dist/scripts/generate-spider-report.d.ts.map +1 -1
- package/packages/core/dist/scripts/generate-spider-report.js +12 -8
- package/packages/core/dist/scripts/generate-spider-report.js.map +1 -1
- package/packages/core/dist/scripts/generate-spider.d.ts.map +1 -1
- package/packages/core/dist/scripts/generate-spider.js +6 -4
- package/packages/core/dist/scripts/generate-spider.js.map +1 -1
- package/packages/core/dist/scripts/generate-spider.test.js +2 -2
- package/packages/core/dist/scripts/generate-spider.test.js.map +1 -1
- package/pennyfarthing-dist/agents/README.md +1 -3
- package/pennyfarthing-dist/agents/architect.md +0 -6
- package/pennyfarthing-dist/agents/devops.md +0 -6
- package/pennyfarthing-dist/agents/orchestrator.md +0 -6
- package/pennyfarthing-dist/agents/pm.md +1 -7
- package/pennyfarthing-dist/agents/sm-finish.md +1 -1
- package/pennyfarthing-dist/agents/sm-setup.md +2 -2
- package/pennyfarthing-dist/agents/sm.md +4 -11
- package/pennyfarthing-dist/commands/architect.md +11 -3
- package/pennyfarthing-dist/commands/close-epic.md +24 -131
- package/pennyfarthing-dist/commands/create-theme.md +14 -24
- package/pennyfarthing-dist/commands/dev.md +11 -3
- package/pennyfarthing-dist/commands/devops.md +11 -3
- package/pennyfarthing-dist/commands/health-check.md +1 -3
- package/pennyfarthing-dist/commands/help.md +8 -12
- package/pennyfarthing-dist/commands/list-themes.md +14 -16
- package/pennyfarthing-dist/commands/orchestrator.md +11 -3
- package/pennyfarthing-dist/commands/parallel-work.md +1 -3
- package/pennyfarthing-dist/commands/pm.md +11 -3
- package/pennyfarthing-dist/commands/prime.md +6 -6
- package/pennyfarthing-dist/commands/repo-status.md +2 -2
- package/pennyfarthing-dist/commands/reviewer.md +11 -3
- package/pennyfarthing-dist/commands/run-ci.md +1 -1
- package/pennyfarthing-dist/commands/set-theme.md +14 -51
- package/pennyfarthing-dist/commands/setup.md +1 -1
- package/pennyfarthing-dist/commands/show-theme.md +14 -16
- package/pennyfarthing-dist/commands/sm.md +11 -3
- package/pennyfarthing-dist/commands/sprint.md +8 -8
- package/pennyfarthing-dist/commands/tea.md +11 -3
- package/pennyfarthing-dist/commands/tech-writer.md +11 -3
- package/pennyfarthing-dist/commands/theme-maker.md +14 -671
- package/pennyfarthing-dist/commands/theme.md +95 -0
- package/pennyfarthing-dist/commands/ux-designer.md +11 -3
- package/pennyfarthing-dist/commands/work.md +3 -5
- package/pennyfarthing-dist/guides/agent-coordination.md +11 -13
- package/pennyfarthing-dist/guides/agent-template-tactical.md +2 -3
- package/pennyfarthing-dist/guides/command-tag-taxonomy.md +212 -0
- package/pennyfarthing-dist/guides/hooks.md +5 -5
- package/pennyfarthing-dist/guides/patterns/fan-out-fan-in-pattern.md +3 -3
- package/pennyfarthing-dist/guides/patterns/helper-delegation-pattern.md +9 -59
- package/pennyfarthing-dist/guides/patterns/tdd-flow-pattern.md +4 -5
- package/pennyfarthing-dist/guides/prime.md +2 -2
- package/pennyfarthing-dist/guides/skill-schema.md +25 -26
- package/pennyfarthing-dist/guides/xml-tags.md +2 -2
- package/pennyfarthing-dist/scripts/README.md +2 -2
- package/pennyfarthing-dist/scripts/core/agent-session.sh +6 -2
- package/pennyfarthing-dist/scripts/core/prime.sh +8 -10
- package/pennyfarthing-dist/scripts/git/git-status-all.sh +1 -1
- package/pennyfarthing-dist/scripts/git/install-git-hooks.sh +8 -6
- package/pennyfarthing-dist/scripts/git/worktree-manager.sh +3 -3
- package/pennyfarthing-dist/scripts/hooks/post-merge.sh +14 -12
- package/pennyfarthing-dist/scripts/hooks/pre-commit.sh +4 -3
- package/pennyfarthing-dist/scripts/hooks/pre-push.sh +11 -5
- package/pennyfarthing-dist/scripts/hooks/sprint-yaml-validation.sh +1 -1
- package/pennyfarthing-dist/scripts/misc/README.md +1 -1
- package/pennyfarthing-dist/scripts/misc/repo-utils.sh +3 -3
- package/pennyfarthing-dist/scripts/misc/validate-subagent-frontmatter.sh +1 -2
- package/pennyfarthing-dist/scripts/sprint/README.md +32 -17
- package/pennyfarthing-dist/scripts/story/README.md +1 -1
- package/pennyfarthing-dist/scripts/test/test-setup.sh +1 -1
- package/pennyfarthing-dist/scripts/tests/handoff-phase-update.test.sh +5 -5
- package/pennyfarthing-dist/scripts/tests/test-drift-detection.sh +3 -79
- package/pennyfarthing-dist/scripts/theme/README.md +1 -1
- package/pennyfarthing-dist/scripts/validation/validate-agent-schema.sh +0 -1
- package/pennyfarthing-dist/scripts/workflow/finish-story.sh +62 -17
- package/pennyfarthing-dist/skills/dev-patterns/SKILL.md +2 -2
- package/pennyfarthing-dist/skills/skill-registry.yaml +41 -28
- package/pennyfarthing-dist/skills/sprint/skill.md +386 -68
- package/pennyfarthing-dist/skills/story/skill.md +14 -206
- package/pennyfarthing-dist/skills/theme/skill.md +290 -75
- package/pennyfarthing-dist/skills/theme-creation/SKILL.md +23 -166
- package/pennyfarthing-dist/skills/workflow/skill.md +4 -4
- package/pennyfarthing-dist/templates/agent-scopes.yaml.template +0 -11
- package/pennyfarthing-dist/templates/auto-load-sm.sh.template +14 -0
- package/pennyfarthing-dist/templates/settings.local.json.template +9 -0
- package/pennyfarthing-dist/workflows/2party-tdd.yaml +399 -0
- package/pennyfarthing-dist/workflows/epics-and-stories/steps/step-05-import-to-future.md +42 -25
- package/pennyfarthing-dist/workflows/git-cleanup.yaml +1 -1
- package/pennyfarthing-dist/workflows/project-setup/steps/step-10-complete.md +1 -1
- package/pennyfarthing_scripts/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/hooks.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/schema_validation_hook.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/cli.py +15 -0
- package/pennyfarthing_scripts/codemarkers/__init__.py +19 -0
- package/pennyfarthing_scripts/codemarkers/__main__.py +6 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/codemarkers/analyze.py +326 -0
- package/pennyfarthing_scripts/codemarkers/cli.py +129 -0
- package/pennyfarthing_scripts/codemarkers/formatters.py +89 -0
- package/pennyfarthing_scripts/codemarkers/models.py +45 -0
- package/pennyfarthing_scripts/common/__pycache__/config.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/__pycache__/themes.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/config.py +2 -1
- package/pennyfarthing_scripts/complexity/__init__.py +15 -0
- package/pennyfarthing_scripts/complexity/__main__.py +6 -0
- package/pennyfarthing_scripts/complexity/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/complexity/analyze.py +207 -0
- package/pennyfarthing_scripts/complexity/cli.py +78 -0
- package/pennyfarthing_scripts/complexity/formatters.py +64 -0
- package/pennyfarthing_scripts/complexity/models.py +32 -0
- package/pennyfarthing_scripts/deadcode/__init__.py +6 -0
- package/pennyfarthing_scripts/deadcode/__main__.py +6 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/deadcode/analyze.py +323 -0
- package/pennyfarthing_scripts/deadcode/cli.py +163 -0
- package/pennyfarthing_scripts/deadcode/formatters.py +106 -0
- package/pennyfarthing_scripts/deadcode/models.py +54 -0
- package/pennyfarthing_scripts/dependencies/__init__.py +20 -0
- package/pennyfarthing_scripts/dependencies/__main__.py +5 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/dependencies/analyze.py +155 -0
- package/pennyfarthing_scripts/dependencies/cli.py +72 -0
- package/pennyfarthing_scripts/dependencies/formatters.py +63 -0
- package/pennyfarthing_scripts/dependencies/models.py +39 -0
- package/pennyfarthing_scripts/healthscore/__init__.py +21 -0
- package/pennyfarthing_scripts/healthscore/__main__.py +6 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/healthscore/analyze.py +161 -0
- package/pennyfarthing_scripts/healthscore/cli.py +76 -0
- package/pennyfarthing_scripts/healthscore/formatters.py +46 -0
- package/pennyfarthing_scripts/healthscore/models.py +44 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/analyze.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/formatters.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/hotspots/analyze.py +28 -1
- package/pennyfarthing_scripts/hotspots/cli.py +11 -9
- package/pennyfarthing_scripts/jira/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/bidirectional.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/client.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/create.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/operations.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/reconcile.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/bidirectional.py +42 -15
- package/pennyfarthing_scripts/jira/cli.py +78 -1
- package/pennyfarthing_scripts/jira/client.py +28 -0
- package/pennyfarthing_scripts/prime/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/persona.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/tiers.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/workflow.py +5 -3
- package/pennyfarthing_scripts/sprint/__pycache__/archive.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/archive_epic.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/epic_add.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/loader.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/story_add.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/story_finish.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/story_update.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/validate_cmd.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/validator.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/work.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/yaml_io.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/archive.py +63 -6
- package/pennyfarthing_scripts/sprint/archive_epic.py +198 -85
- package/pennyfarthing_scripts/sprint/cli.py +1565 -65
- package/pennyfarthing_scripts/sprint/epic_add.py +173 -0
- package/pennyfarthing_scripts/sprint/loader.py +46 -2
- package/pennyfarthing_scripts/sprint/story_add.py +202 -27
- package/pennyfarthing_scripts/sprint/story_finish.py +211 -0
- package/pennyfarthing_scripts/sprint/validate_cmd.py +44 -5
- package/pennyfarthing_scripts/sprint/validator.py +13 -3
- package/pennyfarthing_scripts/sprint/work.py +43 -3
- package/pennyfarthing_scripts/sprint/yaml_io.py +124 -15
- package/pennyfarthing_scripts/tests/__pycache__/test_codemarkers.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_healthscore.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_sprint_package.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_sprint_validator.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_story_add.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_story_update.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_validate_cmd.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_yaml_io.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/test_codemarkers.py +682 -0
- package/pennyfarthing_scripts/tests/test_healthscore.py +524 -0
- package/pennyfarthing_scripts/tests/test_sprint_package.py +166 -0
- package/pennyfarthing_scripts/tests/test_yaml_io.py +117 -0
- package/pennyfarthing_scripts/theme/__init__.py +5 -0
- package/pennyfarthing_scripts/theme/__main__.py +6 -0
- package/pennyfarthing_scripts/theme/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/theme/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/theme/cli.py +286 -0
- package/scripts/README.md +53 -0
- package/scripts/postinstall.cjs +34 -0
- package/pennyfarthing-dist/agents/workflow-status-check.md +0 -96
- package/pennyfarthing-dist/scripts/sprint/archive-story.sh +0 -133
- package/pennyfarthing-dist/scripts/sprint/available-stories.sh +0 -91
- package/pennyfarthing-dist/scripts/sprint/check-story.sh +0 -158
- package/pennyfarthing-dist/scripts/sprint/get-epic-field.sh +0 -52
- package/pennyfarthing-dist/scripts/sprint/get-story-field.sh +0 -63
- package/pennyfarthing-dist/scripts/sprint/list-future.sh +0 -145
- package/pennyfarthing-dist/scripts/sprint/new-sprint.sh +0 -110
- package/pennyfarthing-dist/scripts/sprint/promote-epic.sh +0 -148
- package/pennyfarthing-dist/scripts/sprint/sprint-common.sh +0 -415
- package/pennyfarthing-dist/scripts/sprint/sprint-info.sh +0 -33
- package/pennyfarthing-dist/scripts/sprint/sprint-metrics.sh +0 -230
- package/pennyfarthing-dist/scripts/sprint/sprint-status.sh +0 -134
- package/pennyfarthing-dist/scripts/sprint/validate-sprint-yaml.sh +0 -139
- package/pennyfarthing-dist/skills/sprint/scripts/archive-story.sh +0 -101
- package/pennyfarthing-dist/skills/sprint/scripts/available-stories.sh +0 -97
- package/pennyfarthing-dist/skills/sprint/scripts/check-story.sh +0 -164
- package/pennyfarthing-dist/skills/sprint/scripts/create-jira-epic.sh +0 -23
- package/pennyfarthing-dist/skills/sprint/scripts/new-sprint.sh +0 -116
- package/pennyfarthing-dist/skills/sprint/scripts/promote-epic.sh +0 -164
- package/pennyfarthing-dist/skills/sprint/scripts/sprint-info.sh +0 -39
- package/pennyfarthing-dist/skills/sprint/scripts/sprint-status.sh +0 -147
- package/pennyfarthing-dist/skills/sprint/scripts/sync-epic-jira.sh +0 -23
|
@@ -9,6 +9,9 @@ Commands:
|
|
|
9
9
|
backlog Show available stories
|
|
10
10
|
work Start work on a story
|
|
11
11
|
archive Archive a completed story
|
|
12
|
+
story Story subcommands (show, add, update, size, template, finish, claim)
|
|
13
|
+
epic Epic subcommands (show, add, promote, archive, cancel, import, remove)
|
|
14
|
+
initiative Initiative subcommands (show, cancel)
|
|
12
15
|
"""
|
|
13
16
|
|
|
14
17
|
import click
|
|
@@ -22,7 +25,9 @@ def sprint():
|
|
|
22
25
|
Commands:
|
|
23
26
|
status - Show sprint status
|
|
24
27
|
backlog - Show available stories
|
|
25
|
-
story -
|
|
28
|
+
story - Story operations (show, add, update, size, template, finish, claim)
|
|
29
|
+
epic - Epic operations (show, add, promote, archive, cancel, import, remove)
|
|
30
|
+
initiative - Initiative operations (show, cancel)
|
|
26
31
|
work - Start work on a story
|
|
27
32
|
archive - Archive a completed story
|
|
28
33
|
"""
|
|
@@ -50,17 +55,67 @@ def status(filter: str | None):
|
|
|
50
55
|
|
|
51
56
|
@sprint.command()
|
|
52
57
|
def backlog():
|
|
53
|
-
"""Show available stories
|
|
54
|
-
|
|
55
|
-
|
|
58
|
+
"""Show available stories grouped by epic.
|
|
59
|
+
|
|
60
|
+
Shows stories with backlog, ready, or planning status.
|
|
61
|
+
Output is grouped by epic with a markdown table per epic.
|
|
62
|
+
"""
|
|
63
|
+
from pennyfarthing_scripts.sprint.loader import load_sprint
|
|
56
64
|
|
|
57
|
-
|
|
58
|
-
|
|
65
|
+
data = load_sprint()
|
|
66
|
+
if not data or "epics" not in data:
|
|
67
|
+
click.echo("No sprint data available")
|
|
68
|
+
return
|
|
69
|
+
|
|
70
|
+
sprint_info = data.get("sprint", {})
|
|
71
|
+
click.echo(f"# Available Stories - {sprint_info.get('name', 'Unknown Sprint')}")
|
|
59
72
|
click.echo("")
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
73
|
+
|
|
74
|
+
available_statuses = {"backlog", "ready", "planning"}
|
|
75
|
+
total_count = 0
|
|
76
|
+
total_points = 0
|
|
77
|
+
|
|
78
|
+
for epic in data["epics"]:
|
|
79
|
+
if not isinstance(epic, dict):
|
|
80
|
+
continue
|
|
81
|
+
|
|
82
|
+
stories = [
|
|
83
|
+
s for s in epic.get("stories", [])
|
|
84
|
+
if s.get("status") in available_statuses
|
|
85
|
+
]
|
|
86
|
+
if not stories:
|
|
87
|
+
continue
|
|
88
|
+
|
|
89
|
+
click.echo(f"### {epic.get('title', 'Unknown Epic')}")
|
|
90
|
+
if epic.get("description"):
|
|
91
|
+
desc = epic["description"].strip().split("\n")[0][:200]
|
|
92
|
+
click.echo(f"*{desc}*")
|
|
93
|
+
click.echo("")
|
|
94
|
+
click.echo("| ID | Title | Pts | Pri | Status | Assigned | Workflow |")
|
|
95
|
+
click.echo("|----|-------|-----|-----|--------|----------|----------|")
|
|
96
|
+
|
|
97
|
+
for s in stories:
|
|
98
|
+
title = s.get("title", "?")
|
|
99
|
+
if len(title) > 40:
|
|
100
|
+
title = title[:37] + "..."
|
|
101
|
+
sid = s.get("id", "?")
|
|
102
|
+
pts = s.get("points", "?")
|
|
103
|
+
pri = s.get("priority", "P2")
|
|
104
|
+
stat = s.get("status", "backlog")
|
|
105
|
+
wf = s.get("workflow", "tdd")
|
|
106
|
+
assigned = s.get("assigned_to", "")
|
|
107
|
+
if assigned:
|
|
108
|
+
parts = assigned.split("@")[0].split(".")
|
|
109
|
+
if len(parts) >= 2:
|
|
110
|
+
assigned = f"{parts[0][0].upper()}. {parts[-1].capitalize()}"
|
|
111
|
+
click.echo(f"| {sid} | {title} | {pts} | {pri} | {stat} | {assigned} | {wf} |")
|
|
112
|
+
total_count += 1
|
|
113
|
+
total_points += s.get("points", 0) or 0
|
|
114
|
+
|
|
115
|
+
click.echo("")
|
|
116
|
+
|
|
117
|
+
click.echo("---")
|
|
118
|
+
click.echo(f"**Total available:** {total_count} stories, {total_points} points")
|
|
64
119
|
|
|
65
120
|
|
|
66
121
|
@sprint.command()
|
|
@@ -103,8 +158,48 @@ def work(story_id: str | None, dry_run: bool):
|
|
|
103
158
|
|
|
104
159
|
@sprint.command()
|
|
105
160
|
@click.argument("story_id")
|
|
161
|
+
@click.argument("pr_number", required=False)
|
|
162
|
+
@click.option("--apply", is_flag=True, help="Also remove from current-sprint.yaml")
|
|
163
|
+
@click.option("--dry-run", is_flag=True, help="Show what would be done without making changes")
|
|
164
|
+
def archive(story_id: str, pr_number: str | None, apply: bool, dry_run: bool):
|
|
165
|
+
"""Archive a completed story.
|
|
166
|
+
|
|
167
|
+
\b
|
|
168
|
+
Arguments:
|
|
169
|
+
STORY_ID - Story ID to archive
|
|
170
|
+
PR_NUMBER - Optional PR number if merged via PR
|
|
171
|
+
"""
|
|
172
|
+
# Lazy import
|
|
173
|
+
from pennyfarthing_scripts.sprint.archive import archive_story
|
|
174
|
+
|
|
175
|
+
result = archive_story(
|
|
176
|
+
story_id,
|
|
177
|
+
pr_number=pr_number,
|
|
178
|
+
dry_run=dry_run,
|
|
179
|
+
apply=apply,
|
|
180
|
+
)
|
|
181
|
+
|
|
182
|
+
if result.get("success"):
|
|
183
|
+
if result.get("dry_run"):
|
|
184
|
+
click.echo(f"[DRY-RUN] {result.get('message')}")
|
|
185
|
+
else:
|
|
186
|
+
click.echo(result.get("message"))
|
|
187
|
+
else:
|
|
188
|
+
raise click.ClickException(f"Failed: {result.get('error')}")
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
# --- Story subgroup ---
|
|
192
|
+
|
|
193
|
+
@sprint.group()
|
|
194
|
+
def story():
|
|
195
|
+
"""Story operations (show, add, update, size, template, finish, claim)."""
|
|
196
|
+
pass
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
@story.command("show")
|
|
200
|
+
@click.argument("story_id")
|
|
106
201
|
@click.option("--json", "output_json", is_flag=True, help="Output as JSON")
|
|
107
|
-
def
|
|
202
|
+
def story_show(story_id: str, output_json: bool):
|
|
108
203
|
"""Show details for a specific story.
|
|
109
204
|
|
|
110
205
|
\b
|
|
@@ -138,43 +233,420 @@ def story(story_id: str, output_json: bool):
|
|
|
138
233
|
click.echo(f"Description: {story_data.get('description')}")
|
|
139
234
|
|
|
140
235
|
|
|
141
|
-
@
|
|
236
|
+
@story.command("size")
|
|
237
|
+
@click.argument("points", required=False, type=int)
|
|
238
|
+
def story_size(points: int | None):
|
|
239
|
+
"""Display story sizing guidelines.
|
|
240
|
+
|
|
241
|
+
\b
|
|
242
|
+
Arguments:
|
|
243
|
+
POINTS - Optional specific point value to show guidance for
|
|
244
|
+
"""
|
|
245
|
+
from pennyfarthing_scripts.story.size import format_size_info, get_sizing_guidelines
|
|
246
|
+
|
|
247
|
+
guidelines = get_sizing_guidelines(points)
|
|
248
|
+
click.echo(format_size_info(guidelines))
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
@story.command("template")
|
|
252
|
+
@click.argument("template_type", required=False)
|
|
253
|
+
def story_template(template_type: str | None):
|
|
254
|
+
"""Display story templates by type.
|
|
255
|
+
|
|
256
|
+
\b
|
|
257
|
+
Arguments:
|
|
258
|
+
TYPE - Template type (feature, bug, refactor, chore)
|
|
259
|
+
"""
|
|
260
|
+
from pennyfarthing_scripts.story.template import get_all_templates, get_template
|
|
261
|
+
|
|
262
|
+
if template_type:
|
|
263
|
+
template = get_template(template_type)
|
|
264
|
+
if template:
|
|
265
|
+
click.echo(f"Type: {template['type']}")
|
|
266
|
+
click.echo(f"Description: {template['description']}")
|
|
267
|
+
click.echo("")
|
|
268
|
+
click.echo("Template:")
|
|
269
|
+
click.echo(template["template"])
|
|
270
|
+
else:
|
|
271
|
+
raise click.ClickException(f"Unknown template type: {template_type}")
|
|
272
|
+
else:
|
|
273
|
+
click.echo("Available templates:")
|
|
274
|
+
for name, template in get_all_templates().items():
|
|
275
|
+
click.echo(f" {name}: {template['description']}")
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
@story.command("finish")
|
|
142
279
|
@click.argument("story_id")
|
|
143
|
-
@click.
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
def archive(story_id: str, pr_number: str | None, apply: bool, dry_run: bool):
|
|
147
|
-
"""Archive a completed story.
|
|
280
|
+
@click.option("--dry-run", is_flag=True, help="Show what would be done without executing")
|
|
281
|
+
def story_finish(story_id: str, dry_run: bool):
|
|
282
|
+
"""Complete a story: archive session, merge PR, transition Jira, update sprint YAML.
|
|
148
283
|
|
|
149
284
|
\b
|
|
150
285
|
Arguments:
|
|
151
|
-
STORY_ID
|
|
152
|
-
PR_NUMBER - Optional PR number if merged via PR
|
|
286
|
+
STORY_ID - Story ID (e.g., 83-2)
|
|
153
287
|
"""
|
|
154
|
-
|
|
155
|
-
from pennyfarthing_scripts.sprint.
|
|
288
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
289
|
+
from pennyfarthing_scripts.sprint.story_finish import finish_story
|
|
156
290
|
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
291
|
+
root = get_project_root()
|
|
292
|
+
result = finish_story(root, story_id, dry_run=dry_run)
|
|
293
|
+
|
|
294
|
+
if not result["success"]:
|
|
295
|
+
raise click.ClickException(result["error"])
|
|
296
|
+
|
|
297
|
+
if result.get("dry_run"):
|
|
298
|
+
click.echo(f"[DRY RUN] Finish story {story_id} ({result.get('jira_key', '?')})")
|
|
299
|
+
for step in result.get("steps", []):
|
|
300
|
+
click.echo(f" {step['step']}. {step['action']}")
|
|
301
|
+
return
|
|
302
|
+
|
|
303
|
+
click.echo(f"=== Story {story_id} Complete ===")
|
|
304
|
+
jira_key = result.get("jira_key", "")
|
|
305
|
+
click.echo(f"Jira: https://1898andco.atlassian.net/browse/{jira_key}")
|
|
306
|
+
for step in result.get("steps", []):
|
|
307
|
+
warning = step.get("warning", "")
|
|
308
|
+
error = step.get("error", "")
|
|
309
|
+
suffix = f" (warning: {warning})" if warning else f" (error: {error})" if error else ""
|
|
310
|
+
click.echo(f" {step['step']}. {step['action']}{suffix}")
|
|
311
|
+
|
|
312
|
+
|
|
313
|
+
@story.command("claim")
|
|
314
|
+
@click.argument("story_id")
|
|
315
|
+
@click.option("--claim/--unclaim", default=True, help="Claim or unclaim the story")
|
|
316
|
+
def story_claim(story_id: str, claim: bool):
|
|
317
|
+
"""Claim or unclaim a story in Jira.
|
|
318
|
+
|
|
319
|
+
\b
|
|
320
|
+
Arguments:
|
|
321
|
+
STORY_ID - Story ID / Jira key to claim
|
|
322
|
+
"""
|
|
323
|
+
from pennyfarthing_scripts.jira.claim import claim_issue, unclaim_issue
|
|
324
|
+
|
|
325
|
+
if claim:
|
|
326
|
+
result = claim_issue(story_id)
|
|
327
|
+
else:
|
|
328
|
+
result = unclaim_issue(story_id)
|
|
163
329
|
|
|
164
330
|
if result.get("success"):
|
|
165
|
-
|
|
166
|
-
click.echo(f"[DRY-RUN] {result.get('message')}")
|
|
167
|
-
else:
|
|
168
|
-
click.echo(result.get("message"))
|
|
331
|
+
click.echo(result.get("message", f"{'Claimed' if claim else 'Unclaimed'} {story_id}"))
|
|
169
332
|
else:
|
|
170
|
-
raise click.ClickException(
|
|
333
|
+
raise click.ClickException(result.get("error", "Unknown error"))
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
# Register story-add as story.add
|
|
337
|
+
from pennyfarthing_scripts.sprint.story_add import story_add_command
|
|
338
|
+
|
|
339
|
+
story.add_command(story_add_command, "add")
|
|
171
340
|
|
|
341
|
+
# Register story-update as story.update
|
|
342
|
+
from pennyfarthing_scripts.sprint.story_update import story_update_command
|
|
172
343
|
|
|
173
|
-
|
|
344
|
+
story.add_command(story_update_command, "update")
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
# --- Epic subgroup ---
|
|
348
|
+
|
|
349
|
+
@sprint.group()
|
|
350
|
+
def epic():
|
|
351
|
+
"""Epic operations (show, add, promote, archive, cancel, import, remove)."""
|
|
352
|
+
pass
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
@epic.command("show")
|
|
356
|
+
@click.argument("epic_id")
|
|
357
|
+
@click.option("--json", "output_json", is_flag=True, help="Output as JSON")
|
|
358
|
+
def epic_show(epic_id: str, output_json: bool):
|
|
359
|
+
"""Show details for a specific epic.
|
|
360
|
+
|
|
361
|
+
Searches both the current sprint and future initiative shards.
|
|
362
|
+
|
|
363
|
+
\b
|
|
364
|
+
Arguments:
|
|
365
|
+
EPIC_ID - Epic ID (e.g., epic-42 or MSSCI-14298)
|
|
366
|
+
|
|
367
|
+
\b
|
|
368
|
+
Examples:
|
|
369
|
+
pf sprint epic show MSSCI-14298
|
|
370
|
+
pf sprint epic show epic-42
|
|
371
|
+
pf sprint epic show epic-42 --json
|
|
372
|
+
"""
|
|
373
|
+
import json as json_mod
|
|
374
|
+
|
|
375
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
376
|
+
from pennyfarthing_scripts.sprint.loader import load_sprint
|
|
377
|
+
|
|
378
|
+
root = get_project_root()
|
|
379
|
+
epic_data = None
|
|
380
|
+
source = None
|
|
381
|
+
|
|
382
|
+
# 1. Search current sprint
|
|
383
|
+
sprint_data = load_sprint(root)
|
|
384
|
+
if sprint_data and "epics" in sprint_data:
|
|
385
|
+
for e in sprint_data["epics"]:
|
|
386
|
+
if isinstance(e, dict):
|
|
387
|
+
eid = str(e.get("id", ""))
|
|
388
|
+
ejira = str(e.get("jira", ""))
|
|
389
|
+
if epic_id in (eid, ejira, eid.replace("epic-", ""), f"epic-{epic_id}"):
|
|
390
|
+
epic_data = e
|
|
391
|
+
source = "current sprint"
|
|
392
|
+
break
|
|
393
|
+
|
|
394
|
+
# 2. Search future initiative shards
|
|
395
|
+
if not epic_data:
|
|
396
|
+
epic_data, source = _find_epic_in_initiatives(epic_id, root)
|
|
397
|
+
|
|
398
|
+
if not epic_data:
|
|
399
|
+
raise click.ClickException(f"Epic not found: {epic_id}")
|
|
400
|
+
|
|
401
|
+
if output_json:
|
|
402
|
+
# Convert to plain dict for JSON serialization
|
|
403
|
+
click.echo(json_mod.dumps(dict(epic_data), indent=2, default=str))
|
|
404
|
+
else:
|
|
405
|
+
click.echo(f"Epic: {epic_data.get('id', epic_id)}")
|
|
406
|
+
click.echo(f"Title: {epic_data.get('title', 'N/A')}")
|
|
407
|
+
click.echo(f"Status: {epic_data.get('status', 'N/A')}")
|
|
408
|
+
click.echo(f"Points: {epic_data.get('points', 'N/A')}")
|
|
409
|
+
click.echo(f"Source: {source}")
|
|
410
|
+
if epic_data.get("priority"):
|
|
411
|
+
click.echo(f"Priority: {epic_data.get('priority')}")
|
|
412
|
+
if epic_data.get("jira"):
|
|
413
|
+
click.echo(f"Jira: {epic_data.get('jira')}")
|
|
414
|
+
if epic_data.get("repos"):
|
|
415
|
+
click.echo(f"Repos: {epic_data.get('repos')}")
|
|
416
|
+
if epic_data.get("description"):
|
|
417
|
+
click.echo(f"Description: {epic_data.get('description').rstrip()}")
|
|
418
|
+
|
|
419
|
+
stories = epic_data.get("stories", [])
|
|
420
|
+
if stories:
|
|
421
|
+
click.echo(f"\nStories ({len(stories)}):")
|
|
422
|
+
for s in stories:
|
|
423
|
+
sid = s.get("id", "?")
|
|
424
|
+
stitle = s.get("title", "?")
|
|
425
|
+
spts = s.get("points", "?")
|
|
426
|
+
sstat = s.get("status", "?")
|
|
427
|
+
click.echo(f" {sid}: {stitle} [{spts}pts] ({sstat})")
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
def _epic_shard_path(sprint_dir, ref: str):
|
|
431
|
+
"""Resolve an epic shard file path from a ref string.
|
|
432
|
+
|
|
433
|
+
Handles both 'epic-42' and 'MSSCI-12792' style refs.
|
|
434
|
+
The file naming convention is epic-{ref}.yaml, but refs that
|
|
435
|
+
already start with 'epic-' should not be double-prefixed.
|
|
436
|
+
"""
|
|
437
|
+
if ref.startswith("epic-"):
|
|
438
|
+
return sprint_dir / f"{ref}.yaml"
|
|
439
|
+
return sprint_dir / f"epic-{ref}.yaml"
|
|
440
|
+
|
|
441
|
+
|
|
442
|
+
def _epic_ref_matches(ref: str, epic_id: str) -> bool:
|
|
443
|
+
"""Check if an initiative epic ref matches the requested epic_id."""
|
|
444
|
+
# Normalize both to compare without prefix
|
|
445
|
+
ref_bare = ref.replace("epic-", "") if ref.startswith("epic-") else ref
|
|
446
|
+
id_bare = epic_id.replace("epic-", "") if epic_id.startswith("epic-") else epic_id
|
|
447
|
+
return ref_bare == id_bare or ref == epic_id
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
def _find_epic_in_initiatives(epic_id: str, root):
|
|
451
|
+
"""Search initiative shard files for an epic by ID.
|
|
452
|
+
|
|
453
|
+
Returns (epic_dict, source_string) or (None, None).
|
|
454
|
+
"""
|
|
455
|
+
import yaml
|
|
456
|
+
|
|
457
|
+
sprint_dir = root / "sprint"
|
|
458
|
+
for init_file in sorted(sprint_dir.glob("initiative-*.yaml")):
|
|
459
|
+
with open(init_file) as f:
|
|
460
|
+
init_data = yaml.safe_load(f.read())
|
|
461
|
+
if not init_data:
|
|
462
|
+
continue
|
|
463
|
+
|
|
464
|
+
init_name = init_data.get("name", init_file.stem)
|
|
465
|
+
epics = init_data.get("epics", [])
|
|
466
|
+
for e in epics:
|
|
467
|
+
if isinstance(e, str):
|
|
468
|
+
if _epic_ref_matches(e, epic_id):
|
|
469
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
470
|
+
if shard.exists():
|
|
471
|
+
with open(shard) as sf:
|
|
472
|
+
epic_data = yaml.safe_load(sf.read())
|
|
473
|
+
if epic_data:
|
|
474
|
+
return epic_data, f"initiative: {init_name}"
|
|
475
|
+
elif isinstance(e, dict):
|
|
476
|
+
eid = str(e.get("id", ""))
|
|
477
|
+
if _epic_ref_matches(eid, epic_id):
|
|
478
|
+
return e, f"initiative: {init_name}"
|
|
479
|
+
|
|
480
|
+
return None, None
|
|
481
|
+
|
|
482
|
+
|
|
483
|
+
@epic.command("cancel")
|
|
484
|
+
@click.argument("epic_id")
|
|
485
|
+
@click.option("--jira", is_flag=True, help="Also cancel the epic in Jira")
|
|
486
|
+
@click.option("--dry-run", is_flag=True, help="Show what would be done without making changes")
|
|
487
|
+
def epic_cancel(epic_id: str, jira: bool, dry_run: bool):
|
|
488
|
+
"""Cancel an epic and all its stories.
|
|
489
|
+
|
|
490
|
+
Sets the epic status to 'canceled' and all stories to 'canceled'.
|
|
491
|
+
Searches both the current sprint and future initiative shards.
|
|
492
|
+
|
|
493
|
+
\b
|
|
494
|
+
Arguments:
|
|
495
|
+
EPIC_ID - Epic ID (e.g., epic-42 or MSSCI-14298)
|
|
496
|
+
|
|
497
|
+
\b
|
|
498
|
+
Examples:
|
|
499
|
+
pf sprint epic cancel epic-42 --dry-run
|
|
500
|
+
pf sprint epic cancel epic-42
|
|
501
|
+
pf sprint epic cancel epic-42 --jira
|
|
502
|
+
"""
|
|
503
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
504
|
+
from pennyfarthing_scripts.sprint.loader import load_sprint
|
|
505
|
+
from pennyfarthing_scripts.sprint.yaml_io import read_sprint, write_sprint
|
|
506
|
+
|
|
507
|
+
root = get_project_root()
|
|
508
|
+
sprint_dir = root / "sprint"
|
|
509
|
+
sprint_path = sprint_dir / "current-sprint.yaml"
|
|
510
|
+
|
|
511
|
+
# 1. Try current sprint
|
|
512
|
+
sprint_data = read_sprint(sprint_path) if sprint_path.exists() else None
|
|
513
|
+
found_in_sprint = False
|
|
514
|
+
if sprint_data and "epics" in sprint_data:
|
|
515
|
+
for e in sprint_data["epics"]:
|
|
516
|
+
if not isinstance(e, dict):
|
|
517
|
+
continue
|
|
518
|
+
eid = str(e.get("id", ""))
|
|
519
|
+
ejira = str(e.get("jira", ""))
|
|
520
|
+
if epic_id in (eid, ejira, eid.replace("epic-", ""), f"epic-{epic_id}"):
|
|
521
|
+
found_in_sprint = True
|
|
522
|
+
jira_key = e.get("jira")
|
|
523
|
+
stories = e.get("stories", [])
|
|
524
|
+
story_count = len(stories)
|
|
525
|
+
|
|
526
|
+
click.echo(f"Epic: {eid}")
|
|
527
|
+
click.echo(f"Title: {e.get('title', 'N/A')}")
|
|
528
|
+
click.echo(f"Stories: {story_count}")
|
|
529
|
+
|
|
530
|
+
if jira_key and not jira:
|
|
531
|
+
click.echo(f"\nWarning: Epic has Jira key {jira_key} -- pass --jira to also cancel in Jira")
|
|
532
|
+
|
|
533
|
+
if dry_run:
|
|
534
|
+
click.echo(f"\n[DRY-RUN] Would cancel {eid} and {story_count} stories")
|
|
535
|
+
return
|
|
536
|
+
|
|
537
|
+
e["status"] = "canceled"
|
|
538
|
+
for s in stories:
|
|
539
|
+
s["status"] = "canceled"
|
|
540
|
+
|
|
541
|
+
write_sprint(sprint_path, sprint_data)
|
|
542
|
+
click.echo(f"\nCanceled {eid} and {story_count} stories in current sprint")
|
|
543
|
+
|
|
544
|
+
if jira and jira_key:
|
|
545
|
+
_transition_jira(jira_key, "Cancelled")
|
|
546
|
+
click.echo(f"Transitioned Jira {jira_key} to Cancelled")
|
|
547
|
+
return
|
|
548
|
+
|
|
549
|
+
# 2. Try initiative shards
|
|
550
|
+
if not found_in_sprint:
|
|
551
|
+
_cancel_epic_in_initiatives(epic_id, root, jira=jira, dry_run=dry_run)
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
def _transition_jira(jira_key: str, status: str) -> bool:
|
|
555
|
+
"""Transition a Jira issue to the given status."""
|
|
556
|
+
import subprocess
|
|
557
|
+
|
|
558
|
+
try:
|
|
559
|
+
result = subprocess.run(
|
|
560
|
+
["jira", "issue", "move", jira_key, status],
|
|
561
|
+
capture_output=True,
|
|
562
|
+
text=True,
|
|
563
|
+
timeout=30,
|
|
564
|
+
)
|
|
565
|
+
return result.returncode == 0
|
|
566
|
+
except Exception:
|
|
567
|
+
return False
|
|
568
|
+
|
|
569
|
+
|
|
570
|
+
def _cancel_epic_in_initiatives(epic_id: str, root, *, jira: bool, dry_run: bool):
|
|
571
|
+
"""Find and cancel an epic in initiative shard files."""
|
|
572
|
+
import yaml
|
|
573
|
+
|
|
574
|
+
sprint_dir = root / "sprint"
|
|
575
|
+
|
|
576
|
+
for init_file in sorted(sprint_dir.glob("initiative-*.yaml")):
|
|
577
|
+
with open(init_file) as f:
|
|
578
|
+
raw = f.read()
|
|
579
|
+
init_data = yaml.safe_load(raw)
|
|
580
|
+
if not init_data:
|
|
581
|
+
continue
|
|
582
|
+
|
|
583
|
+
init_name = init_data.get("name", init_file.stem)
|
|
584
|
+
epics = init_data.get("epics", [])
|
|
585
|
+
|
|
586
|
+
for i, e in enumerate(epics):
|
|
587
|
+
matched = False
|
|
588
|
+
epic_dict = None
|
|
589
|
+
|
|
590
|
+
if isinstance(e, str):
|
|
591
|
+
if _epic_ref_matches(e, epic_id):
|
|
592
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
593
|
+
if shard.exists():
|
|
594
|
+
with open(shard) as sf:
|
|
595
|
+
epic_dict = yaml.safe_load(sf.read())
|
|
596
|
+
matched = True
|
|
597
|
+
elif isinstance(e, dict):
|
|
598
|
+
eid = str(e.get("id", ""))
|
|
599
|
+
if _epic_ref_matches(eid, epic_id):
|
|
600
|
+
epic_dict = e
|
|
601
|
+
matched = True
|
|
602
|
+
|
|
603
|
+
if not matched or not epic_dict:
|
|
604
|
+
continue
|
|
605
|
+
|
|
606
|
+
jira_key = epic_dict.get("jira")
|
|
607
|
+
stories = epic_dict.get("stories", [])
|
|
608
|
+
story_count = len(stories)
|
|
609
|
+
|
|
610
|
+
click.echo(f"Epic: {epic_dict.get('id', epic_id)}")
|
|
611
|
+
click.echo(f"Title: {epic_dict.get('title', 'N/A')}")
|
|
612
|
+
click.echo(f"Initiative: {init_name}")
|
|
613
|
+
click.echo(f"Stories: {story_count}")
|
|
614
|
+
|
|
615
|
+
if jira_key and not jira:
|
|
616
|
+
click.echo(f"\nWarning: Epic has Jira key {jira_key} -- pass --jira to also cancel in Jira")
|
|
617
|
+
|
|
618
|
+
if dry_run:
|
|
619
|
+
click.echo(f"\n[DRY-RUN] Would cancel {epic_dict.get('id', epic_id)} and {story_count} stories")
|
|
620
|
+
return
|
|
621
|
+
|
|
622
|
+
epic_dict["status"] = "canceled"
|
|
623
|
+
for s in stories:
|
|
624
|
+
s["status"] = "canceled"
|
|
625
|
+
|
|
626
|
+
# Write back — either shard file or inline in initiative
|
|
627
|
+
if isinstance(e, str):
|
|
628
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
629
|
+
with open(shard, "w") as sf:
|
|
630
|
+
yaml.dump(dict(epic_dict), sf, default_flow_style=False, sort_keys=False)
|
|
631
|
+
else:
|
|
632
|
+
with open(init_file, "w") as f:
|
|
633
|
+
yaml.dump(init_data, f, default_flow_style=False, sort_keys=False)
|
|
634
|
+
|
|
635
|
+
click.echo(f"\nCanceled {epic_dict.get('id', epic_id)} and {story_count} stories")
|
|
636
|
+
|
|
637
|
+
if jira and jira_key:
|
|
638
|
+
_transition_jira(jira_key, "Cancelled")
|
|
639
|
+
click.echo(f"Transitioned Jira {jira_key} to Cancelled")
|
|
640
|
+
return
|
|
641
|
+
|
|
642
|
+
raise click.ClickException(f"Epic not found: {epic_id}")
|
|
643
|
+
|
|
644
|
+
|
|
645
|
+
@epic.command("archive")
|
|
174
646
|
@click.argument("epic_id", required=False)
|
|
175
647
|
@click.option("--dry-run", is_flag=True, help="Show what would be done without making changes")
|
|
176
648
|
@click.option("--jira", is_flag=True, help="Also update Jira epic status to Done")
|
|
177
|
-
def
|
|
649
|
+
def epic_archive(epic_id: str | None, dry_run: bool, jira: bool):
|
|
178
650
|
"""Archive completed epics.
|
|
179
651
|
|
|
180
652
|
\b
|
|
@@ -183,10 +655,10 @@ def archive_epic(epic_id: str | None, dry_run: bool, jira: bool):
|
|
|
183
655
|
|
|
184
656
|
\b
|
|
185
657
|
Examples:
|
|
186
|
-
pf sprint archive
|
|
187
|
-
pf sprint
|
|
188
|
-
pf sprint
|
|
189
|
-
pf sprint
|
|
658
|
+
pf sprint epic archive # Scan and archive all completed
|
|
659
|
+
pf sprint epic archive --dry-run # Preview what would be archived
|
|
660
|
+
pf sprint epic archive epic-64 # Archive specific epic
|
|
661
|
+
pf sprint epic archive epic-64 --jira # Archive and update Jira
|
|
190
662
|
"""
|
|
191
663
|
# Lazy import
|
|
192
664
|
from pennyfarthing_scripts.sprint.archive_epic import (
|
|
@@ -204,9 +676,9 @@ def archive_epic(epic_id: str | None, dry_run: bool, jira: bool):
|
|
|
204
676
|
click.echo(f"[DRY-RUN] {result.get('message')}")
|
|
205
677
|
if "archived" in result:
|
|
206
678
|
for r in result["archived"]:
|
|
207
|
-
|
|
208
|
-
eid =
|
|
209
|
-
stories = len(
|
|
679
|
+
e = r.get("epic", {})
|
|
680
|
+
eid = e.get("id") if e else r.get("epic_id")
|
|
681
|
+
stories = len(e.get("stories", [])) if e else r.get("stories_archived", 0)
|
|
210
682
|
click.echo(f" Would archive: {eid} ({stories} stories)")
|
|
211
683
|
else:
|
|
212
684
|
click.echo(result.get("message"))
|
|
@@ -222,12 +694,12 @@ def archive_epic(epic_id: str | None, dry_run: bool, jira: bool):
|
|
|
222
694
|
raise click.ClickException(error_msg)
|
|
223
695
|
|
|
224
696
|
|
|
225
|
-
@
|
|
697
|
+
@epic.command("import")
|
|
226
698
|
@click.argument("epics_file")
|
|
227
699
|
@click.argument("initiative_name", required=False)
|
|
228
700
|
@click.option("--marker", default="imported", help="Marker tag for stories (default: imported)")
|
|
229
701
|
@click.option("--dry-run", is_flag=True, help="Show what would be done without making changes")
|
|
230
|
-
def
|
|
702
|
+
def epic_import(epics_file: str, initiative_name: str | None, marker: str, dry_run: bool):
|
|
231
703
|
"""Import BMAD epics-and-stories output to future.yaml.
|
|
232
704
|
|
|
233
705
|
\b
|
|
@@ -237,9 +709,9 @@ def import_epic(epics_file: str, initiative_name: str | None, marker: str, dry_r
|
|
|
237
709
|
|
|
238
710
|
\b
|
|
239
711
|
Examples:
|
|
240
|
-
pf sprint
|
|
241
|
-
pf sprint
|
|
242
|
-
pf sprint
|
|
712
|
+
pf sprint epic import docs/planning/my-feature-epics.md
|
|
713
|
+
pf sprint epic import docs/planning/my-feature-epics.md "My Feature" --marker my-feature
|
|
714
|
+
pf sprint epic import docs/planning/my-feature-epics.md --dry-run
|
|
243
715
|
"""
|
|
244
716
|
# Lazy import
|
|
245
717
|
from pennyfarthing_scripts.sprint.import_epic import import_epic as do_import
|
|
@@ -270,10 +742,10 @@ def import_epic(epics_file: str, initiative_name: str | None, marker: str, dry_r
|
|
|
270
742
|
raise click.ClickException(result.get("error", "Unknown error"))
|
|
271
743
|
|
|
272
744
|
|
|
273
|
-
@
|
|
745
|
+
@epic.command("remove")
|
|
274
746
|
@click.argument("epic_id")
|
|
275
747
|
@click.option("--dry-run", is_flag=True, help="Show what would be removed without making changes")
|
|
276
|
-
def
|
|
748
|
+
def epic_remove(epic_id: str, dry_run: bool):
|
|
277
749
|
"""Remove an epic from future.yaml (for cancelled pre-Jira epics).
|
|
278
750
|
|
|
279
751
|
\b
|
|
@@ -282,8 +754,8 @@ def remove_epic(epic_id: str, dry_run: bool):
|
|
|
282
754
|
|
|
283
755
|
\b
|
|
284
756
|
Examples:
|
|
285
|
-
pf sprint
|
|
286
|
-
pf sprint
|
|
757
|
+
pf sprint epic remove epic-41
|
|
758
|
+
pf sprint epic remove epic-41 --dry-run
|
|
287
759
|
"""
|
|
288
760
|
from pathlib import Path
|
|
289
761
|
|
|
@@ -305,14 +777,14 @@ def remove_epic(epic_id: str, dry_run: bool):
|
|
|
305
777
|
found = False
|
|
306
778
|
for init in data["future"]["initiatives"]:
|
|
307
779
|
epics = init.get("epics", [])
|
|
308
|
-
for
|
|
309
|
-
if
|
|
780
|
+
for e in epics:
|
|
781
|
+
if e.get("id") == epic_id:
|
|
310
782
|
found = True
|
|
311
|
-
story_count = len(
|
|
783
|
+
story_count = len(e.get("stories", []))
|
|
312
784
|
click.echo(f"Found epic in initiative '{init.get('name', 'unknown')}':")
|
|
313
785
|
click.echo(f" ID: {epic_id}")
|
|
314
|
-
click.echo(f" Title: {
|
|
315
|
-
click.echo(f" Points: {
|
|
786
|
+
click.echo(f" Title: {e.get('title', 'unknown')}")
|
|
787
|
+
click.echo(f" Points: {e.get('points', '?')}")
|
|
316
788
|
click.echo(f" Stories: {story_count}")
|
|
317
789
|
|
|
318
790
|
if dry_run:
|
|
@@ -320,9 +792,9 @@ def remove_epic(epic_id: str, dry_run: bool):
|
|
|
320
792
|
return
|
|
321
793
|
|
|
322
794
|
# Remove using yq to preserve comments and formatting
|
|
323
|
-
import subprocess
|
|
795
|
+
import subprocess as sp
|
|
324
796
|
|
|
325
|
-
result =
|
|
797
|
+
result = sp.run(
|
|
326
798
|
[
|
|
327
799
|
"yq", "eval", "-i",
|
|
328
800
|
f'del(.future.initiatives[].epics[] | select(.id == "{epic_id}"))',
|
|
@@ -343,20 +815,1048 @@ def remove_epic(epic_id: str, dry_run: bool):
|
|
|
343
815
|
)
|
|
344
816
|
|
|
345
817
|
|
|
346
|
-
|
|
347
|
-
|
|
818
|
+
@epic.command("promote")
|
|
819
|
+
@click.argument("epic_id")
|
|
820
|
+
def epic_promote(epic_id: str):
|
|
821
|
+
"""Move an epic from future initiatives to current-sprint.yaml.
|
|
348
822
|
|
|
349
|
-
|
|
823
|
+
Detects ID collisions and assigns new IDs if needed.
|
|
824
|
+
Automatically removes the epic from its initiative shard after promotion.
|
|
350
825
|
|
|
351
|
-
|
|
352
|
-
|
|
826
|
+
\b
|
|
827
|
+
Arguments:
|
|
828
|
+
EPIC_ID - Epic ID (e.g., epic-41 or 41)
|
|
353
829
|
|
|
354
|
-
|
|
830
|
+
\b
|
|
831
|
+
Examples:
|
|
832
|
+
pf sprint epic promote epic-41
|
|
833
|
+
pf sprint epic promote 41
|
|
834
|
+
"""
|
|
835
|
+
import copy
|
|
355
836
|
|
|
356
|
-
|
|
357
|
-
|
|
837
|
+
import yaml
|
|
838
|
+
|
|
839
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
840
|
+
|
|
841
|
+
root = get_project_root()
|
|
842
|
+
sprint_dir = root / "sprint"
|
|
843
|
+
sprint_file = sprint_dir / "current-sprint.yaml"
|
|
844
|
+
|
|
845
|
+
if not sprint_file.exists():
|
|
846
|
+
raise click.ClickException(f"Sprint file not found: {sprint_file}")
|
|
847
|
+
|
|
848
|
+
# Find the epic in initiative shards
|
|
849
|
+
epic_data = None
|
|
850
|
+
source_init_file = None
|
|
851
|
+
source_ref = None
|
|
852
|
+
|
|
853
|
+
for init_file in sorted(sprint_dir.glob("initiative-*.yaml")):
|
|
854
|
+
with open(init_file) as f:
|
|
855
|
+
init_data = yaml.safe_load(f.read())
|
|
856
|
+
if not init_data:
|
|
857
|
+
continue
|
|
858
|
+
for e in init_data.get("epics", []):
|
|
859
|
+
edata = _resolve_epic_ref(e, sprint_dir)
|
|
860
|
+
if not edata:
|
|
861
|
+
continue
|
|
862
|
+
eid = str(edata.get("id", ""))
|
|
863
|
+
if _epic_ref_matches(eid, epic_id):
|
|
864
|
+
epic_data = copy.deepcopy(edata)
|
|
865
|
+
source_init_file = init_file
|
|
866
|
+
source_ref = e
|
|
867
|
+
break
|
|
868
|
+
if epic_data:
|
|
869
|
+
break
|
|
870
|
+
|
|
871
|
+
if not epic_data:
|
|
872
|
+
raise click.ClickException(f"Epic {epic_id} not found in future initiatives")
|
|
873
|
+
|
|
874
|
+
# Load current sprint
|
|
875
|
+
with open(sprint_file) as f:
|
|
876
|
+
sprint_data = yaml.safe_load(f.read())
|
|
877
|
+
|
|
878
|
+
if not sprint_data:
|
|
879
|
+
raise click.ClickException(f"Invalid sprint file: {sprint_file}")
|
|
880
|
+
|
|
881
|
+
if "epics" not in sprint_data:
|
|
882
|
+
sprint_data["epics"] = []
|
|
883
|
+
|
|
884
|
+
# Check for ID collision
|
|
885
|
+
original_id = str(epic_data.get("id", epic_id))
|
|
886
|
+
new_epic_id = original_id
|
|
887
|
+
existing_ids = {str(e.get("id", "")) for e in sprint_data["epics"] if isinstance(e, dict)}
|
|
888
|
+
|
|
889
|
+
if new_epic_id in existing_ids:
|
|
890
|
+
max_num = 0
|
|
891
|
+
for eid in existing_ids:
|
|
892
|
+
if eid.startswith("epic-"):
|
|
893
|
+
try:
|
|
894
|
+
max_num = max(max_num, int(eid.replace("epic-", "")))
|
|
895
|
+
except ValueError:
|
|
896
|
+
pass
|
|
897
|
+
new_epic_id = f"epic-{max_num + 1}"
|
|
898
|
+
click.echo(f"Warning: Epic ID {original_id} already exists. Assigning new ID: {new_epic_id}")
|
|
899
|
+
|
|
900
|
+
# Transform epic for current sprint
|
|
901
|
+
old_id_num = original_id.replace("epic-", "")
|
|
902
|
+
new_id_num = new_epic_id.replace("epic-", "")
|
|
903
|
+
|
|
904
|
+
epic_data["id"] = new_epic_id
|
|
905
|
+
epic_data["status"] = "backlog"
|
|
906
|
+
if not epic_data.get("title", "").startswith("Epic:"):
|
|
907
|
+
epic_data["title"] = f"Epic: {epic_data.get('title', 'Unknown')}"
|
|
908
|
+
|
|
909
|
+
for s in epic_data.get("stories", []):
|
|
910
|
+
sid = str(s.get("id", ""))
|
|
911
|
+
if sid.startswith(f"{old_id_num}-"):
|
|
912
|
+
s["id"] = sid.replace(f"{old_id_num}-", f"{new_id_num}-", 1)
|
|
913
|
+
s["status"] = "backlog"
|
|
914
|
+
s.setdefault("repos", "pennyfarthing")
|
|
915
|
+
s.setdefault("workflow", "tdd")
|
|
916
|
+
s.setdefault("priority", "P2")
|
|
917
|
+
s.setdefault("acceptance_criteria", [])
|
|
918
|
+
|
|
919
|
+
story_count = len(epic_data.get("stories", []))
|
|
920
|
+
|
|
921
|
+
click.echo("")
|
|
922
|
+
click.echo("Promoting epic to current sprint:")
|
|
923
|
+
click.echo(f" Original ID: {original_id}")
|
|
924
|
+
if new_epic_id != original_id:
|
|
925
|
+
click.echo(f" New ID: {new_epic_id}")
|
|
926
|
+
click.echo(f" Title: {epic_data.get('title')}")
|
|
927
|
+
click.echo(f" Points: {epic_data.get('points', 0)}")
|
|
928
|
+
click.echo(f" Stories: {story_count}")
|
|
929
|
+
click.echo("")
|
|
930
|
+
|
|
931
|
+
# Append to sprint
|
|
932
|
+
sprint_data["epics"].append(epic_data)
|
|
933
|
+
|
|
934
|
+
from pennyfarthing_scripts.sprint.yaml_io import write_sprint
|
|
935
|
+
write_sprint(sprint_file, sprint_data)
|
|
936
|
+
click.echo(f"Added epic to {sprint_file}")
|
|
937
|
+
|
|
938
|
+
# Remove from initiative shard
|
|
939
|
+
with open(source_init_file) as f:
|
|
940
|
+
init_data = yaml.safe_load(f.read())
|
|
941
|
+
|
|
942
|
+
if isinstance(source_ref, str):
|
|
943
|
+
# String ref — remove from list and delete shard file
|
|
944
|
+
init_data["epics"] = [e for e in init_data.get("epics", []) if e != source_ref]
|
|
945
|
+
shard = _epic_shard_path(sprint_dir, source_ref)
|
|
946
|
+
if shard.exists():
|
|
947
|
+
shard.unlink()
|
|
948
|
+
else:
|
|
949
|
+
# Inline dict — remove matching entry
|
|
950
|
+
init_data["epics"] = [
|
|
951
|
+
e for e in init_data.get("epics", [])
|
|
952
|
+
if not (isinstance(e, dict) and _epic_ref_matches(str(e.get("id", "")), epic_id))
|
|
953
|
+
]
|
|
954
|
+
|
|
955
|
+
remaining_epics = init_data.get("epics", [])
|
|
956
|
+
if remaining_epics:
|
|
957
|
+
# Initiative still has epics — update shard in place
|
|
958
|
+
with open(source_init_file, "w") as f:
|
|
959
|
+
yaml.dump(init_data, f, default_flow_style=False, sort_keys=False)
|
|
960
|
+
click.echo(f"Removed {original_id} from {source_init_file.name}")
|
|
961
|
+
else:
|
|
962
|
+
# Initiative is empty — remove shard and future.yaml reference
|
|
963
|
+
init_name = init_data.get("name", "")
|
|
964
|
+
init_slug = source_init_file.stem.replace("initiative-", "")
|
|
965
|
+
source_init_file.unlink()
|
|
966
|
+
click.echo(f"Removed empty initiative shard: {source_init_file.name}")
|
|
967
|
+
|
|
968
|
+
# Remove from future.yaml
|
|
969
|
+
future_file = sprint_dir / "future.yaml"
|
|
970
|
+
if future_file.exists():
|
|
971
|
+
with open(future_file) as f:
|
|
972
|
+
future_data = yaml.safe_load(f.read()) or {}
|
|
973
|
+
future_inits = future_data.get("future", {}).get("initiatives", [])
|
|
974
|
+
if init_slug in future_inits:
|
|
975
|
+
future_inits.remove(init_slug)
|
|
976
|
+
with open(future_file, "w") as f:
|
|
977
|
+
yaml.dump(future_data, f, default_flow_style=False, sort_keys=False)
|
|
978
|
+
click.echo(f"Removed '{init_slug}' from future.yaml")
|
|
979
|
+
|
|
980
|
+
click.echo("")
|
|
981
|
+
click.echo("Promotion complete!")
|
|
982
|
+
click.echo("")
|
|
983
|
+
click.echo("Next steps:")
|
|
984
|
+
click.echo(f" 1. Review the epic: pf sprint epic show {new_epic_id}")
|
|
985
|
+
click.echo(f" 2. Create Jira epic: pf jira create epic {new_epic_id}")
|
|
986
|
+
click.echo(f" 3. Start work: /sprint work {new_id_num}-1")
|
|
987
|
+
|
|
988
|
+
|
|
989
|
+
# Register epic-add as epic.add
|
|
990
|
+
from pennyfarthing_scripts.sprint.epic_add import epic_add_command
|
|
991
|
+
|
|
992
|
+
epic.add_command(epic_add_command, "add")
|
|
993
|
+
|
|
994
|
+
|
|
995
|
+
# --- Initiative subgroup ---
|
|
358
996
|
|
|
997
|
+
@sprint.group()
|
|
998
|
+
def initiative():
|
|
999
|
+
"""Initiative operations (show, cancel)."""
|
|
1000
|
+
pass
|
|
1001
|
+
|
|
1002
|
+
|
|
1003
|
+
@initiative.command("show")
|
|
1004
|
+
@click.argument("name")
|
|
1005
|
+
@click.option("--json", "output_json", is_flag=True, help="Output as JSON")
|
|
1006
|
+
def initiative_show(name: str, output_json: bool):
|
|
1007
|
+
"""Show details for a specific initiative.
|
|
1008
|
+
|
|
1009
|
+
\b
|
|
1010
|
+
Arguments:
|
|
1011
|
+
NAME - Initiative slug (e.g., benchmark-reliability, technical-debt)
|
|
1012
|
+
|
|
1013
|
+
\b
|
|
1014
|
+
Examples:
|
|
1015
|
+
pf sprint initiative show benchmark-reliability
|
|
1016
|
+
pf sprint initiative show technical-debt --json
|
|
1017
|
+
"""
|
|
1018
|
+
import json as json_mod
|
|
1019
|
+
|
|
1020
|
+
import yaml
|
|
1021
|
+
|
|
1022
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
1023
|
+
|
|
1024
|
+
root = get_project_root()
|
|
1025
|
+
init_file = root / "sprint" / f"initiative-{name}.yaml"
|
|
1026
|
+
|
|
1027
|
+
if not init_file.exists():
|
|
1028
|
+
raise click.ClickException(f"Initiative not found: {name}\n Expected: {init_file}")
|
|
1029
|
+
|
|
1030
|
+
with open(init_file) as f:
|
|
1031
|
+
init_data = yaml.safe_load(f.read())
|
|
1032
|
+
|
|
1033
|
+
if not init_data:
|
|
1034
|
+
raise click.ClickException(f"Empty initiative file: {init_file}")
|
|
1035
|
+
|
|
1036
|
+
if output_json:
|
|
1037
|
+
click.echo(json_mod.dumps(init_data, indent=2, default=str))
|
|
1038
|
+
return
|
|
1039
|
+
|
|
1040
|
+
click.echo(f"Initiative: {init_data.get('name', name)}")
|
|
1041
|
+
click.echo(f"Status: {init_data.get('status', 'N/A')}")
|
|
1042
|
+
if init_data.get("total_points"):
|
|
1043
|
+
click.echo(f"Total Points: {init_data.get('total_points')}")
|
|
1044
|
+
if init_data.get("blocked_by"):
|
|
1045
|
+
click.echo(f"Blocked By: {init_data.get('blocked_by')}")
|
|
1046
|
+
if init_data.get("description"):
|
|
1047
|
+
click.echo(f"Description: {init_data.get('description').rstrip()}")
|
|
1048
|
+
|
|
1049
|
+
epics = init_data.get("epics", [])
|
|
1050
|
+
if epics:
|
|
1051
|
+
click.echo(f"\nEpics ({len(epics)}):")
|
|
1052
|
+
sprint_dir = root / "sprint"
|
|
1053
|
+
for e in epics:
|
|
1054
|
+
if isinstance(e, str):
|
|
1055
|
+
# String ref — try to load shard for details
|
|
1056
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
1057
|
+
if shard.exists():
|
|
1058
|
+
with open(shard) as sf:
|
|
1059
|
+
edata = yaml.safe_load(sf.read())
|
|
1060
|
+
if edata:
|
|
1061
|
+
etitle = edata.get("title", "?")
|
|
1062
|
+
epts = edata.get("points", "?")
|
|
1063
|
+
estat = edata.get("status", "?")
|
|
1064
|
+
click.echo(f" {edata.get('id', e)}: {etitle} [{epts}pts] ({estat})")
|
|
1065
|
+
continue
|
|
1066
|
+
click.echo(f" {e} (shard not found)")
|
|
1067
|
+
elif isinstance(e, dict):
|
|
1068
|
+
eid = e.get("id", "?")
|
|
1069
|
+
etitle = e.get("title", "?")
|
|
1070
|
+
epts = e.get("points", "?")
|
|
1071
|
+
estat = e.get("status", "?")
|
|
1072
|
+
click.echo(f" {eid}: {etitle} [{epts}pts] ({estat})")
|
|
1073
|
+
|
|
1074
|
+
standalone_stories = init_data.get("standalone_stories", [])
|
|
1075
|
+
if standalone_stories:
|
|
1076
|
+
click.echo(f"\nStandalone Stories ({len(standalone_stories)}):")
|
|
1077
|
+
for s in standalone_stories:
|
|
1078
|
+
sid = s.get("id", "?")
|
|
1079
|
+
stitle = s.get("title", "?")
|
|
1080
|
+
spts = s.get("points", "?")
|
|
1081
|
+
sstat = s.get("status", "?")
|
|
1082
|
+
click.echo(f" {sid}: {stitle} [{spts}pts] ({sstat})")
|
|
1083
|
+
|
|
1084
|
+
|
|
1085
|
+
@initiative.command("cancel")
|
|
1086
|
+
@click.argument("name")
|
|
1087
|
+
@click.option("--jira", is_flag=True, help="Also cancel epics in Jira")
|
|
1088
|
+
@click.option("--dry-run", is_flag=True, help="Show what would be done without making changes")
|
|
1089
|
+
def initiative_cancel(name: str, jira: bool, dry_run: bool):
|
|
1090
|
+
"""Cancel an initiative and all its epics/stories.
|
|
1091
|
+
|
|
1092
|
+
Sets the initiative status to 'canceled' and cancels all epics and stories
|
|
1093
|
+
within it.
|
|
1094
|
+
|
|
1095
|
+
\b
|
|
1096
|
+
Arguments:
|
|
1097
|
+
NAME - Initiative slug (e.g., benchmark-reliability, technical-debt)
|
|
1098
|
+
|
|
1099
|
+
\b
|
|
1100
|
+
Examples:
|
|
1101
|
+
pf sprint initiative cancel technical-debt --dry-run
|
|
1102
|
+
pf sprint initiative cancel technical-debt
|
|
1103
|
+
pf sprint initiative cancel technical-debt --jira
|
|
1104
|
+
"""
|
|
1105
|
+
import yaml
|
|
1106
|
+
|
|
1107
|
+
from pennyfarthing_scripts.common.config import get_project_root
|
|
1108
|
+
|
|
1109
|
+
root = get_project_root()
|
|
1110
|
+
sprint_dir = root / "sprint"
|
|
1111
|
+
init_file = sprint_dir / f"initiative-{name}.yaml"
|
|
1112
|
+
|
|
1113
|
+
if not init_file.exists():
|
|
1114
|
+
raise click.ClickException(f"Initiative not found: {name}\n Expected: {init_file}")
|
|
1115
|
+
|
|
1116
|
+
with open(init_file) as f:
|
|
1117
|
+
init_data = yaml.safe_load(f.read())
|
|
1118
|
+
|
|
1119
|
+
if not init_data:
|
|
1120
|
+
raise click.ClickException(f"Empty initiative file: {init_file}")
|
|
1121
|
+
|
|
1122
|
+
init_name = init_data.get("name", name)
|
|
1123
|
+
epics = init_data.get("epics", [])
|
|
1124
|
+
standalone_stories = init_data.get("standalone_stories", [])
|
|
1125
|
+
|
|
1126
|
+
# Collect Jira keys for warning
|
|
1127
|
+
jira_keys = []
|
|
1128
|
+
epic_count = 0
|
|
1129
|
+
story_count = 0
|
|
1130
|
+
|
|
1131
|
+
for e in epics:
|
|
1132
|
+
if isinstance(e, str):
|
|
1133
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
1134
|
+
if shard.exists():
|
|
1135
|
+
with open(shard) as sf:
|
|
1136
|
+
edata = yaml.safe_load(sf.read())
|
|
1137
|
+
if edata:
|
|
1138
|
+
epic_count += 1
|
|
1139
|
+
if edata.get("jira"):
|
|
1140
|
+
jira_keys.append(edata["jira"])
|
|
1141
|
+
story_count += len(edata.get("stories", []))
|
|
1142
|
+
elif isinstance(e, dict):
|
|
1143
|
+
epic_count += 1
|
|
1144
|
+
if e.get("jira"):
|
|
1145
|
+
jira_keys.append(e["jira"])
|
|
1146
|
+
story_count += len(e.get("stories", []))
|
|
1147
|
+
|
|
1148
|
+
story_count += len(standalone_stories)
|
|
1149
|
+
|
|
1150
|
+
click.echo(f"Initiative: {init_name}")
|
|
1151
|
+
click.echo(f"Epics: {epic_count}")
|
|
1152
|
+
click.echo(f"Stories: {story_count}")
|
|
1153
|
+
|
|
1154
|
+
if jira_keys and not jira:
|
|
1155
|
+
click.echo(f"\nWarning: {len(jira_keys)} epic(s) have Jira keys -- pass --jira to also cancel in Jira")
|
|
1156
|
+
for k in jira_keys:
|
|
1157
|
+
click.echo(f" {k}")
|
|
1158
|
+
|
|
1159
|
+
if dry_run:
|
|
1160
|
+
click.echo(f"\n[DRY-RUN] Would cancel initiative '{init_name}' ({epic_count} epics, {story_count} stories)")
|
|
1161
|
+
return
|
|
1162
|
+
|
|
1163
|
+
# Cancel all epics
|
|
1164
|
+
for i, e in enumerate(epics):
|
|
1165
|
+
if isinstance(e, str):
|
|
1166
|
+
shard = _epic_shard_path(sprint_dir, e)
|
|
1167
|
+
if shard.exists():
|
|
1168
|
+
with open(shard) as sf:
|
|
1169
|
+
edata = yaml.safe_load(sf.read())
|
|
1170
|
+
if edata:
|
|
1171
|
+
edata["status"] = "canceled"
|
|
1172
|
+
for s in edata.get("stories", []):
|
|
1173
|
+
s["status"] = "canceled"
|
|
1174
|
+
with open(shard, "w") as sf:
|
|
1175
|
+
yaml.dump(edata, sf, default_flow_style=False, sort_keys=False)
|
|
1176
|
+
if jira and edata.get("jira"):
|
|
1177
|
+
_transition_jira(edata["jira"], "Cancelled")
|
|
1178
|
+
elif isinstance(e, dict):
|
|
1179
|
+
e["status"] = "canceled"
|
|
1180
|
+
for s in e.get("stories", []):
|
|
1181
|
+
s["status"] = "canceled"
|
|
1182
|
+
if jira and e.get("jira"):
|
|
1183
|
+
_transition_jira(e["jira"], "Cancelled")
|
|
1184
|
+
|
|
1185
|
+
# Cancel standalone stories
|
|
1186
|
+
for s in standalone_stories:
|
|
1187
|
+
s["status"] = "canceled"
|
|
1188
|
+
|
|
1189
|
+
# Update initiative status
|
|
1190
|
+
init_data["status"] = "canceled"
|
|
1191
|
+
|
|
1192
|
+
with open(init_file, "w") as f:
|
|
1193
|
+
yaml.dump(init_data, f, default_flow_style=False, sort_keys=False)
|
|
1194
|
+
|
|
1195
|
+
click.echo(f"\nCanceled initiative '{init_name}' ({epic_count} epics, {story_count} stories)")
|
|
1196
|
+
if jira and jira_keys:
|
|
1197
|
+
click.echo(f"Transitioned {len(jira_keys)} Jira epic(s) to Cancelled")
|
|
1198
|
+
|
|
1199
|
+
|
|
1200
|
+
# --- Check command (replaces check-story.sh) ---
|
|
1201
|
+
|
|
1202
|
+
@sprint.command()
@click.argument("id")
def check(id: str):
    """Check story/epic availability. Returns JSON.

    \b
    Arguments:
      ID - Story ID, epic ID, or 'next' for highest priority

    \b
    Returns JSON with type, details, and availability:
      type: "story" | "epic" | "next" | "not_found"
    """
    import json

    # Imports are function-local to keep CLI startup fast.
    # (Removed unused import: get_all_stories was never referenced here.)
    from pennyfarthing_scripts.sprint.loader import (
        find_epic,
        load_sprint,
    )
    from pennyfarthing_scripts.sprint.work import check_story, get_next_story

    data = load_sprint()

    # Special ID "next": pick the highest-priority available story.
    if id == "next":
        result = get_next_story()
        if result.get("available"):
            story = result["story"]
            # Find parent epic
            epic_id = _find_epic_for_story(data, story.get("id", ""))
            out = {
                "type": "next",
                "story": {
                    "id": story.get("id"),
                    "title": story.get("title"),
                    "points": story.get("points", 0),
                    "priority": story.get("priority", "P2"),
                    "workflow": story.get("workflow", "tdd"),
                    "repos": story.get("repos", "pennyfarthing"),
                    "epic_id": epic_id,
                    "acceptance_criteria": story.get("acceptance_criteria", []),
                },
            }
        else:
            out = {"type": "next", "story": None, "message": "No available stories in backlog"}
        click.echo(json.dumps(out, indent=2))
        return

    # Check if it's an epic
    if data:
        epic = find_epic(data, id)
        if epic:
            # A story is "available" while it has not been started.
            available_statuses = {"backlog", "ready", "planning"}
            available = [
                s for s in epic.get("stories", [])
                if s.get("status") in available_statuses
            ]
            # Sort by priority (unknown priorities rank with P2)
            priority_order = {"P0": 0, "P1": 1, "P2": 2, "P3": 3}
            available.sort(key=lambda s: priority_order.get(s.get("priority", "P2"), 2))

            first = available[0] if available else None
            out = {
                "type": "epic",
                "id": str(epic.get("id", id)),
                "title": epic.get("title", "Unknown"),
                "available_stories": len(available),
            }
            if first:
                out["first_story"] = {
                    "id": first.get("id"),
                    "title": first.get("title"),
                    "points": first.get("points", 0),
                    "workflow": first.get("workflow", "tdd"),
                    "repos": first.get("repos", "pennyfarthing"),
                    "acceptance_criteria": first.get("acceptance_criteria", []),
                }
            else:
                out["first_story"] = None
                out["message"] = "No available stories in this epic"
            click.echo(json.dumps(out, indent=2))
            return

    # Check if it's a story
    result = check_story(id)
    story = result.get("story")
    if story:
        epic_id = _find_epic_for_story(data, story.get("id", ""))
        out = {
            "type": "story",
            "id": story.get("id", id),
            "title": story.get("title", "Unknown"),
            "points": story.get("points", 0),
            "workflow": story.get("workflow", "tdd"),
            "status": story.get("status", "backlog"),
            "assigned_to": story.get("assigned_to", ""),
            "epic_id": epic_id,
            "repos": story.get("repos", "pennyfarthing"),
            "available": result.get("available", False),
            "acceptance_criteria": story.get("acceptance_criteria", []),
        }
        click.echo(json.dumps(out, indent=2))
        return

    # Not found
    click.echo(json.dumps({
        "type": "not_found",
        "id": id,
        "message": "Story or epic not found in current sprint",
    }, indent=2))
|
|
1312
|
+
|
|
1313
|
+
|
|
1314
|
+
def _find_epic_for_story(data: dict | None, story_id: str) -> str:
|
|
1315
|
+
"""Find the parent epic ID for a story."""
|
|
1316
|
+
if not data or "epics" not in data:
|
|
1317
|
+
return ""
|
|
1318
|
+
for epic in data["epics"]:
|
|
1319
|
+
if not isinstance(epic, dict):
|
|
1320
|
+
continue
|
|
1321
|
+
for s in epic.get("stories", []):
|
|
1322
|
+
if s.get("id") == story_id:
|
|
1323
|
+
return str(epic.get("id", ""))
|
|
1324
|
+
return ""
|
|
1325
|
+
|
|
1326
|
+
|
|
1327
|
+
# --- Info command (replaces sprint-info.sh) ---
|
|
1328
|
+
|
|
1329
|
+
@sprint.command()
def info():
    """Output sprint info as JSON for Cyclist sidebar.

    \b
    Returns: {"remaining": N, "inProgress": N, "endDate": "YYYY-MM-DD"}
    """
    import json

    from pennyfarthing_scripts.sprint.loader import get_all_stories, get_sprint_info

    meta = get_sprint_info()
    all_stories = get_all_stories()

    # Tally points into the two buckets the sidebar displays; a missing
    # status counts as not-started, and None/missing points count as 0.
    not_started = ("backlog", "planning", "ready", None)
    remaining = 0
    in_progress = 0
    for item in all_stories:
        pts = item.get("points", 0) or 0
        status = item.get("status")
        if status in not_started:
            remaining += pts
        elif status == "in_progress":
            in_progress += pts

    end = meta.get("end_date")
    payload = {
        "remaining": remaining,
        "inProgress": in_progress,
        "endDate": str(end) if end else None,
    }
    click.echo(json.dumps(payload))
|
|
1361
|
+
|
|
1362
|
+
|
|
1363
|
+
# --- Metrics command (replaces sprint-metrics.sh) ---
|
|
1364
|
+
|
|
1365
|
+
@sprint.command()
@click.option("--json", "output_json", is_flag=True, help="Output in JSON format")
def metrics(output_json: bool):
    """Display sprint metrics and progress.

    Shows points, stories, timeline, and velocity tracking.
    """
    import json
    from datetime import date, datetime

    from pennyfarthing_scripts.sprint.loader import get_all_stories, get_sprint_info

    sprint_data = get_sprint_info()
    stories = get_all_stories()

    if not sprint_data:
        click.echo("No sprint data available")
        return

    sprint_name = sprint_data.get("name", "Unknown")
    goal = sprint_data.get("goal", "")
    start_date_str = sprint_data.get("start_date", "")
    end_date_str = sprint_data.get("end_date", "")

    # Count stories/points by status
    # A missing status (None) is treated as backlog; "done"/"completed"
    # are both accepted as finished.
    done_stories = [s for s in stories if s.get("status") in ("done", "completed")]
    wip_stories = [s for s in stories if s.get("status") == "in_progress"]
    backlog_stories = [s for s in stories if s.get("status") in ("backlog", "planning", "ready", None)]

    # "or 0" coerces an explicit points: null to zero.
    done_pts = sum(s.get("points", 0) or 0 for s in done_stories)
    wip_pts = sum(s.get("points", 0) or 0 for s in wip_stories)
    backlog_pts = sum(s.get("points", 0) or 0 for s in backlog_stories)
    total_pts = done_pts + wip_pts + backlog_pts

    # Date calculations
    today = date.today()
    try:
        start_date = datetime.strptime(str(start_date_str), "%Y-%m-%d").date()
        end_date = datetime.strptime(str(end_date_str), "%Y-%m-%d").date()
    except (ValueError, TypeError):
        # Unparseable or missing dates collapse the timeline to "today",
        # which zeroes elapsed/remaining below instead of crashing.
        start_date = today
        end_date = today

    # "or 1" avoids division by zero for a same-day sprint.
    total_days = (end_date - start_date).days or 1
    days_elapsed = max(0, (today - start_date).days)
    days_remaining = max(0, (end_date - today).days)

    # Integer percentages (floor division) for stable display.
    pct_complete = (done_pts * 100 // total_pts) if total_pts > 0 else 0
    pct_time = (days_elapsed * 100 // total_days) if total_days > 0 else 0

    # Expected points assume linear burn toward the velocity target.
    velocity_target = sprint_data.get("velocity_target", total_pts)
    expected_pts = (velocity_target * days_elapsed // total_days) if total_days > 0 else 0

    if output_json:
        click.echo(json.dumps({
            "sprint": sprint_name,
            "dates": {
                "start": str(start_date_str),
                "end": str(end_date_str),
                "today": str(today),
            },
            "points": {
                "total": total_pts,
                "completed": done_pts,
                "in_progress": wip_pts,
                "backlog": backlog_pts,
                "velocity_target": velocity_target,
            },
            "stories": {
                "total": len(stories),
                "done": len(done_stories),
                "in_progress": len(wip_stories),
                "backlog": len(backlog_stories),
            },
            "progress": {
                "percent_complete": pct_complete,
                "percent_time": pct_time,
                "days_elapsed": days_elapsed,
                "days_remaining": days_remaining,
                "total_days": total_days,
            },
            "velocity": {
                "expected_points": expected_pts,
                "actual_points": done_pts,
                "on_track": done_pts >= expected_pts,
            },
        }, indent=2))
        return

    # Human-readable output
    click.echo("")
    click.echo(f" Sprint: {sprint_name}")
    click.echo(f" Goal: {goal}")
    click.echo("")
    click.echo(f" Timeline: {start_date_str} to {end_date_str} (Day {days_elapsed}/{total_days}, {days_remaining} remaining)")
    click.echo("")
    click.echo(f" Points: {done_pts} done / {wip_pts} WIP / {backlog_pts} backlog = {total_pts} total ({pct_complete}%)")
    click.echo(f" Stories: {len(done_stories)} done / {len(wip_stories)} WIP / {len(backlog_stories)} backlog = {len(stories)} total")
    click.echo("")
    click.echo(f" Velocity: {done_pts}/{expected_pts} expected ({velocity_target} target)")
    if done_pts >= expected_pts:
        click.echo(" Status: On track")
    else:
        click.echo(" Status: Behind schedule")
|
|
1469
|
+
|
|
1470
|
+
|
|
1471
|
+
# --- Story field command (replaces get-story-field.sh) ---
|
|
1472
|
+
|
|
1473
|
+
@story.command("field")
@click.argument("story_id")
@click.argument("field_name")
def story_field(story_id: str, field_name: str):
    """Get a field value from a story.

    \b
    Arguments:
      STORY_ID - Story ID (e.g., 79-1 or MSSCI-12345)
      FIELD_NAME - Field to extract (e.g., workflow, status, points)

    Returns the field value or "null" if not found.
    """
    from pennyfarthing_scripts.sprint.loader import get_story_by_id, get_story_field, load_sprint

    # Default values for common fields
    defaults = {
        "workflow": "tdd",
        "status": "backlog",
        "repos": "pennyfarthing",
    }

    # First try the sprint-wide field lookup (handles epic-story IDs like
    # "79-1"), then fall back to direct story lookup (handles Jira keys).
    data = load_sprint()
    value = get_story_field(data, story_id, field_name) if data else None

    if value is None:
        record = get_story_by_id(story_id)
        value = record.get(field_name) if record else None

    if value is not None:
        click.echo(str(value))
        return

    # Nothing matched: emit the field's default, or "null".
    click.echo(defaults.get(field_name, "null"))
|
|
1513
|
+
|
|
1514
|
+
|
|
1515
|
+
# --- Epic field command (replaces get-epic-field.sh) ---
|
|
1516
|
+
|
|
1517
|
+
@epic.command("field")
@click.argument("epic_id")
@click.argument("field_name")
def epic_field(epic_id: str, field_name: str):
    """Get a field value from an epic.

    \b
    Arguments:
      EPIC_ID - Epic ID (e.g., epic-79 or 79)
      FIELD_NAME - Field to extract (e.g., jira, title, status)

    Returns the field value or "null" if not found.
    """
    from pennyfarthing_scripts.sprint.loader import find_epic, load_sprint

    # Collapse the three failure modes (no sprint, epic missing, field
    # missing) into a single None check.
    data = load_sprint()
    target = find_epic(data, epic_id) if data else None
    value = target.get(field_name) if target else None

    if value is None:
        click.echo("null")
    else:
        # rstrip avoids a trailing newline from block-style YAML scalars.
        click.echo(str(value).rstrip())
|
|
1547
|
+
|
|
1548
|
+
|
|
1549
|
+
# --- Future command (replaces list-future.sh) ---
|
|
1550
|
+
|
|
1551
|
+
@sprint.command()
@click.argument("epic_id", required=False)
def future(epic_id: str | None):
    """Show future work initiatives and epics.

    \b
    Arguments:
      EPIC_ID - Optional epic ID to show detailed stories (e.g., epic-55)

    \b
    Examples:
      pf sprint future          # Show all initiatives
      pf sprint future epic-55  # Show stories for specific epic
    """
    import yaml

    from pennyfarthing_scripts.common.config import get_project_root

    root = get_project_root()
    sprint_dir = root / "sprint"

    # Initiatives live as initiative-*.yaml files next to the sprint file.
    init_files = sorted(sprint_dir.glob("initiative-*.yaml"))
    if not init_files:
        click.echo("No future initiatives found.")
        return

    # If specific epic requested, show detailed view
    if epic_id:
        _show_future_epic_detail(epic_id, init_files, sprint_dir)
        return

    # Default: show initiative summary (markdown-formatted)
    click.echo("# Future Work - Available for Promotion")
    click.echo("")

    total_epics = 0
    total_points = 0

    for init_file in init_files:
        with open(init_file) as f:
            init_data = yaml.safe_load(f.read())
        if not init_data:
            # Skip empty/unparseable initiative files silently.
            continue

        init_name = init_data.get("name", init_file.stem)
        init_status = init_data.get("status", "planning")
        blocked_by = init_data.get("blocked_by")
        init_points = init_data.get("total_points", 0)

        # READY takes precedence over BLOCKED in the status tag.
        if init_status == "ready":
            status_tag = "[READY]"
        elif blocked_by:
            status_tag = "[BLOCKED]"
        else:
            status_tag = f"[{init_status}]"

        click.echo(f"## {init_name} {status_tag}")
        click.echo(f"**Total:** {init_points} points")
        if blocked_by:
            click.echo(f"**Blocked:** {blocked_by}")
        click.echo("")

        click.echo("| Epic | Title | Pts | Pri | Status |")
        click.echo("|------|-------|-----|-----|--------|")

        epics = init_data.get("epics", [])
        for e in epics:
            # Epic entries may be inline dicts or string refs to shard files.
            edata = _resolve_epic_ref(e, sprint_dir)
            if not edata:
                continue
            eid = edata.get("id", "?")
            etitle = edata.get("title", "?")
            # Truncate long titles so the table stays readable.
            if len(etitle) > 40:
                etitle = etitle[:37] + "..."
            epts = edata.get("points", "?")
            epri = edata.get("priority", "P2")
            estat = edata.get("status", "planning")
            click.echo(f"| {eid} | {etitle} | {epts} | {epri} | {estat} |")
            total_epics += 1
            total_points += edata.get("points", 0) or 0

        click.echo("")

    click.echo("---")
    click.echo(f"**Summary:** {total_epics} epics, {total_points} points total")
    click.echo("")
    click.echo("To see epic details: `pf sprint future epic-55`")
    click.echo("To promote an epic: `pf sprint epic promote epic-55`")
|
|
1639
|
+
|
|
1640
|
+
|
|
1641
|
+
def _resolve_epic_ref(ref, sprint_dir) -> dict | None:
    """Resolve an epic reference (string ref or inline dict) to a dict.

    An inline dict is returned unchanged; a string ref is resolved to its
    YAML shard under *sprint_dir*. Anything else, or a missing shard file,
    yields None.
    """
    import yaml

    if isinstance(ref, dict):
        return ref
    if not isinstance(ref, str):
        return None
    shard = _epic_shard_path(sprint_dir, ref)
    if not shard.exists():
        return None
    with open(shard) as fh:
        return yaml.safe_load(fh.read())
|
|
1653
|
+
|
|
1654
|
+
|
|
1655
|
+
def _show_future_epic_detail(epic_id: str, init_files, sprint_dir):
    """Show detailed view of a specific future epic.

    Scans every initiative file for an epic matching *epic_id* (with or
    without the "epic-" prefix) and prints its metadata, a short
    description preview, and a story table.

    Raises:
        click.ClickException: if no matching epic exists in any initiative.
    """
    import yaml

    for init_file in init_files:
        with open(init_file) as f:
            init_data = yaml.safe_load(f.read())
        if not init_data:
            continue

        for e in init_data.get("epics", []):
            edata = _resolve_epic_ref(e, sprint_dir)
            if not edata:
                continue
            eid = str(edata.get("id", ""))
            # Accept an exact match, a bare-number match, or a prefixed
            # match. The third candidate must be built from eid (the
            # previous f"epic-{epic_id}" compared epic_id against itself
            # with a prefix and could never be true), so a stored bare ID
            # like "55" now matches a user-supplied "epic-55".
            if epic_id not in (eid, eid.replace("epic-", ""), f"epic-{eid}"):
                continue

            click.echo(f"# Epic Details: {eid}")
            click.echo("")
            click.echo(f"**Title:** {edata.get('title', '?')}")
            click.echo(f"**Points:** {edata.get('points', '?')} | **Priority:** {edata.get('priority', 'P2')} | **Status:** {edata.get('status', 'planning')}")
            click.echo("")
            desc = edata.get("description", "No description")
            if desc:
                click.echo("**Description:**")
                # Cap the description preview at five lines.
                for line in str(desc).strip().split("\n")[:5]:
                    click.echo(line)
                click.echo("")

            stories = edata.get("stories", [])
            if stories:
                click.echo("## Stories")
                click.echo("")
                click.echo("| ID | Title | Pts | Pri | Status |")
                click.echo("|----|-------|-----|-----|--------|")
                for s in stories:
                    stitle = s.get("title", "?")
                    # Truncate long titles so the table stays readable.
                    if len(stitle) > 45:
                        stitle = stitle[:42] + "..."
                    click.echo(f"| {s.get('id', '?')} | {stitle} | {s.get('points', '?')} | {s.get('priority', 'P1')} | {s.get('status', 'planning')} |")
                click.echo("")

            click.echo("---")
            click.echo(f"To promote this epic: `pf sprint epic promote {eid}`")
            return

    raise click.ClickException(f"Epic {epic_id} not found in future initiatives")
|
|
1703
|
+
|
|
1704
|
+
|
|
1705
|
+
# --- New sprint command (replaces new-sprint.sh) ---
|
|
1706
|
+
|
|
1707
|
+
@sprint.command("new")
@click.argument("sprint_yyww")
@click.argument("jira_id", type=int)
@click.argument("start_date")
@click.argument("end_date")
@click.argument("goal")
def new_sprint(sprint_yyww: str, jira_id: int, start_date: str, end_date: str, goal: str):
    """Initialize a new sprint.

    \b
    Arguments:
      SPRINT_YYWW  Sprint identifier in YYWW format (e.g., 2607)
      JIRA_ID      Jira sprint ID number (e.g., 278)
      START_DATE   Sprint start date YYYY-MM-DD
      END_DATE     Sprint end date YYYY-MM-DD
      GOAL         Sprint goal (quoted string)

    \b
    Examples:
      pf sprint new 2607 278 2026-02-16 2026-03-01 "Performance and polish"
    """
    import json

    from pennyfarthing_scripts.common.config import get_project_root

    root = get_project_root()
    sprint_file = root / "sprint" / "current-sprint.yaml"
    archive_file = root / "sprint" / "archive" / f"sprint-{sprint_yyww}-completed.yaml"

    # Warn if current sprint is active
    if sprint_file.exists():
        import yaml

        with open(sprint_file) as f:
            existing = yaml.safe_load(f.read())
        if existing and existing.get("sprint", {}).get("status") == "active":
            click.echo("Warning: Current sprint is still active!")
            click.echo("Current sprint file will be overwritten.")
            if not click.confirm("Continue?"):
                click.echo("Aborted.")
                return

    # Create sprint file using write_sprint for consistency
    from pennyfarthing_scripts.sprint.yaml_io import write_sprint

    sprint_data = {
        "sprint": {
            "name": f"TO Sprint {sprint_yyww}",
            "jira_sprint_id": jira_id,
            "jira_sprint_name": f"TO Sprint {sprint_yyww}",
            "goal": goal,
            "start_date": start_date,
            "end_date": end_date,
            "status": "active",
        },
        "epics": [],
    }
    write_sprint(sprint_file, sprint_data)
    click.echo(f"Created {sprint_file}")

    # Create archive file
    from datetime import date

    # json.dumps yields a double-quoted, escaped scalar that is valid YAML,
    # so goals containing ':' or quotes can no longer corrupt the archive
    # (previously the goal was interpolated unquoted).
    quoted_goal = json.dumps(goal)
    archive_content = f"""# Sprint TO Sprint {sprint_yyww} - Completed Stories
# Jira Sprint ID: {jira_id}
# Archived: {date.today()}

sprint:
  name: "TO Sprint {sprint_yyww}"
  jira_sprint_id: {jira_id}
  jira_sprint_name: "TO Sprint {sprint_yyww}"
  goal: {quoted_goal}

completed:
  # Completed stories will be appended here by pf sprint archive
"""
    archive_file.parent.mkdir(parents=True, exist_ok=True)
    archive_file.write_text(archive_content)
    click.echo(f"Created {archive_file}")

    click.echo("")
    click.echo("New sprint initialized:")
    click.echo(f" Name: TO Sprint {sprint_yyww}")
    click.echo(f" Jira ID: {jira_id}")
    click.echo(f" Dates: {start_date} to {end_date}")
    click.echo(f" Goal: {goal}")
    click.echo("")
    click.echo("Next steps:")
    click.echo(" 1. Add epics: pf sprint epic promote <epic-id>")
    click.echo(" 2. Check status: pf sprint status")
|
|
1796
|
+
|
|
1797
|
+
# --- Standalone command ---
|
|
1798
|
+
|
|
1799
|
+
@sprint.command()
@click.argument("title", required=False)
@click.argument("points", required=False, type=int)
def standalone(title: str | None, points: int | None):
    """Wrap current changes into a standalone Jira story, branch, PR, and merge.

    This is an agent-executed workflow. Use /standalone to run it interactively.
    """
    # The arguments exist only for CLI discoverability; the workflow itself
    # is driven by the agent, so this command just redirects the user.
    for message in (
        "The standalone command is an agent-executed workflow.",
        "Use /standalone to run it interactively with full agent support.",
    ):
        click.echo(message)
|
|
1809
|
+
|
|
1810
|
+
|
|
1811
|
+
# --- Backwards compatibility aliases (hidden) ---
|
|
1812
|
+
|
|
1813
|
+
# Hidden alias: sprint story-add -> sprint story add
# Re-registering an existing command object under a second name; marking it
# hidden keeps the alias out of --help while old scripts keep working.
sprint.add_command(story_add_command, "story-add")
sprint.commands["story-add"].hidden = True

# Hidden alias: sprint story-update -> sprint story update
sprint.add_command(story_update_command, "story-update")
sprint.commands["story-update"].hidden = True

# Hidden alias: sprint archive-epic -> sprint epic archive
@sprint.command("archive-epic", hidden=True)
@click.argument("epic_id", required=False)
@click.option("--dry-run", is_flag=True)
@click.option("--jira", is_flag=True)
def archive_epic_compat(epic_id, dry_run, jira):
    """(Deprecated) Use 'sprint epic archive' instead."""
    # Delegate to the canonical command so the alias never diverges from it.
    ctx = click.get_current_context()
    ctx.invoke(epic_archive, epic_id=epic_id, dry_run=dry_run, jira=jira)

# Hidden alias: sprint import-epic -> sprint epic import
@sprint.command("import-epic", hidden=True)
@click.argument("epics_file")
@click.argument("initiative_name", required=False)
@click.option("--marker", default="imported")
@click.option("--dry-run", is_flag=True)
def import_epic_compat(epics_file, initiative_name, marker, dry_run):
    """(Deprecated) Use 'sprint epic import' instead."""
    ctx = click.get_current_context()
    ctx.invoke(epic_import, epics_file=epics_file, initiative_name=initiative_name, marker=marker, dry_run=dry_run)

# Hidden alias: sprint remove-epic -> sprint epic remove
@sprint.command("remove-epic", hidden=True)
@click.argument("epic_id")
@click.option("--dry-run", is_flag=True)
def remove_epic_compat(epic_id, dry_run):
    """(Deprecated) Use 'sprint epic remove' instead."""
    ctx = click.get_current_context()
    ctx.invoke(epic_remove, epic_id=epic_id, dry_run=dry_run)

# Hidden alias: sprint epic-add -> sprint epic add
sprint.add_command(epic_add_command, "epic-add")
sprint.commands["epic-add"].hidden = True


# Register validate command from validate_cmd module
from pennyfarthing_scripts.sprint.validate_cmd import validate_command

sprint.add_command(validate_command)
|
|
360
1860
|
|
|
361
1861
|
|
|
362
1862
|
# For backwards compatibility when running as module
|