workstream-cli 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- workstream/ARCHITECTURE.md +89 -0
- workstream/__init__.py +8 -0
- workstream/cli.py +136 -0
- workstream/commands/__init__.py +0 -0
- workstream/commands/backfill.py +139 -0
- workstream/commands/block.py +93 -0
- workstream/commands/checkin.py +51 -0
- workstream/commands/cron.py +119 -0
- workstream/commands/focus_cmd.py +273 -0
- workstream/commands/idea.py +172 -0
- workstream/commands/index.py +89 -0
- workstream/commands/init.py +567 -0
- workstream/commands/inspect_cmd.py +354 -0
- workstream/commands/list_cmd.py +99 -0
- workstream/commands/nest.py +108 -0
- workstream/commands/new.py +95 -0
- workstream/commands/next_cmd.py +333 -0
- workstream/commands/report.py +190 -0
- workstream/commands/resume.py +145 -0
- workstream/commands/review.py +227 -0
- workstream/commands/serve.py +23 -0
- workstream/commands/setup.py +178 -0
- workstream/commands/show.py +123 -0
- workstream/commands/snooze.py +117 -0
- workstream/commands/stale.py +116 -0
- workstream/commands/sweep.py +1753 -0
- workstream/commands/tree.py +105 -0
- workstream/commands/update_status.py +117 -0
- workstream/config.py +322 -0
- workstream/extensions/__init__.py +0 -0
- workstream/extensions/workstream.ts +633 -0
- workstream/focus_artifact.py +157 -0
- workstream/git.py +194 -0
- workstream/harness.py +49 -0
- workstream/llm.py +78 -0
- workstream/markdown.py +501 -0
- workstream/models.py +274 -0
- workstream/plan_index.py +88 -0
- workstream/provisioning.py +196 -0
- workstream/repo_discovery.py +158 -0
- workstream/review_artifact.py +96 -0
- workstream/scripts/migrate_statuses.py +120 -0
- workstream/skills/__init__.py +0 -0
- workstream/skills/workstream_context/SKILL.md +75 -0
- workstream/skills/workstream_context/__init__.py +0 -0
- workstream/skills/workstream_focus/SKILL.md +141 -0
- workstream/skills/workstream_init/SKILL.md +86 -0
- workstream/skills/workstream_review/SKILL.md +224 -0
- workstream/skills/workstream_sweep/SKILL.md +178 -0
- workstream/sweep_state.py +93 -0
- workstream/templates/dashboard.html +382 -0
- workstream/templates/detail.html +360 -0
- workstream/templates/plan.html +210 -0
- workstream/test/__init__.py +0 -0
- workstream/test/conftest.py +221 -0
- workstream/test/fixtures/sample_sprint_note.md +10 -0
- workstream/test/fixtures/sample_workstream.md +41 -0
- workstream/test/test_backfill.py +180 -0
- workstream/test/test_batch_writeback.py +81 -0
- workstream/test/test_commands.py +938 -0
- workstream/test/test_config.py +54 -0
- workstream/test/test_focus_artifact.py +211 -0
- workstream/test/test_git.py +88 -0
- workstream/test/test_heuristics.py +136 -0
- workstream/test/test_hierarchy.py +231 -0
- workstream/test/test_init.py +452 -0
- workstream/test/test_inspect.py +143 -0
- workstream/test/test_llm.py +78 -0
- workstream/test/test_markdown.py +626 -0
- workstream/test/test_models.py +506 -0
- workstream/test/test_next.py +206 -0
- workstream/test/test_plan_index.py +83 -0
- workstream/test/test_provisioning.py +270 -0
- workstream/test/test_repo_discovery.py +181 -0
- workstream/test/test_resume.py +71 -0
- workstream/test/test_sweep.py +1196 -0
- workstream/test/test_sweep_state.py +86 -0
- workstream/test/test_thoughts.py +516 -0
- workstream/test/test_web.py +606 -0
- workstream/thoughts.py +505 -0
- workstream/web.py +444 -0
- workstream_cli-0.0.1.dist-info/LICENSE +21 -0
- workstream_cli-0.0.1.dist-info/METADATA +93 -0
- workstream_cli-0.0.1.dist-info/RECORD +86 -0
- workstream_cli-0.0.1.dist-info/WHEEL +4 -0
- workstream_cli-0.0.1.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,1196 @@
|
|
|
1
|
+
"""Tests for workstream.commands.sweep — plan scanning and workstream update."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from textwrap import dedent
|
|
7
|
+
|
|
8
|
+
import pytest
|
|
9
|
+
|
|
10
|
+
from workstream.commands.sweep import (
|
|
11
|
+
scan_repo_plans,
|
|
12
|
+
_match_plans_to_workstream,
|
|
13
|
+
_auto_associate_repos,
|
|
14
|
+
_build_plan_review_prompt,
|
|
15
|
+
_parse_plan_review_response,
|
|
16
|
+
_repo_matches_slug,
|
|
17
|
+
_recompute_activity_dates,
|
|
18
|
+
_auto_classify_plans,
|
|
19
|
+
)
|
|
20
|
+
from workstream.config import Config, RepoConfig
|
|
21
|
+
from workstream.markdown import load_workstream, parse_frontmatter, save_workstream
|
|
22
|
+
from workstream.models import BranchRef, PlanRef
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# ---------------------------------------------------------------------------
|
|
26
|
+
# Fixtures
|
|
27
|
+
# ---------------------------------------------------------------------------
|
|
28
|
+
|
|
29
|
+
@pytest.fixture()
def repo_with_plans(tmp_path: Path) -> Path:
    """Create a fake repo directory with .plans/ containing 2 plan files.

    One plan carries an explicit ``workstream:`` frontmatter field linking
    it to the 'api-redesign' slug; the other has frontmatter but no
    workstream link, so it is only matchable via repo-name fallback.
    """
    repo = tmp_path / 'myapp'
    repo.mkdir()
    plans_dir = repo / '.plans'
    plans_dir.mkdir()

    # Plan explicitly linked to a workstream via its frontmatter.
    (plans_dir / 'api-v2-migration.md').write_text(dedent("""\
        ---
        title: API v2 Migration
        status: active
        workstream: api-redesign
        ---

        Migration plan content.
        """), encoding='utf-8')

    # Plan with frontmatter but no 'workstream' field.
    (plans_dir / 'perf-improvements.md').write_text(dedent("""\
        ---
        title: Performance Improvements
        status: draft
        ---

        Perf plan content.
        """), encoding='utf-8')

    return repo
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@pytest.fixture()
def sweep_workstreams_dir(tmp_path: Path) -> Path:
    """Create a workstreams dir with one workstream that should match plans.

    The workstream's filename slug is 'api-redesign' and its frontmatter
    lists repo 'myapp', so plans from the ``repo_with_plans`` fixture can
    be matched to it during a sweep.
    """
    ws_dir = tmp_path / 'workstreams'
    ws_dir.mkdir()

    (ws_dir / '2026-03-15-a1b2c3d4e5-api-redesign.md').write_text(dedent("""\
        ---
        id: 2026-03-15-a1b2c3d4e5
        title: API Redesign
        status: active
        size: week
        repos:
        - myapp
        created: '2026-03-15'
        updated: '2026-03-25'
        ---

        # API Redesign

        ## Thread
        ### 2026-03-25
        Working on it.

        ## Next
        - Finish migration
        """), encoding='utf-8')

    return ws_dir
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
# ---------------------------------------------------------------------------
|
|
91
|
+
# Tests
|
|
92
|
+
# ---------------------------------------------------------------------------
|
|
93
|
+
|
|
94
|
+
def test_scan_repo_plans(repo_with_plans: Path):
    """scan_repo_plans finds .plans/*.md and parses their frontmatter."""
    results = scan_repo_plans(repo_with_plans / '.plans')
    assert len(results) == 2

    # Result paths are relative to the .plans/ directory.
    paths = [r['path'] for r in results]
    assert 'api-v2-migration.md' in paths
    assert 'perf-improvements.md' in paths

    # Verify frontmatter was parsed into the 'meta' mapping.
    api_plan = next(r for r in results if r['path'] == 'api-v2-migration.md')
    assert api_plan['meta']['title'] == 'API v2 Migration'
    assert api_plan['meta']['status'] == 'active'
    assert api_plan['meta']['workstream'] == 'api-redesign'
|
|
109
|
+
|
|
110
|
+
def test_scan_repo_no_plans_dir(tmp_path: Path):
    """scan_repo_plans returns [] when .plans/ doesn't exist."""
    repo_root = tmp_path / 'empty-repo'
    repo_root.mkdir()
    missing_plans_dir = repo_root / '.plans'
    assert scan_repo_plans(missing_plans_dir) == []
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def test_match_plans_by_workstream_field(repo_with_plans: Path):
    """Plans with a workstream field matching the slug get matched."""
    scanned = scan_repo_plans(repo_with_plans / '.plans')
    matched = _match_plans_to_workstream(
        'API Redesign',
        'api-redesign',
        '2026-03-15-a1b2c3d4e5',
        ['myapp'],
        'myapp',
        scanned,
    )
    # One plan matches via its workstream field (slug), the other via the
    # repo-name fallback — both should come back as PlanRef instances.
    assert len(matched) == 2
    for ref in matched:
        assert isinstance(ref, PlanRef)
    by_path = {p.path: p for p in matched}
    api_ref = by_path['api-v2-migration.md']
    assert api_ref.repo == 'myapp'
    assert api_ref.status == 'active'
|
|
130
|
+
|
|
131
|
+
def test_sweep_merges_explicit_and_discovered_repos(
    sweep_workstreams_dir: Path, repo_with_plans: Path, tmp_path: Path, monkeypatch
):
    """Explicit config.repos and auto-discovered repo_dirs repos are both scanned.

    Previously, having any entries in config.repos would skip auto-discovery
    entirely, causing plans in non-explicit repos to be invisible.
    """
    # Create a second repo that will be "auto-discovered"
    discovered_repo = tmp_path / 'repos' / 'other-app'
    discovered_repo.mkdir(parents=True)
    (discovered_repo / '.git').mkdir()  # fake git repo for discovery
    plans_dir = discovered_repo / '.plans'
    plans_dir.mkdir()
    (plans_dir / 'extra-plan.md').write_text(
        '---\ntitle: Extra Plan\nstatus: draft\n---\n\nExtra content.',
        encoding='utf-8',
    )

    # Config has explicit repo + repo_dirs for discovery
    config = Config(
        workstreams_dir=str(sweep_workstreams_dir),
        repos=[RepoConfig(path=str(repo_with_plans), name='myapp')],
        repo_dirs=[str(tmp_path / 'repos')],
    )
    # Stub out branch scanning — the fixture repos are not real git checkouts.
    monkeypatch.setattr('workstream.commands.sweep._scan_repo_branches', lambda p: [])

    from workstream.commands.sweep import _sweep_handler
    _sweep_handler(config=config, discover=False)

    # Reload the workstream — it should have plans from both repos.
    # The explicit repo (myapp) contributes 2 plans matched by ws.repos.
    # The discovered repo (other-app) has no workstream match, so it won't
    # appear in the workstream's plans section, but it SHOULD be scanned.
    # Verify by checking that sweep didn't error and the explicit repo still works.
    ws_file = sweep_workstreams_dir / '2026-03-15-a1b2c3d4e5-api-redesign.md'
    ws = load_workstream(ws_file)
    assert len(ws.plans) == 2  # myapp's 2 plans still matched
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def test_sweep_updates_plans_section(
    sweep_workstreams_dir: Path, repo_with_plans: Path, tmp_path: Path, monkeypatch
):
    """Full sweep logic: load ws, scan repo, match plans, save updated file."""
    # Set up config to point at our fixtures
    config = Config(
        workstreams_dir=str(sweep_workstreams_dir),
        repos=[RepoConfig(path=str(repo_with_plans), name='myapp')],
    )
    # Stub out branch scanning (no real git repo)
    monkeypatch.setattr('workstream.commands.sweep._scan_repo_branches', lambda p: [])

    from workstream.commands.sweep import _sweep_handler
    _sweep_handler(config=config, discover=False)

    # Reload the workstream and verify Plans section was updated on disk.
    ws_file = sweep_workstreams_dir / '2026-03-15-a1b2c3d4e5-api-redesign.md'
    ws = load_workstream(ws_file)
    assert len(ws.plans) == 2
    plan_paths = [p.path for p in ws.plans]
    assert 'api-v2-migration.md' in plan_paths
    assert 'perf-improvements.md' in plan_paths
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def test_sweep_idempotent(
    sweep_workstreams_dir: Path, repo_with_plans: Path, tmp_path: Path, monkeypatch, capsys
):
    """Running sweep twice with no changes produces 0 updates on the second run.

    Uses pytest's ``capsys`` fixture instead of monkeypatching ``sys.stdout``
    with a StringIO: replacing sys.stdout wholesale fights pytest's own
    capture machinery, whereas capsys is the supported way to read printed
    output.
    """
    config = Config(
        workstreams_dir=str(sweep_workstreams_dir),
        repos=[RepoConfig(path=str(repo_with_plans), name='myapp')],
    )
    # Stub out branch scanning (no real git repo in the fixture).
    monkeypatch.setattr('workstream.commands.sweep._scan_repo_branches', lambda p: [])

    from workstream.commands.sweep import _sweep_handler

    # First sweep: should update (plans discovered).
    _sweep_handler(config=config, discover=False)
    capsys.readouterr()  # discard first-run output

    # Second sweep: nothing changed, should report 0 updates.
    _sweep_handler(config=config, discover=False)
    output = capsys.readouterr().out

    assert 'Sweep complete: 0 workstream(s) updated.' in output, (
        f'Expected 0 updates on idempotent sweep, got:\n{output}'
    )
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
# ---------------------------------------------------------------------------
|
|
225
|
+
# Phase 1: HTML comment status markers
|
|
226
|
+
# ---------------------------------------------------------------------------
|
|
227
|
+
|
|
228
|
+
def test_scan_repo_plans_html_comment_status(tmp_path: Path):
    """Legacy plans with <!-- STATUS: FINALIZED --> get meta['status'] parsed."""
    plans_dir = tmp_path / 'legacy-repo' / '.plans'
    plans_dir.mkdir(parents=True)

    (plans_dir / 'old-plan.md').write_text(
        '<!-- STATUS: FINALIZED 2025-12-15T10:30:00Z -->\n\n# Old Plan\n\nContent here.',
        encoding='utf-8',
    )
    scanned = scan_repo_plans(plans_dir)
    assert len(scanned) == 1
    meta = scanned[0]['meta']
    assert meta['status'] == 'active'  # FINALIZED maps to 'active'
    assert meta['finalized_at'] == '2025-12-15T10:30:00Z'
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
def test_scan_repo_plans_html_comment_no_timestamp(tmp_path: Path):
    """HTML comment status without timestamp works."""
    plans_dir = tmp_path / 'repo' / '.plans'
    plans_dir.mkdir(parents=True)

    (plans_dir / 'draft.md').write_text(
        '<!-- STATUS: DRAFT -->\n\n# Draft Plan',
        encoding='utf-8',
    )
    meta = scan_repo_plans(plans_dir)[0]['meta']
    assert meta['status'] == 'draft'
    # No timestamp in the marker means no finalized_at key at all.
    assert 'finalized_at' not in meta
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def test_scan_repo_plans_html_comment_finalized_maps_to_active(tmp_path: Path):
    """FINALIZED in HTML comment is mapped to 'active' (approved, not implemented)."""
    plans_dir = tmp_path / 'repo' / '.plans'
    plans_dir.mkdir(parents=True)

    (plans_dir / 'plan.md').write_text(
        '<!-- STATUS: FINALIZED -->\n\n# Finalized Plan',
        encoding='utf-8',
    )
    scanned = scan_repo_plans(plans_dir)
    assert scanned[0]['meta']['status'] == 'active'
|
|
274
|
+
|
|
275
|
+
|
|
276
|
+
def test_scan_repo_plans_no_status(tmp_path: Path):
    """Plans with neither frontmatter nor HTML comment → empty meta."""
    plans_dir = tmp_path / 'repo' / '.plans'
    plans_dir.mkdir(parents=True)

    (plans_dir / 'bare.md').write_text('# Just a Plan\n\nNo status markers.', encoding='utf-8')
    scanned = scan_repo_plans(plans_dir)
    assert scanned[0]['meta'] == {}
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
# ---------------------------------------------------------------------------
|
|
289
|
+
# Phase 1: Auto-associate repos
|
|
290
|
+
# ---------------------------------------------------------------------------
|
|
291
|
+
|
|
292
|
+
def test_auto_associate_repos_exact_match(tmp_path: Path):
    """Workstream 'fixlog' + repo 'fixlog' → ws.repos == ['fixlog']."""
    from workstream.models import Workstream
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()
    ws = Workstream(id='2026-01-01-abc', title='FixLog', status='active')
    # Slug ('fixlog') is derived from the filename suffix after the id.
    ws.source_path = ws_dir / f'{ws.id}-fixlog.md'
    save_workstream(ws, ws.source_path)

    count = _auto_associate_repos([ws], ['fixlog', 'other-repo'])
    assert count == 1
    assert ws.repos == ['fixlog']

    # Verify it was persisted to disk, not just mutated in memory.
    reloaded = load_workstream(ws.source_path)
    assert reloaded.repos == ['fixlog']
|
|
308
|
+
|
|
309
|
+
|
|
310
|
+
def test_auto_associate_repos_no_false_positives(tmp_path: Path):
    """Short slug 'top' should NOT match repo 'laptop'."""
    from workstream.models import Workstream

    ws = Workstream(id='2026-01-01-abc', title='Top', status='active')
    # No source_path — nothing to save, and nothing should match either.
    assert _auto_associate_repos([ws], ['laptop', 'desktop']) == 0
    assert ws.repos == []
|
|
318
|
+
|
|
319
|
+
|
|
320
|
+
def test_auto_associate_repos_skips_already_set(tmp_path: Path):
    """Workstreams with existing repos are not modified."""
    from workstream.models import Workstream

    ws = Workstream(id='2026-01-01-abc', title='FixLog', status='active', repos=['custom-repo'])
    # Even though 'fixlog' would match the title, the existing list wins.
    assert _auto_associate_repos([ws], ['fixlog']) == 0
    assert ws.repos == ['custom-repo']
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
def test_auto_associate_repos_skips_repos_explicit(tmp_path: Path):
    """Workstreams with _repos_explicit=True and empty repos are not auto-associated."""
    from workstream.models import Workstream

    ws = Workstream(
        id='2026-01-01-abc',
        title='FinFam Fundraising',
        status='active',
        repos=[],
        _repos_explicit=True,
    )
    # An explicitly-empty repos list is an opt-out, not a blank slate.
    assert _auto_associate_repos([ws], ['finfam']) == 0
    assert ws.repos == []
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def test_auto_associate_repos_runs_when_not_explicit(tmp_path: Path):
    """Workstreams with _repos_explicit=False and empty repos get auto-associated."""
    from workstream.models import Workstream
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()
    ws = Workstream(
        id='2026-01-01-abc', title='FinFam Fundraising', status='active',
        repos=[], _repos_explicit=False,
    )
    ws.source_path = ws_dir / f'{ws.id}-finfam-fundraising.md'
    save_workstream(ws, ws.source_path)
    count = _auto_associate_repos([ws], ['finfam'])
    # 'finfam' matches slug 'finfam-fundraising' via prefix
    assert count == 1
    assert 'finfam' in ws.repos
|
|
356
|
+
|
|
357
|
+
# ---------------------------------------------------------------------------
|
|
358
|
+
# Phase 1: Discover repos fallback
|
|
359
|
+
# ---------------------------------------------------------------------------
|
|
360
|
+
|
|
361
|
+
def test_sweep_discovers_repos_from_repo_dirs(
    sweep_workstreams_dir: Path, repo_with_plans: Path, tmp_path: Path, monkeypatch
):
    """When config.repos is empty, repos are discovered from repo_dirs."""
    config = Config(
        workstreams_dir=str(sweep_workstreams_dir),
        repos=[],  # empty!
        repo_dirs=[str(tmp_path)],  # must be non-empty to trigger discovery
    )
    monkeypatch.setattr('workstream.commands.sweep._scan_repo_branches', lambda p: [])
    # Mock discover_repos to return our fixture repo
    monkeypatch.setattr(
        'workstream.repo_discovery.discover_repos',
        lambda dirs: [repo_with_plans],
    )

    from workstream.commands.sweep import _sweep_handler
    _sweep_handler(config=config, discover=False)

    # The workstream should still get plans matched (repo name = 'myapp',
    # taken from the discovered repo's directory name).
    ws_file = sweep_workstreams_dir / '2026-03-15-a1b2c3d4e5-api-redesign.md'
    ws = load_workstream(ws_file)
    assert len(ws.plans) == 2
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
def test_sweep_deduplicates_same_remote_repos(
    sweep_workstreams_dir: Path, tmp_path: Path, monkeypatch
):
    """Repos sharing the same remote URL are deduplicated — keep more plans."""
    # Create two 'repos' with .plans/ dirs of different sizes
    repo_a = tmp_path / 'myapp'
    repo_a.mkdir()
    plans_a = repo_a / '.plans'
    plans_a.mkdir()
    (plans_a / 'plan1.md').write_text('---\ntitle: P1\nstatus: active\nworkstream: api-redesign\n---\n', encoding='utf-8')
    (plans_a / 'plan2.md').write_text('---\ntitle: P2\nstatus: draft\n---\n', encoding='utf-8')

    repo_b = tmp_path / 'myapp-clone'
    repo_b.mkdir()
    plans_b = repo_b / '.plans'
    plans_b.mkdir()
    (plans_b / 'plan1.md').write_text('---\ntitle: P1\nstatus: active\nworkstream: api-redesign\n---\n', encoding='utf-8')

    config = Config(
        workstreams_dir=str(sweep_workstreams_dir),
        repos=[],
    )
    monkeypatch.setattr('workstream.commands.sweep._scan_repo_branches', lambda p: [])
    monkeypatch.setattr(
        'workstream.repo_discovery.discover_repos',
        lambda dirs: [repo_a, repo_b],
    )
    # Both repos return the same remote URL — they are clones of each other.
    monkeypatch.setattr('workstream.git.remote_url', lambda path, remote='origin': 'git@github.com:test/myapp.git')

    from workstream.commands.sweep import _sweep_handler
    _sweep_handler(config=config, discover=False)

    # repo_a has 2 plans, repo_b has 1 — repo_a wins the dedup.
    ws_file = sweep_workstreams_dir / '2026-03-15-a1b2c3d4e5-api-redesign.md'
    ws = load_workstream(ws_file)
    # Only plans from myapp (the winner), not myapp-clone
    assert all(p.repo == 'myapp' for p in ws.plans)
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
# ---------------------------------------------------------------------------
|
|
427
|
+
# Phase 3: Plan review prompt and response parsing
|
|
428
|
+
# ---------------------------------------------------------------------------
|
|
429
|
+
|
|
430
|
+
def test_build_plan_review_prompt():
    """Prompt includes plan content, git context, and workstream context."""
    prompt = _build_plan_review_prompt(
        ws_title='FixLog',
        ws_context='Status: active\n [2026-03-25] Working on auth',
        git_context='Repo: fixlog, default branch: main\nRecent commits:\n 2026-03-25 Add auth module',
        plan_entries=[
            # 'marker' carries the legacy status annotation, if any.
            {'filename': 'auth-plan.md', 'marker': 'finalized 2026-03-20', 'content': 'Add authentication...'},
            {'filename': 'db-migration.md', 'marker': '', 'content': 'Migrate to postgres...'},
        ],
    )
    # Every input fragment should be surfaced somewhere in the prompt text.
    assert 'FixLog' in prompt
    assert 'auth-plan.md' in prompt
    assert 'db-migration.md' in prompt
    assert 'Add authentication' in prompt
    assert 'Add auth module' in prompt
    assert 'finalized 2026-03-20' in prompt
|
|
447
|
+
|
|
448
|
+
|
|
449
|
+
def test_build_plan_review_prompt_prefers_summary():
    """When plan has a summary in meta, prompt uses that, not full content."""
    entries = [
        {'filename': 'plan.md', 'marker': '', 'content': 'This is the summary from meta'},
    ]
    prompt = _build_plan_review_prompt(
        ws_title='Test',
        ws_context='Status: active',
        git_context='Repo: test',
        plan_entries=entries,
    )
    assert 'This is the summary from meta' in prompt
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
def test_parse_plan_review_response_valid():
    """Valid JSON array of classifications."""
    response = '''[
{"plan": "auth-plan.md", "status": "implemented", "reason": "Commits on 2026-03-25", "ideas": []},
{"plan": "db-migration.md", "status": "active", "reason": "No commits yet", "ideas": ["Consider using Alembic"]}
]'''
    parsed = _parse_plan_review_response(response)
    assert len(parsed) == 2
    first, second = parsed
    assert first['plan'] == 'auth-plan.md'
    assert first['status'] == 'implemented'
    assert first['ideas'] == []
    assert second['status'] == 'active'
    assert second['ideas'] == ['Consider using Alembic']
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
def test_parse_plan_review_response_with_fences():
    """JSON wrapped in markdown code fences."""
    fenced = '```json\n[{"plan": "x.md", "status": "obsolete", "reason": "replaced", "ideas": []}]\n```'
    parsed = _parse_plan_review_response(fenced)
    assert len(parsed) == 1
    assert parsed[0]['status'] == 'obsolete'
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
def test_parse_plan_review_response_invalid():
    """Garbage input returns empty list."""
    for garbage in ('not json at all', ''):
        assert _parse_plan_review_response(garbage) == []
|
|
491
|
+
|
|
492
|
+
|
|
493
|
+
def test_parse_plan_review_response_rejects_bad_status():
    """Entries with invalid status values are filtered out."""
    payload = '[{"plan": "x.md", "status": "unknown", "reason": "test", "ideas": []}]'
    # 'unknown' is not a recognized classification status.
    assert _parse_plan_review_response(payload) == []
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
def test_review_dispatches_ideas(tmp_path: Path, monkeypatch):
    """Review handler dispatches ideas from classifications to workstream files."""
    from unittest.mock import MagicMock
    from workstream.models import Workstream

    # Set up workstream
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()
    ws = Workstream(
        id='2026-01-01-abc', title='FixLog', status='active',
        repos=['fixlog'],
    )
    ws.source_path = ws_dir / f'{ws.id}-fixlog.md'
    save_workstream(ws, ws.source_path)

    # Set up fake repo with a plan
    repo_path = tmp_path / 'fixlog'
    repo_path.mkdir()
    plans_dir = repo_path / '.plans'
    plans_dir.mkdir()
    (plans_dir / 'auth-plan.md').write_text('# Auth Plan\nAdd authentication.', encoding='utf-8')

    # Shape mirrors what _sweep_handler hands to the review phase.
    repo_data = {
        'fixlog': {
            'plans': [{'path': 'auth-plan.md', 'meta': {'status': 'finalized'}}],
            'branches': [],
            'path': repo_path,
            'plans_dir': plans_dir,
        },
    }

    # Mock LLM agent — its prompt() reply is the classification JSON.
    mock_agent = MagicMock()
    mock_agent.prompt.return_value = '''[{
"plan": "auth-plan.md",
"status": "implemented",
"reason": "Auth module committed 2026-03-25",
"ideas": ["Add OAuth2 support", "Rate limiting for auth endpoints"]
}]'''

    mock_config = MagicMock()
    mock_config.get_llm_agent.return_value = 'test'
    mock_config.llm_discover_model = None

    monkeypatch.setattr('workstream.llm.LLMAgent', lambda name: mock_agent)

    from workstream.commands.sweep import _review_plans_handler
    _review_plans_handler(mock_config, [ws], repo_data, ws_dir, batch=True)

    # Reload and check ideas were dispatched into the workstream file.
    reloaded = load_workstream(ws.source_path)
    idea_texts = [i.text for i in reloaded.ideas]
    assert 'Add OAuth2 support' in idea_texts
    assert 'Rate limiting for auth endpoints' in idea_texts

    # Check log entries: one for the review itself, one per dispatched idea.
    log_events = [l.event for l in reloaded.log]
    assert 'plan-reviewed' in log_events
    assert 'plan-idea' in log_events
|
|
558
|
+
|
|
559
|
+
|
|
560
|
+
def test_review_logs_all_classifications(tmp_path: Path, monkeypatch):
    """Every plan gets a log entry regardless of status or ideas."""
    from unittest.mock import MagicMock
    from workstream.models import Workstream

    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()
    ws = Workstream(
        id='2026-01-01-abc', title='TestWS', status='active',
        repos=['testrepo'],
    )
    ws.source_path = ws_dir / f'{ws.id}-testws.md'
    save_workstream(ws, ws.source_path)

    repo_path = tmp_path / 'testrepo'
    repo_path.mkdir()
    plans_dir = repo_path / '.plans'
    plans_dir.mkdir()
    (plans_dir / 'a.md').write_text('# Plan A', encoding='utf-8')
    (plans_dir / 'b.md').write_text('# Plan B', encoding='utf-8')

    repo_data = {
        'testrepo': {
            'plans': [
                {'path': 'a.md', 'meta': {}},
                {'path': 'b.md', 'meta': {}},
            ],
            'branches': [],
            'path': repo_path,
            'plans_dir': plans_dir,
        },
    }

    # Agent classifies the two plans with different statuses and no ideas —
    # a log entry must still be written for each.
    mock_agent = MagicMock()
    mock_agent.prompt.return_value = '''[
{"plan": "a.md", "status": "implemented", "reason": "done", "ideas": []},
{"plan": "b.md", "status": "obsolete", "reason": "replaced", "ideas": []}
]'''

    mock_config = MagicMock()
    mock_config.get_llm_agent.return_value = 'test'
    mock_config.llm_discover_model = None

    monkeypatch.setattr('workstream.llm.LLMAgent', lambda name: mock_agent)

    from workstream.commands.sweep import _review_plans_handler
    _review_plans_handler(mock_config, [ws], repo_data, ws_dir, batch=True)

    reloaded = load_workstream(ws.source_path)
    review_logs = [l for l in reloaded.log if l.event == 'plan-reviewed']
    assert len(review_logs) == 2
    # Log entries preserve classification order and name the plan file.
    assert 'a.md' in review_logs[0].detail
    assert 'b.md' in review_logs[1].detail
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
# ---------------------------------------------------------------------------
|
|
616
|
+
# _repo_matches_slug
|
|
617
|
+
# ---------------------------------------------------------------------------
|
|
618
|
+
|
|
619
|
+
class TestRepoMatchesSlug:
    """Unit tests for the slug-based repo matching helper.

    Matching rules exercised below: exact equality always matches; a
    dash-delimited prefix matches in either direction, but only when the
    shorter side is long enough (>= 4 chars) to avoid false positives;
    bare substrings without a dash boundary never match.
    """

    def test_exact_match(self):
        assert _repo_matches_slug('workstream', 'workstream') is True

    def test_prefix_repo_shorter(self):
        # repo 'workstream' matches slug 'workstream-tool'
        assert _repo_matches_slug('workstream', 'workstream-tool') is True

    def test_prefix_slug_shorter(self):
        # slug 'workstream' matches repo 'workstream-cli'
        assert _repo_matches_slug('workstream-cli', 'workstream') is True

    def test_no_match_unrelated(self):
        assert _repo_matches_slug('django', 'workstream') is False

    def test_no_substring_without_dash(self):
        # 'stream' is a substring of 'workstream' but not a word-prefix
        assert _repo_matches_slug('workstream', 'stream') is False

    def test_short_slug_exact_match(self):
        # Short names still match exactly
        assert _repo_matches_slug('bq', 'bq') is True

    def test_short_slug_no_false_positive(self):
        # 'bq' should NOT match 'bq-server' (too short for prefix)
        assert _repo_matches_slug('bq-server', 'bq') is False

    def test_short_repo_no_false_positive(self):
        # short repo name 'top' should NOT match slug 'top-server'
        assert _repo_matches_slug('top', 'top-server') is False

    def test_one_side_long_enough(self):
        # repo 'tool' (4 chars) matches slug 'tool-cli' via prefix
        assert _repo_matches_slug('tool', 'tool-cli') is True

    def test_both_long_enough_prefix(self):
        # Both >= 4 chars, prefix match in either direction
        assert _repo_matches_slug('infra', 'infra-deploy') is True
        assert _repo_matches_slug('infra-deploy', 'infra') is True
|
|
660
|
+
|
|
661
|
+
|
|
662
|
+
# ---------------------------------------------------------------------------
|
|
663
|
+
# _auto_associate_repos with prefix matching
|
|
664
|
+
# ---------------------------------------------------------------------------
|
|
665
|
+
|
|
666
|
+
def test_auto_associate_repos_prefix_match(tmp_path: Path):
    """A 'Workstream Tool' stream gains repo 'workstream' via prefix matching."""
    from workstream.models import Workstream

    stream_dir = tmp_path / 'ws'
    stream_dir.mkdir()
    stream = Workstream(id='2026-01-01-abc', title='Workstream Tool', status='active')
    stream.source_path = stream_dir / f'{stream.id}-workstream-tool.md'
    save_workstream(stream, stream.source_path)

    # Only 'workstream' prefix-matches the stream's slug; 'other-repo' does not.
    associated = _auto_associate_repos([stream], ['workstream', 'other-repo'])

    assert associated == 1
    assert 'workstream' in stream.repos
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
def test_auto_associate_repos_short_slug_no_prefix(tmp_path: Path):
    """A short slug ('top') must not prefix-match repo 'top-server'."""
    from workstream.models import Workstream

    stream = Workstream(id='2026-01-01-abc', title='Top', status='active')

    # The slug is below the prefix-match length threshold, so nothing links.
    linked = _auto_associate_repos([stream], ['top-server'])

    assert linked == 0
    assert stream.repos == []
|
|
687
|
+
|
|
688
|
+
|
|
689
|
+
# ---------------------------------------------------------------------------
|
|
690
|
+
# _recompute_activity_dates with slug-matched repos
|
|
691
|
+
# ---------------------------------------------------------------------------
|
|
692
|
+
|
|
693
|
+
def test_recompute_activity_dates_slug_match(tmp_path: Path, monkeypatch):
    """Workstream 'Workstream Tool' picks up commits from repo 'workstream'.

    The workstream file lists no repos, so the only link to the repo is the
    slug-prefix match against the repo_paths key. A stubbed git layer reports
    a commit newer than the stored last_activity; the helper must rewrite the
    file with the newer date.
    """
    # Note: the original test imported Workstream here but never used it —
    # the workstream file is built directly from a frontmatter dict.
    from workstream.markdown import write_frontmatter

    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    # Workstream file with NO repos and a stale last_activity.
    meta = {
        'id': '2026-01-01-abc',
        'title': 'Workstream Tool',
        'status': 'active',
        'repos': [],
        'first_activity': '2026-01-01',
        'last_activity': '2026-03-28',
        'created': '2026-01-01',
    }
    body = '\n# Workstream Tool\n\n## Thread\n### 2026-01-01\nStarted.\n'
    ws_file = ws_dir / '2026-01-01-abc-workstream-tool.md'
    ws_file.write_text(write_frontmatter(meta, body), encoding='utf-8')

    # Fake repo dir whose name prefix-matches the slug; stub git to report a
    # commit dated after the stored last_activity.
    repo_dir = tmp_path / 'workstream'
    repo_dir.mkdir()
    monkeypatch.setattr(
        'workstream.git.recent_commits',
        lambda rpath, since: [{'date': '2026-03-31', 'subject': 'fix'}],
    )

    _recompute_activity_dates(ws_dir, repo_paths={'workstream': repo_dir})

    # The file on disk must now carry the newer commit date.
    updated_meta, _ = parse_frontmatter(ws_file.read_text(encoding='utf-8'))
    assert updated_meta['last_activity'] == '2026-03-31'
|
|
729
|
+
|
|
730
|
+
|
|
731
|
+
def test_recompute_activity_dates_explicit_repos(tmp_path: Path, monkeypatch):
    """Explicit ws.repos still works — both explicit and slug-matched contribute.

    Here the repo is listed in the workstream's own `repos` frontmatter, so
    the activity recompute must pick up the stubbed commit date even without
    any slug matching.
    """
    # Note: the original test imported Workstream here but never used it —
    # the workstream file is built directly from a frontmatter dict.
    from workstream.markdown import write_frontmatter

    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    meta = {
        'id': '2026-01-01-abc',
        'title': 'My App',
        'status': 'active',
        'repos': ['myapp'],
        'first_activity': '2026-01-01',
        'last_activity': '2026-03-01',
        'created': '2026-01-01',
    }
    body = '\n# My App\n\n## Thread\n### 2026-01-01\nStarted.\n'
    ws_file = ws_dir / '2026-01-01-abc-my-app.md'
    ws_file.write_text(write_frontmatter(meta, body), encoding='utf-8')

    # Stub git so the explicitly associated repo reports a newer commit.
    repo_dir = tmp_path / 'myapp'
    repo_dir.mkdir()
    monkeypatch.setattr(
        'workstream.git.recent_commits',
        lambda rpath, since: [{'date': '2026-03-30', 'subject': 'update'}],
    )

    _recompute_activity_dates(ws_dir, repo_paths={'myapp': repo_dir})

    updated_meta, _ = parse_frontmatter(ws_file.read_text(encoding='utf-8'))
    assert updated_meta['last_activity'] == '2026-03-30'
|
|
764
|
+
|
|
765
|
+
|
|
766
|
+
def test_match_plans_carries_metadata(repo_with_plans: Path):
    """Matched PlanRefs carry plan_type and workstream_guidance from frontmatter."""
    plans_dir = repo_with_plans / '.plans'

    # Seed a plan whose frontmatter includes both metadata fields.
    (plans_dir / 'typed-plan.md').write_text(
        '---\ntitle: Typed Plan\nstatus: active\nworkstream: api-redesign\n'
        'plan_type: project\nworkstream_guidance: new-peer\n---\n\nContent.',
        encoding='utf-8',
    )

    scanned = scan_repo_plans(plans_dir)
    result = _match_plans_to_workstream(
        'API Redesign', 'api-redesign', '2026-03-15-a1b2c3d4e5',
        ['myapp'], 'myapp', scanned,
    )

    ref = next(p for p in result if p.path == 'typed-plan.md')
    assert ref.plan_type == 'project'
    assert ref.guidance == 'new-peer'
|
|
783
|
+
|
|
784
|
+
|
|
785
|
+
def test_match_plans_no_metadata_defaults_empty(repo_with_plans: Path):
    """Legacy plans without metadata fields default plan_type/guidance to ''."""
    scanned = scan_repo_plans(repo_with_plans / '.plans')
    result = _match_plans_to_workstream(
        'API Redesign', 'api-redesign', '2026-03-15-a1b2c3d4e5',
        ['myapp'], 'myapp', scanned,
    )

    # The fixture's api plan carries neither plan_type nor guidance.
    ref = next(p for p in result if p.path == 'api-v2-migration.md')
    assert ref.plan_type == ''
    assert ref.guidance == ''
|
|
795
|
+
|
|
796
|
+
|
|
797
|
+
def test_match_plans_title_match(tmp_path: Path):
    """A plan whose workstream field equals the workstream title matches."""
    candidates = [{'path': 'plan.md', 'meta': {'workstream': 'FinFam Product', 'status': 'active', 'title': 'X'}}]
    result = _match_plans_to_workstream(
        'FinFam Product', 'finfam-product', '2026-01-01-abc',
        ['finfam'], 'finfam', candidates,
    )
    # Exactly the one plan comes back.
    assert [p.path for p in result] == ['plan.md']
|
|
806
|
+
|
|
807
|
+
|
|
808
|
+
def test_match_plans_title_rejects_wrong_workstream():
    """A plan bound to one workstream must not leak into a different one."""
    candidates = [{'path': 'plan.md', 'meta': {'workstream': 'FinFam Product', 'status': 'active', 'title': 'X'}}]
    result = _match_plans_to_workstream(
        'FinFam Fundraising', 'finfam-fundraising', '2026-01-02-def',
        ['finfam'], 'finfam', candidates,
    )
    assert result == []
|
|
816
|
+
|
|
817
|
+
|
|
818
|
+
def test_match_plans_by_id():
    """A workstream field holding the workstream ID matches by ID."""
    candidates = [{'path': 'plan.md', 'meta': {'workstream': '2026-01-01-abc', 'status': 'draft', 'title': 'Y'}}]
    result = _match_plans_to_workstream(
        'FinFam Product', 'finfam-product', '2026-01-01-abc',
        ['finfam'], 'finfam', candidates,
    )
    assert len(result) == 1
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
def test_match_plans_by_slug():
    """A workstream field holding the slug matches via slugification."""
    candidates = [{'path': 'plan.md', 'meta': {'workstream': 'finfam-product', 'status': 'active', 'title': 'Z'}}]
    result = _match_plans_to_workstream(
        'FinFam Product', 'finfam-product', '2026-01-01-abc',
        ['finfam'], 'finfam', candidates,
    )
    assert len(result) == 1
|
|
836
|
+
|
|
837
|
+
|
|
838
|
+
def test_match_plans_no_field_repo_match():
    """Without a workstream field, the plan falls back to repo matching."""
    candidates = [{'path': 'plan.md', 'meta': {'status': 'active', 'title': 'Z'}}]
    result = _match_plans_to_workstream(
        'FinFam Product', 'finfam-product', '2026-01-01-abc',
        ['finfam'], 'finfam', candidates,
    )
    # 'finfam' is in the workstream's repos, so the fallback accepts it.
    assert len(result) == 1
|
|
846
|
+
|
|
847
|
+
|
|
848
|
+
def test_match_plans_no_field_repo_no_match():
    """Without a workstream field and with a foreign repo, the plan is excluded."""
    candidates = [{'path': 'plan.md', 'meta': {'status': 'active', 'title': 'Z'}}]
    result = _match_plans_to_workstream(
        'Unrelated WS', 'unrelated-ws', '2026-01-01-xyz',
        ['other-repo'], 'finfam', candidates,
    )
    # The plan's repo ('finfam') is not among the workstream's repos.
    assert result == []
|
|
856
|
+
|
|
857
|
+
def test_auto_classify_writes_implemented(tmp_path: Path):
    """_auto_classify_plans writes status=implemented to plan file frontmatter.

    The plan carries the 'likely-implemented' signal, so the classifier must
    both return it with the new status and persist status/reviewed/review_note
    into the plan file on disk.
    """
    plans_dir = tmp_path / '.plans'
    plans_dir.mkdir()
    plan_file = plans_dir / 'feature.md'
    plan_file.write_text(
        '---\ntitle: Feature\nstatus: active\n---\n\nPlan content.',
        encoding='utf-8',
    )

    plan = PlanRef(
        repo='myrepo', path='feature.md', status='active',
        title='Feature', date='2026-03-30', signal='likely-implemented',
    )
    repo_data = {'myrepo': {'plans_dir': plans_dir, 'path': tmp_path}}

    classified = _auto_classify_plans([plan], repo_data)

    assert len(classified) == 1
    assert classified[0].status == 'implemented'

    # Verify the file was updated with the new status plus an audit trail.
    # (parse_frontmatter is available at module level; the redundant local
    # import was removed.)
    meta, _ = parse_frontmatter(plan_file.read_text(encoding='utf-8'))
    assert meta['status'] == 'implemented'
    assert 'reviewed' in meta
    assert 'review_note' in meta
|
|
884
|
+
|
|
885
|
+
|
|
886
|
+
def test_auto_classify_skips_non_signal(tmp_path: Path):
    """_auto_classify_plans skips plans without the likely-implemented signal.

    A 'stale' signal must not trigger classification: nothing is returned and
    the plan file on disk stays untouched.
    """
    plans_dir = tmp_path / '.plans'
    plans_dir.mkdir()
    plan_file = plans_dir / 'stale.md'
    plan_file.write_text(
        '---\ntitle: Stale\nstatus: active\n---\n\nContent.',
        encoding='utf-8',
    )

    plan = PlanRef(
        repo='myrepo', path='stale.md', status='active',
        title='Stale', date='2026-03-30', signal='stale',
    )
    repo_data = {'myrepo': {'plans_dir': plans_dir, 'path': tmp_path}}

    classified = _auto_classify_plans([plan], repo_data)
    assert classified == []

    # File should be unchanged. (parse_frontmatter is available at module
    # level; the redundant local import was removed.)
    meta, _ = parse_frontmatter(plan_file.read_text(encoding='utf-8'))
    assert meta['status'] == 'active'
|
|
909
|
+
|
|
910
|
+
|
|
911
|
+
def test_find_matching_branches():
    """Only branches matching the slugified plan title are returned."""
    from workstream.commands.sweep import _find_matching_branches

    candidates = [
        BranchRef(repo='r', branch='refactor-config-parser', ahead=3),
        BranchRef(repo='r', branch='feature-unrelated', ahead=1),
    ]
    hits = _find_matching_branches('Refactor Config Parser', candidates)

    assert [b.branch for b in hits] == ['refactor-config-parser']
|
|
921
|
+
|
|
922
|
+
|
|
923
|
+
def test_find_matching_branches_short_title():
    """A title whose slug is shorter than 4 chars never matches a branch."""
    from workstream.commands.sweep import _find_matching_branches

    candidates = [BranchRef(repo='r', branch='fix-ab-thing', ahead=1)]
    # Slug 'ab' is below the length threshold, so no match is attempted.
    assert _find_matching_branches('Ab', candidates) == []
|
|
929
|
+
|
|
930
|
+
|
|
931
|
+
def test_build_sweep_review_manifest_triage(tmp_path: Path):
    """The manifest renders findings-scoped sections for active workstreams."""
    from workstream.commands.sweep import _build_sweep_review_manifest, SweepFindings
    from workstream.models import Workstream, BranchRef

    # One plan of each flavor: project (with a matching branch), tactical,
    # and one carrying a stale signal.
    proj_plan = PlanRef(repo='myrepo', path='proj.md', status='active',
                        title='Big Project', date='2026-03-30',
                        plan_type='project', guidance='new-peer')
    tac_plan = PlanRef(repo='myrepo', path='tac.md', status='active',
                       title='Quick Fix', date='2026-03-30',
                       plan_type='tactical', guidance='keep-current')
    stale_plan = PlanRef(repo='myrepo', path='stale.md', status='active',
                         title='Old Thing', date='2026-01-01',
                         signal='stale')

    stream = Workstream(
        id='2026-01-01-abc',
        title='Test Stream',
        status='active',
        repos=['myrepo'],
        plans=[proj_plan, tac_plan, stale_plan],
        branches=[BranchRef(repo='myrepo', branch='big-project', ahead=5)],
    )

    repo_data = {
        'myrepo': {
            'plans': [], 'branches': [],
            'path': tmp_path, 'plans_dir': tmp_path / '.plans',
        },
    }
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    findings = {
        '2026-01-01-abc': SweepFindings(
            newly_matched=[proj_plan, tac_plan],
            new_signals=[stale_plan],
            auto_classified=[],
        ),
    }

    manifest = _build_sweep_review_manifest([stream], repo_data, ws_dir,
                                            findings=findings, cutoff='2026-03-28')

    # Header and per-workstream section.
    assert 'Cutoff: 2026-03-28' in manifest
    assert '### Test Stream' in manifest
    # Discovered plans, including the project plan with its branch match.
    assert '**Discovered plans:**' in manifest
    assert 'proj.md' in manifest
    assert 'big-project' in manifest
    assert 'track implementation' in manifest
    # The tactical plan is tagged as such.
    assert 'tac.md' in manifest
    assert 'tactical' in manifest
    # The signals section surfaces the stale plan with a prompt.
    assert '**Signals:**' in manifest
    assert 'stale.md' in manifest
    assert 'Still relevant?' in manifest
    # Instructions mention tool access and filesystem paths for the agent.
    assert 'git log' in manifest
    assert 'Paths:' in manifest
    assert f'myrepo={tmp_path}' in manifest
|
|
1000
|
+
|
|
1001
|
+
|
|
1002
|
+
def test_build_sweep_review_manifest_with_notes(tmp_path: Path):
    """Manifest includes a Changed Notes section when notes activity exists.

    Even with no per-workstream findings, supplying changed_notes must make
    the manifest render, and both committed and modified notes must appear
    with their respective status lines.
    """
    # SweepFindings was imported here but never used — dropped.
    from workstream.commands.sweep import _build_sweep_review_manifest
    from workstream.models import Workstream

    ws = Workstream(
        id='2026-01-01-abc',
        title='Test Stream',
        status='active',
        repos=['myrepo'],
    )
    repo_data = {
        'myrepo': {
            'plans': [], 'branches': [],
            'path': tmp_path, 'plans_dir': tmp_path / '.plans',
        },
    }
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    changed_notes = [
        {'path': '/home/user/notes/sprint.md', 'name': 'sprint.md', 'status': 'committed'},
        {'path': '/home/user/notes/meeting.md', 'name': 'meeting.md', 'status': 'modified'},
    ]
    notes_root = Path('/home/user/notes')

    # No findings, but notes present — manifest should still render.
    manifest = _build_sweep_review_manifest([ws], repo_data, ws_dir,
                                            findings={}, cutoff='2026-03-28',
                                            changed_notes=changed_notes,
                                            notes_root=notes_root)

    assert '### Changed Notes' in manifest
    assert 'Notes root: /home/user/notes' in manifest
    assert 'sprint.md' in manifest
    assert 'Committed since 2026-03-28' in manifest
    assert 'meeting.md' in manifest
    assert 'Unstaged changes' in manifest
|
|
1040
|
+
|
|
1041
|
+
|
|
1042
|
+
def test_build_sweep_review_manifest_no_notes(tmp_path: Path):
    """Manifest omits the Changed Notes section when no notes activity exists.

    With neither findings nor notes, the manifest must fall back to the
    'No activity detected' message.
    """
    # SweepFindings was imported here but never used — dropped.
    from workstream.commands.sweep import _build_sweep_review_manifest
    from workstream.models import Workstream

    ws = Workstream(
        id='2026-01-01-abc',
        title='Test Stream',
        status='active',
    )
    repo_data = {}
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    manifest = _build_sweep_review_manifest([ws], repo_data, ws_dir,
                                            findings={}, cutoff='2026-03-28',
                                            changed_notes=None)

    assert '### Changed Notes' not in manifest
    assert 'No activity detected' in manifest
|
|
1062
|
+
|
|
1063
|
+
|
|
1064
|
+
# ── Idea dedup ──────────────────────────────────────────────────────────────
|
|
1065
|
+
|
|
1066
|
+
def test_idea_is_duplicate_exact():
    """An identical idea text is flagged as a duplicate."""
    from workstream.commands.sweep import _idea_is_duplicate
    from workstream.models import IdeaEntry

    prior = [IdeaEntry(date='2026-03-28', text='Add caching layer')]
    assert _idea_is_duplicate('Add caching layer', prior) is True
|
|
1071
|
+
|
|
1072
|
+
|
|
1073
|
+
def test_idea_is_duplicate_case_insensitive():
    """Duplicate detection ignores letter case."""
    from workstream.commands.sweep import _idea_is_duplicate
    from workstream.models import IdeaEntry

    prior = [IdeaEntry(date='2026-03-28', text='Add Caching Layer')]
    assert _idea_is_duplicate('add caching layer', prior) is True
|
|
1078
|
+
|
|
1079
|
+
|
|
1080
|
+
def test_idea_is_duplicate_substring():
    """A long idea containing an existing idea's text counts as a duplicate."""
    from workstream.commands.sweep import _idea_is_duplicate
    from workstream.models import IdeaEntry

    prior = [IdeaEntry(date='2026-03-28', text='Add a caching layer to reduce API latency')]
    assert _idea_is_duplicate('add a caching layer to reduce api latency by 50%', prior) is True
|
|
1085
|
+
|
|
1086
|
+
|
|
1087
|
+
def test_idea_is_duplicate_short_not_substring():
    """Short ideas (<=20 chars) only match on exact equality, not substring."""
    from workstream.commands.sweep import _idea_is_duplicate
    from workstream.models import IdeaEntry

    prior = [IdeaEntry(date='2026-03-28', text='Add cache')]
    # 'Add cache' is a substring of the new text, but too short to count.
    assert _idea_is_duplicate('Add cache layer', prior) is False
|
|
1093
|
+
|
|
1094
|
+
|
|
1095
|
+
def test_idea_is_duplicate_novel():
    """An unrelated idea is not flagged as a duplicate."""
    from workstream.commands.sweep import _idea_is_duplicate
    from workstream.models import IdeaEntry

    prior = [IdeaEntry(date='2026-03-28', text='Add caching layer')]
    assert _idea_is_duplicate('Refactor auth middleware', prior) is False
|
|
1100
|
+
|
|
1101
|
+
|
|
1102
|
+
# ── Manifest code velocity warning ────────────────────────────────────────
|
|
1103
|
+
|
|
1104
|
+
def test_manifest_code_velocity_warning_stale(tmp_path: Path):
    """A workstream with repos but a stale code_last_activity gets a warning."""
    from workstream.commands.sweep import _build_sweep_review_manifest, SweepFindings
    from workstream.models import Workstream

    stream = Workstream(
        id='2026-01-01-abc',
        title='Stale Code',
        status='active',
        repos=['myrepo'],
        code_last_activity='2026-01-15',  # well before the cutoff below
    )
    repo_data = {
        'myrepo': {
            'plans': [], 'branches': [],
            'path': tmp_path, 'plans_dir': tmp_path / '.plans',
        },
    }
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    # Empty findings still place the workstream in the manifest.
    findings = {
        '2026-01-01-abc': SweepFindings(
            newly_matched=[], new_signals=[], auto_classified=[],
        ),
    }

    manifest = _build_sweep_review_manifest([stream], repo_data, ws_dir,
                                            findings=findings, cutoff='2026-03-01')

    assert 'No code commits since 2026-01-15' in manifest
|
|
1134
|
+
|
|
1135
|
+
|
|
1136
|
+
def test_manifest_code_velocity_warning_no_code(tmp_path: Path):
    """A workstream with repos but no code_last_activity gets the no-commits note."""
    from workstream.commands.sweep import _build_sweep_review_manifest, SweepFindings
    from workstream.models import Workstream

    # No code_last_activity set at all.
    stream = Workstream(
        id='2026-01-01-abc',
        title='No Code',
        status='active',
        repos=['myrepo'],
    )
    repo_data = {
        'myrepo': {
            'plans': [], 'branches': [],
            'path': tmp_path, 'plans_dir': tmp_path / '.plans',
        },
    }
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    findings = {
        '2026-01-01-abc': SweepFindings(
            newly_matched=[], new_signals=[], auto_classified=[],
        ),
    }

    manifest = _build_sweep_review_manifest([stream], repo_data, ws_dir,
                                            findings=findings, cutoff='2026-03-01')

    assert 'No code commits detected in associated repos' in manifest
|
|
1165
|
+
|
|
1166
|
+
|
|
1167
|
+
def test_manifest_code_velocity_no_warning_when_recent(tmp_path: Path):
    """No velocity warning appears when code activity is after the cutoff."""
    from workstream.commands.sweep import _build_sweep_review_manifest, SweepFindings
    from workstream.models import Workstream

    stream = Workstream(
        id='2026-01-01-abc',
        title='Active Code',
        status='active',
        repos=['myrepo'],
        code_last_activity='2026-03-29',  # newer than the cutoff below
    )
    repo_data = {
        'myrepo': {
            'plans': [], 'branches': [],
            'path': tmp_path, 'plans_dir': tmp_path / '.plans',
        },
    }
    ws_dir = tmp_path / 'ws'
    ws_dir.mkdir()

    findings = {
        '2026-01-01-abc': SweepFindings(
            newly_matched=[], new_signals=[], auto_classified=[],
        ),
    }

    manifest = _build_sweep_review_manifest([stream], repo_data, ws_dir,
                                            findings=findings, cutoff='2026-03-01')

    assert 'No code commits' not in manifest
|