workstream-cli 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- workstream/ARCHITECTURE.md +89 -0
- workstream/__init__.py +8 -0
- workstream/cli.py +136 -0
- workstream/commands/__init__.py +0 -0
- workstream/commands/backfill.py +139 -0
- workstream/commands/block.py +93 -0
- workstream/commands/checkin.py +51 -0
- workstream/commands/cron.py +119 -0
- workstream/commands/focus_cmd.py +273 -0
- workstream/commands/idea.py +172 -0
- workstream/commands/index.py +89 -0
- workstream/commands/init.py +567 -0
- workstream/commands/inspect_cmd.py +354 -0
- workstream/commands/list_cmd.py +99 -0
- workstream/commands/nest.py +108 -0
- workstream/commands/new.py +95 -0
- workstream/commands/next_cmd.py +333 -0
- workstream/commands/report.py +190 -0
- workstream/commands/resume.py +145 -0
- workstream/commands/review.py +227 -0
- workstream/commands/serve.py +23 -0
- workstream/commands/setup.py +178 -0
- workstream/commands/show.py +123 -0
- workstream/commands/snooze.py +117 -0
- workstream/commands/stale.py +116 -0
- workstream/commands/sweep.py +1753 -0
- workstream/commands/tree.py +105 -0
- workstream/commands/update_status.py +117 -0
- workstream/config.py +322 -0
- workstream/extensions/__init__.py +0 -0
- workstream/extensions/workstream.ts +633 -0
- workstream/focus_artifact.py +157 -0
- workstream/git.py +194 -0
- workstream/harness.py +49 -0
- workstream/llm.py +78 -0
- workstream/markdown.py +501 -0
- workstream/models.py +274 -0
- workstream/plan_index.py +88 -0
- workstream/provisioning.py +196 -0
- workstream/repo_discovery.py +158 -0
- workstream/review_artifact.py +96 -0
- workstream/scripts/migrate_statuses.py +120 -0
- workstream/skills/__init__.py +0 -0
- workstream/skills/workstream_context/SKILL.md +75 -0
- workstream/skills/workstream_context/__init__.py +0 -0
- workstream/skills/workstream_focus/SKILL.md +141 -0
- workstream/skills/workstream_init/SKILL.md +86 -0
- workstream/skills/workstream_review/SKILL.md +224 -0
- workstream/skills/workstream_sweep/SKILL.md +178 -0
- workstream/sweep_state.py +93 -0
- workstream/templates/dashboard.html +382 -0
- workstream/templates/detail.html +360 -0
- workstream/templates/plan.html +210 -0
- workstream/test/__init__.py +0 -0
- workstream/test/conftest.py +221 -0
- workstream/test/fixtures/sample_sprint_note.md +10 -0
- workstream/test/fixtures/sample_workstream.md +41 -0
- workstream/test/test_backfill.py +180 -0
- workstream/test/test_batch_writeback.py +81 -0
- workstream/test/test_commands.py +938 -0
- workstream/test/test_config.py +54 -0
- workstream/test/test_focus_artifact.py +211 -0
- workstream/test/test_git.py +88 -0
- workstream/test/test_heuristics.py +136 -0
- workstream/test/test_hierarchy.py +231 -0
- workstream/test/test_init.py +452 -0
- workstream/test/test_inspect.py +143 -0
- workstream/test/test_llm.py +78 -0
- workstream/test/test_markdown.py +626 -0
- workstream/test/test_models.py +506 -0
- workstream/test/test_next.py +206 -0
- workstream/test/test_plan_index.py +83 -0
- workstream/test/test_provisioning.py +270 -0
- workstream/test/test_repo_discovery.py +181 -0
- workstream/test/test_resume.py +71 -0
- workstream/test/test_sweep.py +1196 -0
- workstream/test/test_sweep_state.py +86 -0
- workstream/test/test_thoughts.py +516 -0
- workstream/test/test_web.py +606 -0
- workstream/thoughts.py +505 -0
- workstream/web.py +444 -0
- workstream_cli-0.0.1.dist-info/LICENSE +21 -0
- workstream_cli-0.0.1.dist-info/METADATA +93 -0
- workstream_cli-0.0.1.dist-info/RECORD +86 -0
- workstream_cli-0.0.1.dist-info/WHEEL +4 -0
- workstream_cli-0.0.1.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
"""Tests for workstream.sweep_state — file change tracking for discovery sweeps."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from workstream.sweep_state import SweepState, load_sweep_state, save_sweep_state
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def test_sweep_state_empty(tmp_path: Path):
    """Loading from a directory that does not exist yields an empty state."""
    state = load_sweep_state(tmp_path / "nonexistent")
    assert state.last_sweep == ''
    assert state.files == {}
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def test_sweep_state_round_trip(tmp_path: Path):
    """Saving a state and loading it back preserves all tracked data."""
    # A real file is needed so hashing / mtime capture has something to read.
    tracked = tmp_path / "notes.md"
    tracked.write_text("hello", encoding="utf-8")

    original = SweepState(last_sweep='2026-03-28')
    original.mark_processed(tracked)
    save_sweep_state(original, tmp_path)

    loaded = load_sweep_state(tmp_path)
    assert loaded.last_sweep == '2026-03-28'

    key = str(tracked)
    assert key in loaded.files
    entry = loaded.files[key]
    assert entry.path == key
    assert entry.processed is True
    assert entry.content_hash == original.files[key].content_hash
    assert entry.mtime == original.files[key].mtime
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def test_is_changed_new_file(tmp_path: Path):
    """A file the state has never seen reports as changed."""
    fresh = tmp_path / "new.md"
    fresh.write_text("content", encoding="utf-8")
    assert SweepState().is_changed(fresh) is True
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def test_is_changed_unchanged_file(tmp_path: Path):
    """A file reports unchanged immediately after being marked processed."""
    stable = tmp_path / "stable.md"
    stable.write_text("content", encoding="utf-8")

    tracker = SweepState()
    tracker.mark_processed(stable)
    assert tracker.is_changed(stable) is False
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def test_is_changed_modified_file(tmp_path: Path):
    """Rewriting a processed file makes it report as changed again."""
    import time

    target = tmp_path / "changing.md"
    target.write_text("original", encoding="utf-8")

    tracker = SweepState()
    tracker.mark_processed(target)

    # Sleep briefly so the rewrite lands on a different mtime even on
    # filesystems with coarse timestamp resolution.
    time.sleep(0.05)
    target.write_text("modified", encoding="utf-8")

    assert tracker.is_changed(target) is True
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def test_unprocessed_files(tmp_path: Path):
    """Only files not yet marked processed are returned."""
    seen = tmp_path / "old.md"
    seen.write_text("already seen", encoding="utf-8")
    fresh_a = tmp_path / "new1.md"
    fresh_a.write_text("fresh", encoding="utf-8")
    fresh_b = tmp_path / "new2.md"
    fresh_b.write_text("also fresh", encoding="utf-8")

    tracker = SweepState()
    tracker.mark_processed(seen)

    remaining = tracker.unprocessed_files([seen, fresh_a, fresh_b])
    assert set(remaining) == {fresh_a, fresh_b}
|
|
@@ -0,0 +1,516 @@
|
|
|
1
|
+
"""Tests for workstream.thoughts — thought extraction, association, and dispatch."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from datetime import date
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from textwrap import dedent
|
|
9
|
+
|
|
10
|
+
import pytest
|
|
11
|
+
|
|
12
|
+
from workstream.markdown import load_workstream, parse_frontmatter
|
|
13
|
+
from workstream.models import ThreadEntry, Workstream
|
|
14
|
+
from workstream.thoughts import (
|
|
15
|
+
Thought,
|
|
16
|
+
ThoughtDispatchResult,
|
|
17
|
+
build_association_prompt,
|
|
18
|
+
build_discovery_prompt,
|
|
19
|
+
dispatch_associations,
|
|
20
|
+
extract_thoughts_from_text,
|
|
21
|
+
parse_association_response,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# ── Extraction ───────────────────────────────────────────────────────
|
|
26
|
+
|
|
27
|
+
SPRINT_NOTE = dedent("""\
|
|
28
|
+
## 2026-03-26
|
|
29
|
+
- TODO
|
|
30
|
+
- Fix onboarding bug
|
|
31
|
+
- Review PR #42
|
|
32
|
+
- Thoughts
|
|
33
|
+
- Should we add confetti to the welcome screen?
|
|
34
|
+
- Time to rethink the API versioning strategy
|
|
35
|
+
- That agent knowledge sharing idea keeps coming up
|
|
36
|
+
- Done
|
|
37
|
+
- Deployed landing page
|
|
38
|
+
|
|
39
|
+
## 2026-03-25
|
|
40
|
+
- TODO
|
|
41
|
+
- Ship feature X
|
|
42
|
+
- Thoughts
|
|
43
|
+
- Would upgrading this library fix my bug?
|
|
44
|
+
- Done
|
|
45
|
+
- Merged PR #40
|
|
46
|
+
|
|
47
|
+
## 2026-03-24
|
|
48
|
+
- TODO
|
|
49
|
+
- Write tests
|
|
50
|
+
- Done
|
|
51
|
+
- Completed integration tests
|
|
52
|
+
""")
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def test_extract_thoughts_basic():
    """A multi-day sprint note yields thoughts grouped by their day."""
    extracted = extract_thoughts_from_text(SPRINT_NOTE)
    assert len(extracted) == 4

    by_date: dict = {}
    for thought in extracted:
        by_date.setdefault(thought.date, []).append(thought.text)

    # 2026-03-26 contributes three thoughts, in document order.
    assert by_date.get("2026-03-26") == [
        "Should we add confetti to the welcome screen?",
        "Time to rethink the API versioning strategy",
        "That agent knowledge sharing idea keeps coming up",
    ]
    # 2026-03-25 contributes exactly one thought.
    assert by_date.get("2026-03-25") == ["Would upgrading this library fix my bug?"]
    # 2026-03-24 has no Thoughts section, so it contributes nothing.
    assert "2026-03-24" not in by_date
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def test_extract_thoughts_no_thoughts_section():
    """Text without any Thoughts section produces no thoughts."""
    note = dedent("""\
        ## 2026-03-20
        - TODO
          - Do stuff
        - Done
          - Did stuff
        """)
    assert extract_thoughts_from_text(note) == []
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def test_extract_thoughts_empty_text():
    """An empty string yields no thoughts."""
    assert extract_thoughts_from_text("") == []
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def test_extract_thoughts_source_file():
    """Every extracted thought carries the originating file name."""
    extracted = extract_thoughts_from_text(SPRINT_NOTE, source_file="sprint_2026-03-23.md")
    for thought in extracted:
        assert thought.source_file == "sprint_2026-03-23.md"
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def test_thought_equality():
    """Equality is determined by the (date, text) pair."""
    first = Thought(date="2026-03-26", text="hello")
    second = Thought(date="2026-03-26", text="hello")
    other = Thought(date="2026-03-26", text="different")
    assert first == second
    assert first != other
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
# ── LLM prompt ───────────────────────────────────────────────────────
|
|
111
|
+
|
|
112
|
+
def test_build_association_prompt():
    """The prompt embeds the workstream context plus every thought."""
    workstream = Workstream(
        id="2026-03-15-abc123",
        title="API Redesign",
        status="active",
        thread=[ThreadEntry(date="2026-03-25", body="Working on auth.")],
    )
    pending = [
        Thought(date="2026-03-26", text="Should add confetti"),
        Thought(date="2026-03-26", text="Rethink API versioning"),
    ]

    prompt = build_association_prompt(pending, [workstream])
    for fragment in (
        "2026-03-15-abc123",
        "API Redesign",
        "Should add confetti",
        "Rethink API versioning",
        "JSON array",
    ):
        assert fragment in prompt
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def test_build_association_prompt_no_workstreams():
    """An empty workstream list still produces a usable prompt."""
    pending = [Thought(date="2026-03-26", text="Random idea")]
    assert "no workstreams exist yet" in build_association_prompt(pending, [])
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
# ── Response parsing ─────────────────────────────────────────────────
|
|
141
|
+
|
|
142
|
+
def test_parse_association_response_valid():
    """A well-formed JSON array parses into association entries."""
    response = """[
    {"thought": "Add confetti", "action": "match", "workstream_id": "ws-123", "title": ""},
    {"thought": "New idea", "action": "new", "workstream_id": "", "title": "Confetti Feature"},
    {"thought": "Unclear", "action": "inbox", "workstream_id": "", "title": ""}
    ]"""
    parsed = parse_association_response(response)
    assert len(parsed) == 3

    matched, created, inboxed = parsed
    assert matched["action"] == "match"
    assert matched["workstream_id"] == "ws-123"
    assert created["action"] == "new"
    assert created["title"] == "Confetti Feature"
    assert inboxed["action"] == "inbox"
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def test_parse_association_response_with_code_fences():
    """Markdown code fences around the JSON payload are tolerated."""
    fenced = """```json
[{"thought": "hello", "action": "inbox", "workstream_id": "", "title": ""}]
```"""
    parsed = parse_association_response(fenced)
    assert len(parsed) == 1
    assert parsed[0]["thought"] == "hello"
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def test_parse_association_response_invalid_json():
    """Unparseable input degrades to an empty list."""
    assert parse_association_response("this is not json") == []
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def test_parse_association_response_invalid_action():
    """Entries whose action is not a recognised verb are dropped."""
    payload = '[{"thought": "hi", "action": "destroy", "workstream_id": ""}]'
    assert len(parse_association_response(payload)) == 0
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def test_parse_association_response_missing_fields():
    """Entries lacking required keys are dropped."""
    payload = '[{"thought": "hi"}, {"action": "inbox"}]'
    assert len(parse_association_response(payload)) == 0
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
# ── Dispatch ─────────────────────────────────────────────────────────
|
|
188
|
+
|
|
189
|
+
@pytest.fixture()
def dispatch_env(tmp_path: Path):
    """Set up a workstreams directory with one workstream file for dispatch tests.

    Returns a dict with:
      - ws_dir: the workstreams directory (Path)
      - ws: the parsed Workstream loaded from the file
      - ws_file: the path of the single workstream markdown file
    """
    ws_dir = tmp_path / "workstreams"
    ws_dir.mkdir()

    # Minimal but complete workstream document: YAML frontmatter followed by
    # Thread and Next sections, matching the format load_workstream expects.
    ws_text = dedent("""\
        ---
        id: 2026-03-15-abc123
        title: API Redesign
        status: active
        size: week
        created: '2026-03-15'
        updated: '2026-03-25'
        ---

        # API Redesign

        ## Thread
        ### 2026-03-25
        Working on auth.

        ## Next
        - Finish migration
        """)
    ws_file = ws_dir / "2026-03-15-abc123-api-redesign.md"
    ws_file.write_text(ws_text, encoding="utf-8")
    ws = load_workstream(ws_file)

    return {"ws_dir": ws_dir, "ws": ws, "ws_file": ws_file}
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
def test_dispatch_match(dispatch_env):
    """A matched thought lands in the target workstream's thread."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Rethink API versioning")
    association = {
        "thought": "Rethink API versioning",
        "action": "match",
        "workstream_id": "2026-03-15-abc123",
        "title": "",
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert (result.matched, result.new_created, result.inbox_count) == (1, 0, 0)

    # The workstream file on disk should now contain the raw thought text,
    # with no provenance prefix added.
    body = env["ws_file"].read_text(encoding="utf-8")
    assert "Rethink API versioning" in body
    assert "(from thoughts sweep)" not in body

    meta, _ = parse_frontmatter(body)
    assert meta["updated"] == date.today().isoformat()
    # The activity window reflects the thought's date.
    assert meta["first_activity"] == "2026-03-26"
    assert meta["last_activity"] == "2026-03-26"
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def test_dispatch_new(dispatch_env):
    """A 'new' association materialises a brand-new workstream file."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Agent knowledge sharing")
    association = {
        "thought": "Agent knowledge sharing",
        "action": "new",
        "workstream_id": "",
        "title": "Agent Knowledge Sharing",
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert result.new_created == 1

    # Exactly one new file appears, slugged from the title.
    created = [p for p in env["ws_dir"].glob("*.md") if "agent-knowledge-sharing" in p.name]
    assert len(created) == 1

    spawned = load_workstream(created[0])
    assert spawned.title == "Agent Knowledge Sharing"
    assert spawned.status == "active"
    assert len(spawned.thread) == 1
    assert spawned.thread[0].body == "Agent knowledge sharing"
    assert "(from thoughts sweep)" not in spawned.thread[0].body
    # The activity window is seeded from the thought's date.
    assert spawned.first_activity == "2026-03-26"
    assert spawned.last_activity == "2026-03-26"
|
|
280
|
+
|
|
281
|
+
def test_dispatch_inbox(dispatch_env):
    """An 'inbox' association is written to inbox.md with its date."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Something unclear")
    association = {
        "thought": "Something unclear",
        "action": "inbox",
        "workstream_id": "",
        "title": "",
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert result.inbox_count == 1

    inbox_file = env["ws_dir"] / "inbox.md"
    assert inbox_file.exists()
    text = inbox_file.read_text(encoding="utf-8")
    assert "Something unclear" in text
    assert "2026-03-26" in text
|
|
302
|
+
|
|
303
|
+
|
|
304
|
+
def test_dispatch_match_bad_id(dispatch_env):
    """A match against an unknown workstream ID is rerouted to the inbox."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Mystery")
    association = {
        "thought": "Mystery",
        "action": "match",
        "workstream_id": "nonexistent-id",
        "title": "",
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert result.matched == 0
    assert result.inbox_count == 1
    # The failed match is surfaced as an error, not silently dropped.
    assert len(result.errors) == 1
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
def test_dispatch_new_with_parent(dispatch_env):
    """A 'new' association with a valid parent_id nests the new workstream."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Auth migration plan")
    association = {
        "thought": "Auth migration plan",
        "action": "new",
        "workstream_id": "",
        "title": "Auth Migration",
        "parent_id": "2026-03-15-abc123",  # ID of the fixture workstream
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert result.new_created == 1

    created = [p for p in env["ws_dir"].glob("*.md") if "auth-migration" in p.name]
    assert len(created) == 1
    assert load_workstream(created[0]).parent == "2026-03-15-abc123"
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def test_dispatch_new_with_invalid_parent(dispatch_env):
    """A 'new' association with an unknown parent_id drops the parent."""
    env = dispatch_env
    thought = Thought(date="2026-03-26", text="Random project")
    association = {
        "thought": "Random project",
        "action": "new",
        "workstream_id": "",
        "title": "Random Project",
        "parent_id": "nonexistent-id",
    }

    result = dispatch_associations([association], [thought], [env["ws"]], env["ws_dir"])
    assert result.new_created == 1

    created = [p for p in env["ws_dir"].glob("*.md") if "random-project" in p.name]
    assert len(created) == 1
    # The invalid parent_id was discarded rather than stored.
    assert load_workstream(created[0]).parent == ""
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
# ── Snooze computation ────────────────────────────────────────────────
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def test_compute_snooze_tomorrow():
    """'tomorrow' snoozes to the next calendar day without escalation."""
    from workstream.thoughts import compute_snooze_until

    until, later_count = compute_snooze_until('tomorrow', 0, from_date=date(2026, 3, 28))
    assert until == '2026-03-29'
    assert later_count == 0
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
def test_compute_snooze_next_week():
    """'next-week' snoozes to the upcoming Monday."""
    from workstream.thoughts import compute_snooze_until

    # 2026-03-28 falls on a Saturday, so the following Monday is 2026-03-30.
    until, later_count = compute_snooze_until('next-week', 0, from_date=date(2026, 3, 28))
    assert until == '2026-03-30'
    assert later_count == 0
|
|
387
|
+
|
|
388
|
+
|
|
389
|
+
def test_compute_snooze_later_escalation():
    """Repeated 'later' snoozes escalate: +1w, +2w, +1mo, capping at +6mo."""
    from workstream.thoughts import compute_snooze_until

    base = date(2026, 3, 28)
    expectations = [
        (0, '2026-04-04', 1),    # 1st later: +1 week
        (1, '2026-04-11', 2),    # 2nd later: +2 weeks
        (2, '2026-04-27', 3),    # 3rd later: +1 month (30 days)
        (10, '2026-09-26', 11),  # 6th+ later: +6 months (182 days), capped
    ]
    for prior_count, expected_date, expected_count in expectations:
        until, later_count = compute_snooze_until('later', prior_count, from_date=base)
        assert until == expected_date
        assert later_count == expected_count
|
|
409
|
+
|
|
410
|
+
|
|
411
|
+
def test_compute_snooze_next_month():
    """'next-month' snoozes 30 days ahead without escalation."""
    from workstream.thoughts import compute_snooze_until

    until, later_count = compute_snooze_until('next-month', 0, from_date=date(2026, 3, 28))
    assert until == '2026-04-27'
    assert later_count == 0
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
def test_compute_snooze_invalid():
    """An unrecognised duration keyword raises ValueError."""
    import pytest
    from workstream.thoughts import compute_snooze_until

    # Pass count explicitly (as every other call site does) so the call
    # reaches duration validation instead of failing with TypeError on a
    # missing positional argument — pytest.raises(ValueError) would not
    # catch a TypeError.
    with pytest.raises(ValueError):
        compute_snooze_until('invalid-duration', 0)
|
|
424
|
+
|
|
425
|
+
|
|
426
|
+
# ── Discovery prompt ───────────────────────────────────────────────
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def test_build_discovery_prompt_includes_workstream_context():
    """Workstream IDs, titles, and statuses all surface in the prompt."""
    workstream = Workstream(
        id="2026-03-15-abc123",
        title="API Redesign",
        status="active",
        thread=[ThreadEntry(date="2026-03-25", body="Working on auth.")],
    )
    prompt = build_discovery_prompt("some content", [workstream], "sprint_2026-03-23.md")
    for fragment in (
        "2026-03-15-abc123",
        "API Redesign",
        "[active]",
        "sprint_2026-03-23.md",
        "some content",
    ):
        assert fragment in prompt
|
|
443
|
+
|
|
444
|
+
|
|
445
|
+
def test_build_discovery_prompt_no_workstreams():
    """An empty workstream list is handled gracefully."""
    prompt = build_discovery_prompt("content", [], "sprint_2026-03-01.md")
    assert "no workstreams exist yet" in prompt
    assert "sprint_2026-03-01.md" in prompt
|
|
450
|
+
|
|
451
|
+
|
|
452
|
+
def test_build_discovery_prompt_instructions():
    """Extraction rules and the JSON response schema are spelled out."""
    prompt = build_discovery_prompt("content", [], "test.md")

    # Core extraction guidance.
    for fragment in ("Thoughts", "Do NOT extract", "JSON array"):
        assert fragment in prompt

    # Every field of the response schema is named.
    for field in ('"date"', '"thought"', '"action"', '"workstream_id"', '"title"', '"parent_id"'):
        assert field in prompt

    # Date-range handling instruction.
    assert "through" in prompt
|
|
468
|
+
|
|
469
|
+
|
|
470
|
+
def test_build_discovery_prompt_embeds_file_content():
    """The raw note content is embedded verbatim in the prompt."""
    note = dedent("""\
        ## 2026-03-09
        - Thoughts
        \t- Tab-indented thought here
        - Launch thoughts
          - This is a named thought section
        """)
    prompt = build_discovery_prompt(note, [], "sprint_2026-03-09.md")
    assert "Tab-indented thought here" in prompt
    assert "Launch thoughts" in prompt
    assert "named thought section" in prompt
|
|
483
|
+
|
|
484
|
+
|
|
485
|
+
def test_build_discovery_prompt_shows_hierarchy():
    """Child workstreams render indented beneath their parent."""
    parent = Workstream(id="ws-parent", title="FOSS", status="active")
    child = Workstream(id="ws-child", title="boltons", status="active", parent="ws-parent")

    prompt = build_discovery_prompt("content", [parent, child], "test.md")
    assert "- ws-parent | FOSS [active]" in prompt
    # The child line is indented and annotated with its parent's title.
    assert "  - ws-child | boltons [active] (child of FOSS)" in prompt
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
def test_build_discovery_prompt_with_repo_context():
    """A non-empty repo_context adds a REPO ACTIVITY section."""
    prompt = build_discovery_prompt("content", [], "test.md", repo_context="some repo info")
    assert "REPO ACTIVITY:" in prompt
    assert "some repo info" in prompt
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
def test_build_discovery_prompt_no_repo_context():
    """Omitted or empty repo_context leaves out the REPO ACTIVITY section."""
    assert "REPO ACTIVITY:" not in build_discovery_prompt("content", [], "test.md")
    assert "REPO ACTIVITY:" not in build_discovery_prompt("content", [], "test.md", repo_context='')
|