workstream-cli 0.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- workstream/ARCHITECTURE.md +89 -0
- workstream/__init__.py +8 -0
- workstream/cli.py +136 -0
- workstream/commands/__init__.py +0 -0
- workstream/commands/backfill.py +139 -0
- workstream/commands/block.py +93 -0
- workstream/commands/checkin.py +51 -0
- workstream/commands/cron.py +119 -0
- workstream/commands/focus_cmd.py +273 -0
- workstream/commands/idea.py +172 -0
- workstream/commands/index.py +89 -0
- workstream/commands/init.py +567 -0
- workstream/commands/inspect_cmd.py +354 -0
- workstream/commands/list_cmd.py +99 -0
- workstream/commands/nest.py +108 -0
- workstream/commands/new.py +95 -0
- workstream/commands/next_cmd.py +333 -0
- workstream/commands/report.py +190 -0
- workstream/commands/resume.py +145 -0
- workstream/commands/review.py +227 -0
- workstream/commands/serve.py +23 -0
- workstream/commands/setup.py +178 -0
- workstream/commands/show.py +123 -0
- workstream/commands/snooze.py +117 -0
- workstream/commands/stale.py +116 -0
- workstream/commands/sweep.py +1753 -0
- workstream/commands/tree.py +105 -0
- workstream/commands/update_status.py +117 -0
- workstream/config.py +322 -0
- workstream/extensions/__init__.py +0 -0
- workstream/extensions/workstream.ts +633 -0
- workstream/focus_artifact.py +157 -0
- workstream/git.py +194 -0
- workstream/harness.py +49 -0
- workstream/llm.py +78 -0
- workstream/markdown.py +501 -0
- workstream/models.py +274 -0
- workstream/plan_index.py +88 -0
- workstream/provisioning.py +196 -0
- workstream/repo_discovery.py +158 -0
- workstream/review_artifact.py +96 -0
- workstream/scripts/migrate_statuses.py +120 -0
- workstream/skills/__init__.py +0 -0
- workstream/skills/workstream_context/SKILL.md +75 -0
- workstream/skills/workstream_context/__init__.py +0 -0
- workstream/skills/workstream_focus/SKILL.md +141 -0
- workstream/skills/workstream_init/SKILL.md +86 -0
- workstream/skills/workstream_review/SKILL.md +224 -0
- workstream/skills/workstream_sweep/SKILL.md +178 -0
- workstream/sweep_state.py +93 -0
- workstream/templates/dashboard.html +382 -0
- workstream/templates/detail.html +360 -0
- workstream/templates/plan.html +210 -0
- workstream/test/__init__.py +0 -0
- workstream/test/conftest.py +221 -0
- workstream/test/fixtures/sample_sprint_note.md +10 -0
- workstream/test/fixtures/sample_workstream.md +41 -0
- workstream/test/test_backfill.py +180 -0
- workstream/test/test_batch_writeback.py +81 -0
- workstream/test/test_commands.py +938 -0
- workstream/test/test_config.py +54 -0
- workstream/test/test_focus_artifact.py +211 -0
- workstream/test/test_git.py +88 -0
- workstream/test/test_heuristics.py +136 -0
- workstream/test/test_hierarchy.py +231 -0
- workstream/test/test_init.py +452 -0
- workstream/test/test_inspect.py +143 -0
- workstream/test/test_llm.py +78 -0
- workstream/test/test_markdown.py +626 -0
- workstream/test/test_models.py +506 -0
- workstream/test/test_next.py +206 -0
- workstream/test/test_plan_index.py +83 -0
- workstream/test/test_provisioning.py +270 -0
- workstream/test/test_repo_discovery.py +181 -0
- workstream/test/test_resume.py +71 -0
- workstream/test/test_sweep.py +1196 -0
- workstream/test/test_sweep_state.py +86 -0
- workstream/test/test_thoughts.py +516 -0
- workstream/test/test_web.py +606 -0
- workstream/thoughts.py +505 -0
- workstream/web.py +444 -0
- workstream_cli-0.0.1.dist-info/LICENSE +21 -0
- workstream_cli-0.0.1.dist-info/METADATA +93 -0
- workstream_cli-0.0.1.dist-info/RECORD +86 -0
- workstream_cli-0.0.1.dist-info/WHEEL +4 -0
- workstream_cli-0.0.1.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"""Tests for ws next and ws stale commands."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from workstream.commands.next_cmd import get_next_workstreams
|
|
8
|
+
from workstream.commands.stale import get_stale_workstreams
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
# -- ws next ----------------------------------------------------------------
|
|
12
|
+
|
|
13
|
+
class TestGetNextWorkstreams:
    """Behavioral tests for get_next_workstreams() ranking and filtering."""

    def test_next_all_active(self, workstreams_dir: Path) -> None:
        """Returns only active workstreams, sorted by staleness (most idle first)."""
        ranked = get_next_workstreams(workstreams_dir)

        # Snoozed items are excluded entirely; only four active remain.
        assert len(ranked) == 4
        for ws in ranked:
            assert ws.status == 'active'

        # Most-idle-first ordering.
        idleness = [ws.days_idle() for ws in ranked]
        assert idleness == sorted(idleness, reverse=True)

        # Relative ordering: sponsorship (oldest updated) before docs before api.
        ordering = [ws.title for ws in ranked]
        assert ordering.index('Sponsorship Outreach') < ordering.index('Docs Overhaul')
        assert ordering.index('Docs Overhaul') < ordering.index('API Redesign')

    def test_next_filter_by_tag(self, workstreams_dir: Path) -> None:
        """--tag code shows only code-tagged active workstreams."""
        ranked = get_next_workstreams(workstreams_dir, tag='code')

        for ws in ranked:
            assert ws.status == 'active'
            assert 'code' in ws.tags
        # Three active workstreams carry the 'code' tag (Accessibility Audit
        # is now active, so it is included alongside API Redesign and Docs).
        found = {ws.title for ws in ranked}
        assert {'API Redesign', 'Docs Overhaul', 'Accessibility Audit'} <= found
        assert len(ranked) == 3

    def test_next_filter_by_size(self, workstreams_dir: Path) -> None:
        """--size week shows only week-sized active workstreams."""
        ranked = get_next_workstreams(workstreams_dir, size='week')

        for ws in ranked:
            assert ws.status == 'active'
            assert ws.size == 'week'
        found = {ws.title for ws in ranked}
        assert {'API Redesign', 'Sponsorship Outreach'} <= found
        assert len(ranked) == 2

    def test_next_empty(self, workstreams_dir: Path) -> None:
        """Filter combination that matches nothing returns empty list."""
        assert get_next_workstreams(workstreams_dir, tag='nonexistent') == []
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
# -- ws stale ---------------------------------------------------------------
|
|
63
|
+
|
|
64
|
+
class TestGetStaleWorkstreams:
    """Behavioral tests for get_stale_workstreams() bucketing and ordering."""

    def test_stale_default(self, workstreams_dir: Path) -> None:
        """With default 7-day threshold, identifies stale active + snoozed items."""
        snoozed_stale, active_idle, _snoozed, _needs_dir, _blocked = get_stale_workstreams(workstreams_dir, days=7)

        # Conference Talk is snoozed and last updated 2026-03-12 — past 7 days.
        assert any(ws.title == 'Conference Talk' for ws in snoozed_stale)

        # Sponsorship Outreach (updated 2026-03-18) is definitely idle >7 days.
        assert any(ws.title == 'Sponsorship Outreach' for ws in active_idle)

        # Active bucket is ordered most-idle first.
        staleness = [ws.days_idle() for ws in active_idle]
        assert staleness == sorted(staleness, reverse=True)

    def test_stale_snoozed_shown(self, workstreams_dir: Path) -> None:
        """Snoozed items appear in the snoozed_stale list, not active_idle."""
        snoozed_stale, active_idle, _snoozed, _needs_dir, _blocked = get_stale_workstreams(workstreams_dir, days=1)

        # Conference Talk is snoozed — must land in the snoozed bucket only.
        conference_id = '2026-03-01-c3d4e5f6a7'
        assert conference_id in {ws.id for ws in snoozed_stale}
        assert conference_id not in {ws.id for ws in active_idle}

        # The active bucket holds nothing but active-status items.
        for ws in active_idle:
            assert ws.status == 'active'

    def test_stale_high_threshold(self, workstreams_dir: Path) -> None:
        """Very high threshold returns nothing stale."""
        snoozed_stale, active_idle, _snoozed, _needs_dir, _blocked = get_stale_workstreams(workstreams_dir, days=9999)
        assert snoozed_stale == []
        assert active_idle == []
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
|
|
105
|
+
# -- ws next with blocked workstreams -------------------------------------------
|
|
106
|
+
|
|
107
|
+
def test_next_includes_blocked(tmp_path: Path) -> None:
    """Blocked workstreams appear in ws next output (they're in ACTIVE_STATUSES)."""
    from workstream.commands.new import _new_handler
    from workstream.commands.block import _block_handler
    from workstream.config import Config

    ws_dir = tmp_path / 'workstreams'
    ws_dir.mkdir()
    config = Config(workstreams_dir=str(ws_dir))

    _new_handler(posargs_=('Active', 'Task'), config=config, size='day')
    _new_handler(posargs_=('Blocked', 'Task'), config=config, size='day')

    from workstream.markdown import load_workstream
    # Select the workstream to block by title, not by sorted-filename index:
    # filenames embed a generated id, so sorted() position does not reliably
    # correspond to creation order and could silently block the wrong item.
    loaded = [load_workstream(f) for f in sorted(ws_dir.glob('*.md'))]
    blocked_ws = next(ws for ws in loaded if ws.title == 'Blocked Task')
    _block_handler(posargs_=(blocked_ws.id, 'stuck'), config=config)

    results = get_next_workstreams(ws_dir)
    titles = {ws.title for ws in results}
    assert 'Active Task' in titles
    assert 'Blocked Task' in titles
    statuses = {ws.status for ws in results}
    assert 'blocked' in statuses

    # The specific item we blocked must carry the blocked status.
    by_title = {ws.title: ws.status for ws in results}
    assert by_title['Blocked Task'] == 'blocked'
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# -- ws next prompt includes log entries ------------------------------------------
|
|
134
|
+
|
|
135
|
+
def test_prompt_includes_log_entries(tmp_path: Path) -> None:
    """_build_recommendation_prompt() output contains recent log entries."""
    from workstream.commands.new import _new_handler
    from workstream.commands.checkin import _checkin_handler
    from workstream.commands.next_cmd import _build_recommendation_prompt
    from workstream.config import Config
    from workstream.markdown import load_workstream

    store = tmp_path / 'workstreams'
    store.mkdir()
    cfg = Config(workstreams_dir=str(store))

    # Create one workstream and record a check-in against its id.
    _new_handler(posargs_=('Logged', 'Task'), config=cfg, size='day')
    created = load_workstream(list(store.glob('*.md'))[0])
    _checkin_handler(posargs_=(created.id, 'Published', 'the', 'post'), config=cfg)

    # Reload from disk so the prompt sees the freshly-written log entry.
    reloaded = [load_workstream(f) for f in store.glob('*.md')]
    rendered = _build_recommendation_prompt(reloaded, None, None)
    assert 'Recent log:' in rendered
    assert 'checked-in' in rendered
    assert 'Published the post' in rendered
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
# -- _build_next_manifest with sprint context -----------------------------------
|
|
163
|
+
|
|
164
|
+
def test_build_next_manifest_includes_sprint_context(tmp_path: Path) -> None:
    """Sprint context appears in the interactive manifest when provided."""
    from workstream.commands.new import _new_handler
    from workstream.commands.next_cmd import _build_next_manifest
    from workstream.config import Config
    from workstream.markdown import load_workstream

    store = tmp_path / 'workstreams'
    store.mkdir()
    _new_handler(posargs_=('Test', 'WS'), config=Config(workstreams_dir=str(store)), size='day')

    loaded = [load_workstream(f) for f in store.glob('*.md')]
    sprint_ctx = '## Sprint Note Changes\n\n```diff\n+ Done: Test task\n```'
    rendered = _build_next_manifest(loaded, None, None, sprint_context=sprint_ctx)
    # Both the section heading and the diff body must flow through.
    assert 'Sprint Note Changes' in rendered
    assert 'Done: Test task' in rendered
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def test_build_next_manifest_no_sprint_context(tmp_path: Path) -> None:
    """Without sprint context the manifest omits sprint information."""
    from workstream.commands.new import _new_handler
    from workstream.commands.next_cmd import _build_next_manifest
    from workstream.config import Config
    from workstream.markdown import load_workstream

    store = tmp_path / 'workstreams'
    store.mkdir()
    _new_handler(posargs_=('Test', 'WS'), config=Config(workstreams_dir=str(store)), size='day')

    loaded = [load_workstream(f) for f in store.glob('*.md')]
    # Omitting sprint_context must leave no trace of sprint data at all.
    assert 'Sprint' not in _build_next_manifest(loaded, None, None)
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
# -- get_command registration ---------------------------------------------------
|
|
201
|
+
|
|
202
|
+
def test_get_command_creates_successfully() -> None:
    """get_command() builds without error and registers the 'next' command.

    NOTE: the previous docstring claimed a --quick flag check, but no flag
    was ever asserted; the docstring now matches what is actually tested.
    """
    from workstream.commands.next_cmd import get_command
    cmd = get_command()
    assert cmd.name == 'next'
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"""Tests for workstream.plan_index — durable plan catalog."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from workstream.plan_index import load_plan_index, save_plan_index, update_plan_index
|
|
6
|
+
from workstream.models import PlanRef
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def test_load_missing_file(tmp_path: Path) -> None:
    """An absent plan-index file loads as an empty catalog (no error)."""
    assert load_plan_index(tmp_path) == {}
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def test_save_and_load_roundtrip(tmp_path: Path) -> None:
    """save_plan_index() followed by load_plan_index() reproduces the mapping,
    and the written YAML carries the explanatory header comment."""
    index = {
        'my-ws': [
            {'repo': 'r', 'path': 'p.md', 'title': 'T', 'status': 'active', 'created': '2026-01-01'},
        ],
    }
    save_plan_index(index, tmp_path)
    loaded = load_plan_index(tmp_path)
    assert loaded == index
    # Verify header comment
    text = (tmp_path / 'plan-index.yaml').read_text(encoding='utf-8')
    assert 'maintained by ws sweep' in text
    assert 'git log' in text
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def test_update_appends_new_entries(tmp_path: Path) -> None:
    """A plan unknown to the index is appended and reported as a change."""
    index: dict = {}
    plan = PlanRef(repo='r', path='a.md', status='active', title='A', date='2026-01-01')
    changed = update_plan_index(index, 'my-ws', [plan])
    assert changed is True
    assert len(index['my-ws']) == 1
    assert index['my-ws'][0]['path'] == 'a.md'
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def test_update_status_change(tmp_path: Path):
    """A status transition on a known plan is written through and flagged."""
    catalog = {
        'my-ws': [
            {'repo': 'r', 'path': 'a.md', 'title': 'A', 'status': 'active', 'created': '2026-01-01'},
        ],
    }
    promoted = PlanRef(repo='r', path='a.md', status='implemented', title='A', date='2026-01-01')
    assert update_plan_index(catalog, 'my-ws', [promoted]) is True
    assert catalog['my-ws'][0]['status'] == 'implemented'
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def test_update_no_change(tmp_path: Path):
    """Re-submitting an identical plan leaves the index untouched."""
    catalog = {
        'my-ws': [
            {'repo': 'r', 'path': 'a.md', 'title': 'A', 'status': 'active', 'created': '2026-01-01'},
        ],
    }
    same = PlanRef(repo='r', path='a.md', status='active', title='A', date='2026-01-01')
    assert update_plan_index(catalog, 'my-ws', [same]) is False
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def test_update_never_removes(tmp_path: Path):
    """Entries already in the index survive updates that omit them."""
    catalog = {
        'my-ws': [
            {'repo': 'r', 'path': 'old.md', 'title': 'Old', 'status': 'implemented', 'created': '2025-01-01'},
        ],
    }
    # Update with a different plan — old one must survive
    fresh = PlanRef(repo='r', path='new.md', status='active', title='New', date='2026-01-01')
    update_plan_index(catalog, 'my-ws', [fresh])
    recorded = [entry['path'] for entry in catalog['my-ws']]
    assert 'old.md' in recorded
    assert 'new.md' in recorded
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def test_update_backfills_title(tmp_path: Path) -> None:
    """A blank stored title is backfilled from the incoming plan and flagged."""
    index = {
        'my-ws': [
            {'repo': 'r', 'path': 'a.md', 'title': '', 'status': 'active', 'created': '2026-01-01'},
        ],
    }
    plan = PlanRef(repo='r', path='a.md', status='active', title='Now Has Title', date='2026-01-01')
    changed = update_plan_index(index, 'my-ws', [plan])
    assert changed is True
    assert index['my-ws'][0]['title'] == 'Now Has Title'
|
|
@@ -0,0 +1,270 @@
|
|
|
1
|
+
"""Tests for workstream.provisioning and workstream.commands.setup."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
import pytest
|
|
8
|
+
|
|
9
|
+
from workstream.provisioning import (
|
|
10
|
+
SetupResult,
|
|
11
|
+
_backup_if_modified,
|
|
12
|
+
deploy_extension,
|
|
13
|
+
deploy_skills,
|
|
14
|
+
setup_repo,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
# ── _backup_if_modified ──────────────────────────────────────────────────
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def test_backup_noop_when_identical(tmp_path: Path) -> None:
    """No backup when file matches reference."""
    pristine = tmp_path / "source.ts"
    pristine.write_text("content")
    deployed = tmp_path / "dest.ts"
    deployed.write_text("content")

    note = _backup_if_modified(deployed, reference=pristine)
    # Identical content: nothing reported, nothing backed up.
    assert note is None
    assert list(tmp_path.glob("*.bak")) == []
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def test_backup_when_modified(tmp_path: Path) -> None:
    """Backup created when file differs from reference."""
    pristine = tmp_path / "source.ts"
    pristine.write_text("new content")
    deployed = tmp_path / "dest.ts"
    deployed.write_text("old content with local edits")

    note = _backup_if_modified(deployed, reference=pristine)
    assert note is not None
    assert "backed up" in note

    # Exactly one backup, preserving the pre-deploy content.
    backups = list(tmp_path.glob("*.bak"))
    assert len(backups) == 1
    assert backups[0].read_text() == "old content with local edits"
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def test_backup_without_reference(tmp_path: Path) -> None:
    """Backup created when no reference (file being removed)."""
    doomed = tmp_path / "legacy.ts"
    doomed.write_text("legacy content")

    assert _backup_if_modified(doomed) is not None

    backups = list(tmp_path.glob("*.bak"))
    assert len(backups) == 1
    assert backups[0].read_text() == "legacy content"
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def test_backup_nonexistent_file(tmp_path: Path) -> None:
    """No backup when target doesn't exist."""
    assert _backup_if_modified(tmp_path / "missing.ts") is None
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
# ── deploy_extension ─────────────────────────────────────────────────────
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def test_deploy_extension_creates_dir_and_copies(tmp_path: Path) -> None:
    """Deploying into a bare repo creates .omp/extensions/workstream.ts."""
    workdir = tmp_path / "myrepo"
    workdir.mkdir()
    messages = deploy_extension(workdir, harnesses=["omp"])
    deployed = workdir / ".omp" / "extensions" / "workstream.ts"
    assert deployed.is_file()
    assert any("deployed" in m for m in messages)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def test_deploy_extension_removes_legacy(tmp_path: Path) -> None:
    """Old extension filenames are deleted when the new one is deployed."""
    workdir = tmp_path / "myrepo"
    ext_dir = workdir / ".omp" / "extensions"
    ext_dir.mkdir(parents=True)
    for legacy in ("plan-persist.ts", "workstream-persist.ts"):
        (ext_dir / legacy).write_text("old")

    messages = deploy_extension(workdir, harnesses=["omp"])

    for legacy in ("plan-persist.ts", "workstream-persist.ts"):
        assert not (ext_dir / legacy).exists()
    assert (ext_dir / "workstream.ts").is_file()
    assert any("removed legacy plan-persist.ts" in m for m in messages)
    assert any("removed legacy workstream-persist.ts" in m for m in messages)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def test_deploy_extension_backs_up_modified_legacy(tmp_path: Path) -> None:
    """A locally-edited legacy extension file is backed up, then removed."""
    repo = tmp_path / "myrepo"
    ext_dir = repo / ".omp" / "extensions"
    ext_dir.mkdir(parents=True)
    # Content differs from any shipped version — counts as a local edit.
    (ext_dir / "plan-persist.ts").write_text("locally edited legacy")

    msgs = deploy_extension(repo, harnesses=["omp"])

    # Legacy removed, backup created
    assert not (ext_dir / "plan-persist.ts").exists()
    # Backup name pattern: <original>.<something>.bak (e.g. timestamped).
    bak_files = list(ext_dir.glob("plan-persist.ts.*.bak"))
    assert len(bak_files) == 1
    assert bak_files[0].read_text() == "locally edited legacy"
    assert any("backed up" in m for m in msgs)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def test_deploy_extension_backs_up_modified_current(tmp_path: Path) -> None:
    """A locally-modified current extension is backed up before overwrite."""
    repo = tmp_path / "myrepo"
    ext_dir = repo / ".omp" / "extensions"
    ext_dir.mkdir(parents=True)
    (ext_dir / "workstream.ts").write_text("user modified this")

    msgs = deploy_extension(repo, harnesses=["omp"])

    # Current extension overwritten, backup created
    assert (ext_dir / "workstream.ts").is_file()
    assert (ext_dir / "workstream.ts").read_text() != "user modified this"
    bak_files = list(ext_dir.glob("workstream.ts.*.bak"))
    assert len(bak_files) == 1
    assert bak_files[0].read_text() == "user modified this"
    assert any("backed up" in m for m in msgs)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def test_deploy_extension_no_backup_when_identical(tmp_path: Path) -> None:
    """Re-deploying the same content doesn't create a backup."""
    workdir = tmp_path / "myrepo"
    ext_dir = workdir / ".omp" / "extensions"
    ext_dir.mkdir(parents=True)

    deploy_extension(workdir, harnesses=["omp"])
    # Identical second deploy: nothing should be backed up.
    messages = deploy_extension(workdir, harnesses=["omp"])

    assert list(ext_dir.glob("*.bak")) == []
    assert all("backed up" not in m for m in messages)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
# ── deploy_skills ────────────────────────────────────────────────────────
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
def test_deploy_skills_installs_all(tmp_path: Path) -> None:
    """Only the always-on context skill is installed by deploy_skills."""
    workdir = tmp_path / "myrepo"
    workdir.mkdir()
    messages = deploy_skills(workdir, harnesses=["omp"])
    skills_root = workdir / ".omp" / "skills"
    assert (skills_root / "workstream_context" / "SKILL.md").is_file()
    # Session skills are NOT installed by deploy_skills
    for session_skill in ("workstream_review", "workstream_init", "workstream_sweep"):
        assert not (skills_root / session_skill / "SKILL.md").exists()
    assert any("installed skills" in m for m in messages)
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def test_deploy_skills_preserves_existing(tmp_path: Path) -> None:
    """Deploying never clobbers skills the user added themselves."""
    workdir = tmp_path / "myrepo"
    skills_root = workdir / ".omp" / "skills"
    mine = skills_root / "my-custom-skill"
    mine.mkdir(parents=True)
    (mine / "SKILL.md").write_text("custom content")

    deploy_skills(workdir, harnesses=["omp"])

    # User skill untouched; stock skill installed alongside it.
    assert (mine / "SKILL.md").read_text() == "custom content"
    assert (skills_root / "workstream_context" / "SKILL.md").is_file()
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def test_deploy_skills_version_stamp(tmp_path: Path) -> None:
    """Installed skill file has a version stamp appended."""
    repo = tmp_path / "myrepo"
    repo.mkdir()
    deploy_skills(repo, harnesses=["omp"])
    # The HTML-comment stamp lets later deploys detect an up-to-date install.
    installed = (repo / ".omp" / "skills" / "workstream_context" / "SKILL.md").read_text()
    assert "<!-- ws-installed-version:" in installed
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def test_deploy_skills_skips_when_current(tmp_path: Path) -> None:
    """Second deploy returns 'up to date' when version matches."""
    workdir = tmp_path / "myrepo"
    workdir.mkdir()
    deploy_skills(workdir, harnesses=["omp"])
    messages = deploy_skills(workdir, harnesses=["omp"])
    assert any("up to date" in m for m in messages)
|
|
194
|
+
|
|
195
|
+
|
|
196
|
+
def test_deploy_skills_removes_legacy(tmp_path: Path) -> None:
    """Legacy session skill dirs are cleaned up during deploy."""
    workdir = tmp_path / "myrepo"
    skills_root = workdir / ".omp" / "skills"
    stale_dirs = ["workstream_review", "workstream_init", "workstream_sweep", "workstream_focus"]
    for name in stale_dirs:
        legacy_dir = skills_root / name
        legacy_dir.mkdir(parents=True)
        (legacy_dir / "SKILL.md").write_text("old")

    messages = deploy_skills(workdir, harnesses=["omp"])

    for name in stale_dirs:
        assert not (skills_root / name).exists(), f"legacy {name} should be removed"
    assert (skills_root / "workstream_context" / "SKILL.md").is_file()
    assert any("removed legacy" in m for m in messages)
|
|
212
|
+
|
|
213
|
+
|
|
214
|
+
# ── setup_repo ───────────────────────────────────────────────────────────
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def test_setup_repo_returns_result(tmp_path: Path) -> None:
    """setup_repo() reports both extension and skill activity."""
    workdir = tmp_path / "myrepo"
    workdir.mkdir()
    outcome = setup_repo(workdir, harnesses=["omp"])
    assert isinstance(outcome, SetupResult)
    assert len(outcome.extension_msgs) > 0
    assert len(outcome.skill_msgs) > 0
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def test_setup_repo_idempotent(tmp_path: Path) -> None:
    """Running setup twice must not back anything up the second time."""
    workdir = tmp_path / "myrepo"
    workdir.mkdir()
    setup_repo(workdir, harnesses=["omp"])  # first run
    second = setup_repo(workdir, harnesses=["omp"])
    # Content identical on the second run, so no backups reported.
    assert all("backed up" not in m for m in second.extension_msgs)
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
# ── _find_repo_root ──────────────────────────────────────────────────────
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
def test_find_repo_root_by_path(tmp_path: Path) -> None:
    """A filesystem path containing a .git dir resolves to itself."""
    from workstream.commands.setup import _find_repo_root
    from workstream.config import Config

    workdir = tmp_path / "myrepo"
    (workdir / ".git").mkdir(parents=True)
    assert _find_repo_root(str(workdir), Config()) == workdir
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def test_find_repo_root_by_name(tmp_path: Path) -> None:
    """A configured repo name resolves to its resolved path."""
    from workstream.commands.setup import _find_repo_root
    from workstream.config import Config, RepoConfig

    workdir = tmp_path / "myrepo"
    (workdir / ".git").mkdir(parents=True)
    cfg = Config(repos=[RepoConfig(path=str(workdir), name="myrepo")])
    assert _find_repo_root("myrepo", cfg) == workdir.resolve()
|
|
260
|
+
|
|
261
|
+
|
|
262
|
+
def test_find_repo_root_not_found() -> None:
    """An unresolvable name raises CommandLineError."""
    from face import CommandLineError

    from workstream.commands.setup import _find_repo_root
    from workstream.config import Config

    with pytest.raises(CommandLineError, match="Cannot resolve"):
        _find_repo_root("nonexistent-repo-xyz", Config())
|