taskflow-git 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- taskflow/__init__.py +3 -0
- taskflow/archive.py +135 -0
- taskflow/cli.py +550 -0
- taskflow/config.py +195 -0
- taskflow/reports.py +284 -0
- taskflow/setup_cmd.py +305 -0
- taskflow/tasklib.py +451 -0
- taskflow_git-0.3.0.dist-info/METADATA +448 -0
- taskflow_git-0.3.0.dist-info/RECORD +11 -0
- taskflow_git-0.3.0.dist-info/WHEEL +4 -0
- taskflow_git-0.3.0.dist-info/entry_points.txt +2 -0
taskflow/__init__.py
ADDED
taskflow/archive.py
ADDED
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
"""
|
|
2
|
+
archive.py — done.md week archiving.
|
|
3
|
+
|
|
4
|
+
When done.md has more weeks than the configured limit, older ones move
|
|
5
|
+
to monthly archive files. Archive files are named yyyy-mm-archive.md
|
|
6
|
+
and weeks within them run oldest to newest.
|
|
7
|
+
|
|
8
|
+
The date in the ## Week of heading is what determines which monthly
|
|
9
|
+
archive file a week goes into — not the individual entry timestamps.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from __future__ import annotations
|
|
13
|
+
|
|
14
|
+
import re
|
|
15
|
+
from pathlib import Path
|
|
16
|
+
from typing import Optional
|
|
17
|
+
|
|
18
|
+
WEEK_HEADING_RE = re.compile(r"^##\s+Week of\s+(\d{4}-\d{2}-\d{2})\s*$")


def parse_week_blocks(lines: list[str]) -> list[dict]:
    """
    Split done.md content into a list of week blocks.

    Each returned dict has:
      date    — YYYY-MM-DD string taken from the heading
      heading — the full "## Week of ..." line
      lines   — every line after the heading, up to the next heading

    Lines that appear before the first week heading are discarded.
    """
    blocks: list[dict] = []
    block: Optional[dict] = None

    for text in lines:
        match = WEEK_HEADING_RE.match(text)
        if match is not None:
            # A new heading closes out whatever block we were accumulating.
            if block is not None:
                blocks.append(block)
            block = {"date": match.group(1), "heading": text, "lines": []}
        elif block is not None:
            # Content lines before the first heading have no block to join.
            block["lines"].append(text)

    if block is not None:
        blocks.append(block)
    return blocks
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def archive_month_path(archive_dir: Path, week_date: str) -> Path:
    """
    Map a week date string (YYYY-MM-DD) to its monthly archive file.

    The file is named after the heading date's year-month, e.g.
    "2024-03-archive.md" — entry timestamps inside the week play no part.
    """
    # First seven characters of an ISO date are the YYYY-MM prefix.
    return archive_dir / "{}-archive.md".format(week_date[:7])
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def render_week_block(block: dict) -> list[str]:
    """Flatten a week block back into lines, dropping trailing blank lines."""
    rendered = [block["heading"], *block["lines"]]
    while rendered and not rendered[-1].strip():
        del rendered[-1]
    return rendered
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def archive_old_weeks(done_path: Path, archive_dir: Path, keep_weeks: int) -> int:
    """
    Move weeks beyond the most recent ``keep_weeks`` out of done.md and into
    monthly archive files (one file per year-month of the week heading date).

    Returns the number of weeks archived (0 when done.md is missing or the
    week count is within the limit). Weeks are appended to each archive file
    oldest-to-newest, and done.md is rewritten with its preamble (any text
    before the first "## Week of" heading) plus only the kept weeks.
    """
    if not done_path.exists():
        return 0

    raw = done_path.read_text(encoding="utf-8").splitlines()

    # Separate the preamble (anything before the first ## Week of heading)
    # from the week blocks — the preamble is preserved in done.md.
    first_week_idx = None
    for i, line in enumerate(raw):
        if WEEK_HEADING_RE.match(line):
            first_week_idx = i
            break

    preamble = raw[:first_week_idx] if first_week_idx is not None else raw
    week_lines = raw[first_week_idx:] if first_week_idx is not None else []

    blocks = parse_week_blocks(week_lines)

    if len(blocks) <= keep_weeks:
        # nothing to archive
        return 0

    # FIX: blocks[:-keep_weeks] is wrong when keep_weeks == 0 — negative-zero
    # slicing makes it blocks[:0], so nothing would be archived and everything
    # kept (the opposite of the configured intent). Handle non-positive
    # keep_weeks explicitly so every week gets archived in that case.
    if keep_weeks > 0:
        to_archive = blocks[:-keep_weeks]
        to_keep = blocks[-keep_weeks:]
    else:
        to_archive = blocks
        to_keep = []

    # write each old week into its monthly archive file
    archive_dir.mkdir(parents=True, exist_ok=True)

    for block in to_archive:
        month_file = archive_month_path(archive_dir, block["date"])

        if month_file.exists():
            existing = month_file.read_text(encoding="utf-8").splitlines()
        else:
            # New archive files start with a month title heading.
            existing = [f"# Archive — {block['date'][:7]}", ""]

        # strip trailing blanks before appending
        while existing and existing[-1].strip() == "":
            existing.pop()

        # one blank line between the previous content and the new week
        existing.append("")
        existing.extend(render_week_block(block))

        month_file.write_text("\n".join(existing).rstrip() + "\n", encoding="utf-8")

    # rewrite done.md with only the weeks we're keeping
    kept_lines: list[str] = []
    for i, block in enumerate(to_keep):
        if i > 0:
            kept_lines.append("")
        kept_lines.extend(render_week_block(block))

    # strip trailing blanks from preamble before rejoining
    while preamble and preamble[-1].strip() == "":
        preamble.pop()

    out_lines = preamble
    if kept_lines:
        out_lines = preamble + [""] + kept_lines if preamble else kept_lines

    done_path.write_text("\n".join(out_lines).rstrip() + "\n", encoding="utf-8")

    return len(to_archive)
|
taskflow/cli.py
ADDED
|
@@ -0,0 +1,550 @@
|
|
|
1
|
+
"""
|
|
2
|
+
cli.py — taskflow command line interface.
|
|
3
|
+
|
|
4
|
+
All commands live here. Business logic lives in the other modules —
|
|
5
|
+
this is just wiring. Each command loads config, does one thing, exits.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import subprocess
|
|
11
|
+
from datetime import date
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Optional
|
|
14
|
+
|
|
15
|
+
import click
|
|
16
|
+
|
|
17
|
+
from taskflow import __version__
|
|
18
|
+
from taskflow.archive import archive_old_weeks
|
|
19
|
+
from taskflow.config import WORKFLOW_TRANSITIONS, TaskflowConfig, load_config
|
|
20
|
+
from taskflow.reports import report_pipeline, report_progress
|
|
21
|
+
from taskflow.setup_cmd import STARTER_CONFIG, run_setup
|
|
22
|
+
from taskflow.tasklib import append_done, complete_task, move_task
|
|
23
|
+
|
|
24
|
+
# ---------------------------------------------------------------------------
|
|
25
|
+
# Shell completion
|
|
26
|
+
# ---------------------------------------------------------------------------
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def install_completion(shell: str) -> None:
    """
    Emit the completion script for *shell* on stdout.

    Nothing is written to the user's system — they are expected to source
    the printed script from their own shell profile.
    """
    import os

    # Click prints the completion script when this env var is set to
    # "<shell>_source" and the CLI entry point is re-invoked.
    os.environ["_TASKFLOW_COMPLETE"] = f"{shell}_source"
    try:
        from taskflow.cli import main

        main(standalone_mode=False)
    except SystemExit:
        # Click may exit after printing the script; that's expected here.
        pass
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
# ---------------------------------------------------------------------------
|
|
46
|
+
# Git helpers
|
|
47
|
+
# ---------------------------------------------------------------------------
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def git_root_or_none() -> Optional[Path]:
    """Return the toplevel directory of the enclosing git repo, or None."""
    cmd = ["git", "rev-parse", "--show-toplevel"]
    try:
        output = subprocess.check_output(cmd, text=True, stderr=subprocess.DEVNULL)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Not inside a repository, or git isn't installed at all.
        return None
    return Path(output.strip())
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def git_commit(files: list[str], message: str, cwd: Path) -> None:
    """
    Stage *files* and commit them with *message*, run from *cwd*.

    Raises click.ClickException when git fails or is missing — note the
    caller's file changes have already happened by the time this runs.
    """
    workdir = str(cwd)
    try:
        # Staging output is noise; the commit's own output stays visible.
        subprocess.run(["git", "add", *files], cwd=workdir, check=True, capture_output=True)
        subprocess.run(["git", "commit", "-m", message], cwd=workdir, check=True)
    except FileNotFoundError:
        raise click.ClickException("git not found — task was moved but not committed")
    except subprocess.CalledProcessError as e:
        raise click.ClickException(f"git commit failed: {e}")
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _commit_transition(cfg: TaskflowConfig, src: str, dst: str, verb: str, task_text: str) -> None:
    """Stage the two touched state files and commit as "<verb>: <task>"."""
    rel_paths = [
        str(cfg.state_path(state).relative_to(cfg.root))
        for state in (src, dst)
    ]
    git_commit(rel_paths, f"{verb}: {task_text}", cfg.root)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def _commit_done(cfg: TaskflowConfig, task_text: str) -> None:
    """Stage the now + done state files and commit as "done: <task>"."""
    rel_paths = [
        str(cfg.state_path(state).relative_to(cfg.root))
        for state in ("now", "done")
    ]
    git_commit(rel_paths, f"done: {task_text}", cfg.root)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
# ---------------------------------------------------------------------------
|
|
87
|
+
# Root command group
|
|
88
|
+
# ---------------------------------------------------------------------------
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
@click.group()
@click.version_option(version=__version__, prog_name="taskflow")
def main() -> None:
    """
    Git-native task management for people who live in the terminal.

    taskflow walks up from your current directory to find .taskflow.yml —
    that directory is the project root. All file paths in the config are
    relative to that root.

    Run `taskflow init` to set up a new project.
    Run `taskflow setup` to regenerate backlog files after editing the config.
    """
    # The group body is intentionally empty: subcommands do all the work.
    # NOTE: this docstring doubles as the CLI's `--help` text, so edits to
    # it change user-visible output.
    pass
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
# ---------------------------------------------------------------------------
|
|
108
|
+
# init
|
|
109
|
+
# ---------------------------------------------------------------------------
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@main.command()
@click.option(
    "--from",
    "from_url",
    default=None,
    metavar="URL",
    help="Fetch starter config from a URL instead of using the built-in template.",
)
@click.option("--name", default=None, help="Project name (defaults to current directory name).")
def init(from_url: Optional[str], name: Optional[str]) -> None:
    """Set up a new taskflow project in the current directory."""
    cwd = Path.cwd()
    config_path = cwd / ".taskflow.yml"

    # Refuse to clobber an existing project — `setup` is the re-run path.
    if config_path.exists():
        raise click.ClickException(".taskflow.yml already exists. Run `taskflow setup` to regenerate backlog files.")

    repo_name = name or cwd.name

    if from_url:
        import urllib.request

        click.echo(f" fetching config from {from_url}...")
        try:
            # FIX: urlopen without a timeout can hang init forever on a
            # dead or unresponsive host; bound the wait.
            with urllib.request.urlopen(from_url, timeout=30) as resp:
                content = resp.read().decode("utf-8")
        except Exception as e:
            raise click.ClickException(f"Could not fetch config from {from_url}: {e}")
        # substitute repo name if the template has the placeholder
        content = content.replace("{repo_name}", repo_name)
    else:
        content = STARTER_CONFIG.replace("{repo_name}", repo_name)

    config_path.write_text(content, encoding="utf-8")
    click.echo(f" created {config_path.name}")

    # load and run setup immediately so the project is usable right away
    cfg = load_config(cwd)
    run_setup(cfg, force=False, dry_run=False)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
# ---------------------------------------------------------------------------
|
|
154
|
+
# setup
|
|
155
|
+
# ---------------------------------------------------------------------------
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
@main.command()
@click.option("--force", is_flag=True, help="Overwrite existing backlog files.")
@click.option("--dry-run", is_flag=True, help="Show what would happen without writing.")
def setup(force: bool, dry_run: bool) -> None:
    """Regenerate backlog files and install git aliases from .taskflow.yml."""
    # Thin wiring: resolve the config and hand straight off to setup_cmd.
    run_setup(load_config(), force=force, dry_run=dry_run)
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
# ---------------------------------------------------------------------------
|
|
168
|
+
# config (diagnostic)
|
|
169
|
+
# ---------------------------------------------------------------------------
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
@main.command("config")
|
|
173
|
+
def show_config() -> None:
|
|
174
|
+
"""Show resolved configuration — useful when paths aren't where you expect."""
|
|
175
|
+
cfg = load_config()
|
|
176
|
+
click.echo(f"\n project root : {cfg.root}")
|
|
177
|
+
click.echo(f" config file : {cfg.root / '.taskflow.yml'}")
|
|
178
|
+
click.echo(f" repo name : {cfg.repo_name}")
|
|
179
|
+
click.echo(f" done weeks : {cfg.done_weeks}")
|
|
180
|
+
click.echo(f" archive path : {cfg.archive_path}")
|
|
181
|
+
click.echo(f" weekly plans : {cfg.weekly_plan_dir}")
|
|
182
|
+
click.echo("\n state files:")
|
|
183
|
+
for state in ("now", "blocked", "paused", "next", "later", "done"):
|
|
184
|
+
path = cfg.state_path(state)
|
|
185
|
+
icon = cfg.state_icon(state)
|
|
186
|
+
exists = "✓" if path.exists() else "✗"
|
|
187
|
+
click.echo(f" {exists} {icon} {state:8} {path.relative_to(cfg.root)}")
|
|
188
|
+
click.echo("\n categories:")
|
|
189
|
+
for cat in cfg.categories:
|
|
190
|
+
icon = cat.get("icon", " ")
|
|
191
|
+
click.echo(f" {icon} {cat['name']}")
|
|
192
|
+
click.echo()
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
# ---------------------------------------------------------------------------
|
|
196
|
+
# Workflow commands — one for each transition
|
|
197
|
+
# ---------------------------------------------------------------------------
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
def _workflow_command(verb: str, src: str, dst: str):
    """
    Factory that produces a Click command for a task transition.
    All transitions follow the same pattern: find task in src, move to dst, commit.

    verb — commit-message prefix (e.g. "start", "block")
    src/dst — state names resolved via cfg.state_path at call time
    """

    @click.argument("task", nargs=-1, required=True)
    def cmd(task: tuple) -> None:
        # nargs=-1 collects the task words; rejoin them into one query string.
        query = " ".join(task)
        cfg = load_config()

        src_path = cfg.state_path(src)
        dst_path = cfg.state_path(dst)
        dst_path.parent.mkdir(parents=True, exist_ok=True)

        category, matched = move_task(src_path, dst_path, query)
        click.echo(f"Moved: [{category}] {matched}")
        click.echo(f" {src_path.name} → {dst_path.name}")

        # archive check happens on every done write, but also on promote/start
        # in case the done file accumulated weeks from a previous session
        _maybe_archive(cfg)

        _commit_transition(cfg, src, dst, verb, matched)

    # Click derives the command's help text from __doc__ and its default
    # name from __name__, so both must be set before registration.
    cmd.__name__ = verb
    cmd.__doc__ = f"Move a task: {src} → {dst}."
    return cmd
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
# register all transitions as commands
# WORKFLOW_TRANSITIONS maps command name → (src state, dst state, commit verb);
# main.command(_verb) applies the @click.command decorator at registration time.
for _verb, (_src, _dst, _prefix) in WORKFLOW_TRANSITIONS.items():
    main.command(_verb)(_workflow_command(_prefix, _src, _dst))
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
# ---------------------------------------------------------------------------
|
|
236
|
+
# done
|
|
237
|
+
# ---------------------------------------------------------------------------
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
@main.command()
@click.argument("task", nargs=-1, required=True)
def done(task: tuple) -> None:
    """Complete a task: remove from now, append to done.md, commit."""
    cfg = load_config()
    query = " ".join(task)

    # complete_task finds the matching line in now and logs it to done.md.
    category, matched = complete_task(cfg.state_path("now"), cfg.state_path("done"), query)
    click.echo(f"Done: [{category}] {matched}")

    _maybe_archive(cfg)
    _commit_done(cfg, matched)
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
# ---------------------------------------------------------------------------
|
|
255
|
+
# add
|
|
256
|
+
# ---------------------------------------------------------------------------
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
@main.command()
@click.argument("state")
@click.argument("category")
@click.argument("task", nargs=-1, required=True)
def add(state: str, category: str, task: tuple) -> None:
    """
    Add a task directly to any state without opening a file.

    \b
    taskflow add next Engineering "write deployment runbook"
    taskflow add now Backend "hotfix: null ptr in auth"
    taskflow add done Engineering "emergency patch deployed"

    Category is fuzzy-matched. State 'done' writes a timestamped entry directly.
    """
    from taskflow.tasklib import CATEGORY_RE, DIVIDER_RE, PHASE_RE, collapse_blank_lines

    query = " ".join(task)
    cfg = load_config()

    # resolve state — fuzzy match the state name too
    # FIX: was `list([...])` — a redundant list() wrapper around a literal.
    valid = ["now", "blocked", "paused", "next", "later", "done"]
    if state not in valid:
        matches = [s for s in valid if state.lower() in s.lower()]
        if len(matches) == 1:
            state = matches[0]
        else:
            raise click.UsageError(f"Unknown state '{state}'. Valid: {', '.join(valid)}")

    # 'done' is special: it appends a timestamped log entry rather than a
    # task line in a category section.
    if state == "done":
        # for done we still want a category for the log entry
        cat_name = cfg.fuzzy_category(category)
        if not cat_name:
            # not in config — use as-is, don't block the user
            cat_name = category
        append_done(cfg.state_path("done"), cat_name, query)
        click.echo(f"Added to done: ({cat_name}) - {query}")
        _maybe_archive(cfg)
        done_rel = str(cfg.state_path("done").relative_to(cfg.root))
        git_commit([done_rel], f"done: {query}", cfg.root)
        return

    # For every other state the category must resolve to a configured one;
    # on failure, distinguish "ambiguous" from "no match" for the user.
    cat_name = cfg.fuzzy_category(category)
    if not cat_name:
        cats = cfg.category_names()
        q = category.strip().lower()
        matches = [c for c in cats if q in c.lower()]
        if len(matches) > 1:
            raise click.UsageError(f"'{category}' matches multiple categories: {', '.join(matches)}")
        raise click.UsageError(f"No category matching '{category}'")

    # find the icon for the category heading
    icon = cfg.category_icon(cat_name)
    cat_raw = f"{icon} {cat_name}".strip() if icon else cat_name

    target = cfg.state_path(state)
    target.parent.mkdir(parents=True, exist_ok=True)

    lines = target.read_text(encoding="utf-8").splitlines() if target.exists() else []

    # find the category section and insert before its divider
    insert_at = None
    for i, line in enumerate(lines):
        if PHASE_RE.match(line):
            continue
        m = CATEGORY_RE.match(line)
        if m and m.group(1).strip().lower() == cat_name.lower():
            # walk forward to the end of this category's section
            j = i + 1
            while j < len(lines):
                if DIVIDER_RE.match(lines[j]) or CATEGORY_RE.match(lines[j]) or PHASE_RE.match(lines[j]):
                    break
                j += 1
            # backtrack past trailing blanks
            while j > i + 1 and lines[j - 1].strip() == "":
                j -= 1
            insert_at = j
            break

    if insert_at is not None:
        lines.insert(insert_at, f"* {query}")
    else:
        # category doesn't exist — append a new section
        if lines and lines[-1].strip() != "":
            lines.append("")
        lines.append(f"### {cat_raw}")
        lines.append(f"* {query}")
        lines.append("")
        lines.append("---")

    target.write_text("\n".join(collapse_blank_lines(lines)).rstrip() + "\n", encoding="utf-8")
    click.echo(f"Added to {state} [{cat_name}]: {query}")

    target_rel = str(target.relative_to(cfg.root))
    git_commit([target_rel], f"add ({state}): {query}", cfg.root)
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
# ---------------------------------------------------------------------------
|
|
358
|
+
# status
|
|
359
|
+
# ---------------------------------------------------------------------------
|
|
360
|
+
|
|
361
|
+
|
|
362
|
+
@main.command()
def status() -> None:
    """Active tasks, blockers, and holds — the morning view."""
    import re as _re

    from taskflow.tasklib import CATEGORY_RE, TASK_RE

    cfg = load_config()

    def read_tasks(state_name: str) -> dict[str, list[str]]:
        """Parse one state file into {category name: [task text, ...]}."""
        path = cfg.state_path(state_name)
        if not path.exists():
            return {}
        tasks: dict[str, list[str]] = {}
        current_cat = None
        for line in path.read_text(encoding="utf-8").splitlines():
            # skip phase headings ("## ..." but not "###")
            if _re.match(r"^##[^#]", line):
                continue
            m = CATEGORY_RE.match(line)
            if m:
                current_cat = m.group(1).strip()
                continue
            t = TASK_RE.match(line)
            # top-level task lines only (indented lines are sub-items),
            # and only once a category heading has been seen
            if t and not line.startswith(" ") and current_cat:
                tasks.setdefault(current_cat, []).append(t.group(3))
        return tasks

    now_tasks = read_tasks("now")
    blocked_tasks = read_tasks("blocked")
    paused_tasks = read_tasks("paused")

    total_now = sum(len(v) for v in now_tasks.values())
    total_blocked = sum(len(v) for v in blocked_tasks.values())
    total_paused = sum(len(v) for v in paused_tasks.values())

    def fmt_cat(name: str) -> str:
        """Prefix the category with its configured icon, if any."""
        icon = cfg.category_icon(name)
        return f"{icon} {name}".strip() if icon else name

    def print_section(label: str, icon: str, tasks: dict[str, list[str]]) -> None:
        """Print one state section; empty sections are omitted entirely."""
        if not tasks:
            return
        click.echo(f"\n {icon} {label}")
        for cat, items in tasks.items():
            click.echo(f" {fmt_cat(cat)}")
            for item in items:
                click.echo(f" · {item}")

    # FIX: strftime("%a %b %-d") relied on the glibc-only "%-d" (no-pad day)
    # directive, which raises on Windows. Build the unpadded day portably.
    today_date = date.today()
    today = today_date.strftime("%a %b ") + str(today_date.day)
    ni = cfg.state_icon("now")
    bi = cfg.state_icon("blocked")
    pi = cfg.state_icon("paused")

    click.echo(f"\n taskflow status — {today}")
    click.echo(f" {ni} now: {total_now} {bi} blocked: {total_blocked} {pi} paused: {total_paused}")

    print_section("now", ni, now_tasks)
    print_section("blocked", bi, blocked_tasks)
    print_section("paused", pi, paused_tasks)
    click.echo()
|
|
422
|
+
|
|
423
|
+
|
|
424
|
+
# ---------------------------------------------------------------------------
|
|
425
|
+
# week
|
|
426
|
+
# ---------------------------------------------------------------------------
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
@main.command()
def week() -> None:
    """This week's completions from done.md."""
    import re as _re

    cfg = load_config()
    done_path = cfg.state_path("done")

    if not done_path.exists():
        click.echo("\n no done.md found.\n")
        return

    from taskflow.tasklib import WEEK_HEADING_RE

    # Matches "[<timestamp>] done: (<category>) - <task text>" log entries.
    DONE_RE = _re.compile(r"^\[[\d\s:\-]+\]\s+done:\s+\(([^)]+)\)\s+-\s+(.+)$")

    lines = done_path.read_text(encoding="utf-8").splitlines()

    # find the last week heading, collect everything after it
    # (done.md runs oldest-to-newest, so the last heading is the current week)
    last_week_idx = None
    week_date_str = None
    for i, line in enumerate(lines):
        m = WEEK_HEADING_RE.match(line)
        if m:
            last_week_idx = i
            week_date_str = m.group(1)

    if last_week_idx is None:
        click.echo("\n no week headings in done.md yet.\n")
        return

    # Group this week's entries by category, preserving file order.
    entries: dict[str, list[str]] = {}
    for line in lines[last_week_idx + 1 :]:
        d = DONE_RE.match(line)
        if d:
            cat, task_text = d.group(1).strip(), d.group(2).strip()
            entries.setdefault(cat, []).append(task_text)

    if not entries:
        click.echo("\n nothing completed this week yet.\n")
        return

    total = sum(len(v) for v in entries.values())
    icon = cfg.state_icon("done")

    def fmt_cat(name: str) -> str:
        # Prefix the category with its configured icon, if any.
        ci = cfg.category_icon(name)
        return f"{ci} {name}".strip() if ci else name

    click.echo(f"\n {icon} week of {week_date_str} — {total} completed\n")
    for cat, tasks in entries.items():
        click.echo(f" {fmt_cat(cat)}")
        for t in tasks:
            click.echo(f" · {t}")
    click.echo()
|
|
484
|
+
|
|
485
|
+
|
|
486
|
+
# ---------------------------------------------------------------------------
|
|
487
|
+
# pipeline / progress
|
|
488
|
+
# ---------------------------------------------------------------------------
|
|
489
|
+
|
|
490
|
+
|
|
491
|
+
@main.command()
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def pipeline(as_json: bool) -> None:
    """Work in flight across all states plus this week's completions."""
    # Thin wiring: the report module does all the work.
    click.echo(report_pipeline(load_config(), as_json=as_json))
|
|
497
|
+
|
|
498
|
+
|
|
499
|
+
@main.command()
@click.option("--json", "as_json", is_flag=True, help="Output as JSON.")
def progress(as_json: bool) -> None:
    """Now vs. completed counts across up to 5 historical weeks."""
    # Thin wiring: the report module does all the work.
    click.echo(report_progress(load_config(), as_json=as_json))
|
|
505
|
+
|
|
506
|
+
|
|
507
|
+
# ---------------------------------------------------------------------------
|
|
508
|
+
# completion
|
|
509
|
+
# ---------------------------------------------------------------------------
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
@main.command()
@click.argument("shell", type=click.Choice(["bash", "zsh", "fish"]))
def completion(shell: str) -> None:
    """
    Print the shell completion script for the given shell.

    \b
    Add to your shell profile:
    bash: eval "$(taskflow completion bash)"
    zsh: eval "$(taskflow completion zsh)"
    fish: taskflow completion fish | source
    """
    import os

    # Click emits the completion script when the program is re-invoked with
    # _<PROG>_COMPLETE=<shell>_source in the environment.
    prog = main.name.upper().replace("-", "_")
    os.environ[f"_{prog}_COMPLETE"] = f"{shell}_source"
    try:
        main(standalone_mode=False)
    except SystemExit:
        # Click exits after printing the script; that's expected here.
        pass
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
# ---------------------------------------------------------------------------
|
|
535
|
+
# Archive helper — called after any done write
|
|
536
|
+
# ---------------------------------------------------------------------------
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
def _maybe_archive(cfg: TaskflowConfig) -> None:
    """
    Archive old done.md weeks when the configured limit is exceeded.

    Prints nothing unless at least one week was actually moved.
    """
    count = archive_old_weeks(
        done_path=cfg.state_path("done"),
        archive_dir=cfg.archive_path,
        keep_weeks=cfg.done_weeks,
    )
    if count:
        click.echo(f" archived {count} week(s) to {cfg.archive_path.relative_to(cfg.root)}/")
|