qwen-base 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +183 -0
- package/bin/install.js +227 -0
- package/package.json +42 -0
- package/src/commands/audit-claude-md.md +44 -0
- package/src/commands/audit-claude.md +45 -0
- package/src/commands/audit.md +33 -0
- package/src/commands/carl-hygiene.md +33 -0
- package/src/commands/groom.md +35 -0
- package/src/commands/history.md +27 -0
- package/src/commands/orientation/tasks/deep-why.md +132 -0
- package/src/commands/orientation/tasks/elevator-pitch.md +115 -0
- package/src/commands/orientation/tasks/initiatives.md +98 -0
- package/src/commands/orientation/tasks/key-values.md +130 -0
- package/src/commands/orientation/tasks/new-orientation.md +162 -0
- package/src/commands/orientation/tasks/north-star.md +97 -0
- package/src/commands/orientation/tasks/project-mapping.md +103 -0
- package/src/commands/orientation/tasks/reorientation.md +96 -0
- package/src/commands/orientation/tasks/surface-vision.md +113 -0
- package/src/commands/orientation/tasks/task-seeding.md +93 -0
- package/src/commands/orientation/templates/operator-json.md +88 -0
- package/src/commands/orientation.md +87 -0
- package/src/commands/pulse.md +33 -0
- package/src/commands/scaffold.md +33 -0
- package/src/commands/status.md +28 -0
- package/src/commands/surface-convert.md +35 -0
- package/src/commands/surface-create.md +34 -0
- package/src/commands/surface-list.md +27 -0
- package/src/commands/weekly-domain.md +34 -0
- package/src/commands/weekly.md +39 -0
- package/src/framework/context/base-principles.md +69 -0
- package/src/framework/frameworks/audit-strategies.md +53 -0
- package/src/framework/frameworks/claude-config-alignment.md +256 -0
- package/src/framework/frameworks/claudemd-strategy.md +158 -0
- package/src/framework/frameworks/satellite-registration.md +44 -0
- package/src/framework/tasks/audit-claude-md.md +171 -0
- package/src/framework/tasks/audit-claude.md +330 -0
- package/src/framework/tasks/audit.md +64 -0
- package/src/framework/tasks/carl-hygiene.md +142 -0
- package/src/framework/tasks/groom.md +157 -0
- package/src/framework/tasks/history.md +34 -0
- package/src/framework/tasks/pulse.md +83 -0
- package/src/framework/tasks/scaffold.md +389 -0
- package/src/framework/tasks/status.md +35 -0
- package/src/framework/tasks/surface-convert.md +143 -0
- package/src/framework/tasks/surface-create.md +184 -0
- package/src/framework/tasks/surface-list.md +42 -0
- package/src/framework/tasks/weekly-domain-create.md +173 -0
- package/src/framework/tasks/weekly.md +347 -0
- package/src/framework/templates/claudemd-template.md +102 -0
- package/src/framework/templates/workspace-json.md +96 -0
- package/src/framework/utils/scan-claude-dirs.py +549 -0
- package/src/hooks/_template.py +130 -0
- package/src/hooks/active-hook.py +178 -0
- package/src/hooks/apex-insights.py +169 -0
- package/src/hooks/backlog-hook.py +115 -0
- package/src/hooks/base-pulse-check.py +216 -0
- package/src/hooks/operator.py +53 -0
- package/src/hooks/psmm-injector.py +67 -0
- package/src/hooks/satellite-detection.py +320 -0
- package/src/packages/base-mcp/index.js +119 -0
- package/src/packages/base-mcp/package.json +10 -0
- package/src/packages/base-mcp/tools/entities.js +228 -0
- package/src/packages/base-mcp/tools/operator.js +106 -0
- package/src/packages/base-mcp/tools/projects.js +324 -0
- package/src/packages/base-mcp/tools/psmm.js +206 -0
- package/src/packages/base-mcp/tools/satellite.js +243 -0
- package/src/packages/base-mcp/tools/state.js +201 -0
- package/src/packages/base-mcp/tools/validate.js +121 -0
- package/src/skill/base.md +110 -0
- package/src/templates/operator.json +66 -0
- package/src/templates/workspace.json +76 -0
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
BASE Hook v2: active-hook-v2.py
|
|
4
|
+
Source: .base/data/projects.json (APEX unified project management)
|
|
5
|
+
Output: <active-awareness> compact summary grouped by priority
|
|
6
|
+
Filters: items with status NOT in [backlog, archived]
|
|
7
|
+
|
|
8
|
+
Drop-in replacement for active-hook.py. Swap in settings.json when ready.
|
|
9
|
+
Legacy active-hook.py reads from .base/data/active.json (unchanged).
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import sys
|
|
13
|
+
import json
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from datetime import date, datetime
|
|
16
|
+
|
|
17
|
+
SURFACE_NAME = "active"
|
|
18
|
+
|
|
19
|
+
HOOK_DIR = Path(__file__).resolve().parent
|
|
20
|
+
WORKSPACE_ROOT = HOOK_DIR.parent.parent
|
|
21
|
+
DATA_FILE = WORKSPACE_ROOT / ".base" / "data" / "projects.json"
|
|
22
|
+
|
|
23
|
+
BEHAVIOR_DIRECTIVE = f"""BEHAVIOR: This context is PASSIVE AWARENESS ONLY.
|
|
24
|
+
Do NOT proactively mention these items unless:
|
|
25
|
+
- User explicitly asks (e.g., "what should I work on?", "what's next?")
|
|
26
|
+
- A deadline is within 24 hours AND user hasn't acknowledged it this session
|
|
27
|
+
For details on any item, use base_get_project(id)."""
|
|
28
|
+
|
|
29
|
+
PRIORITY_ORDER = ["urgent", "high", "medium", "low", "ongoing", "deferred"]
|
|
30
|
+
|
|
31
|
+
# Staleness thresholds (days since last update)
|
|
32
|
+
STALE_THRESHOLDS = {
|
|
33
|
+
"urgent": 3,
|
|
34
|
+
"high": 5,
|
|
35
|
+
"medium": 7,
|
|
36
|
+
"low": 14,
|
|
37
|
+
"ongoing": 14,
|
|
38
|
+
"deferred": 30,
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
# Statuses that are NOT active (excluded from active view)
|
|
42
|
+
EXCLUDED_STATUSES = {"backlog", "archived", "completed"}
|
|
43
|
+
|
|
44
|
+
# Types to exclude from active awareness (checked via MCP during grooms)
|
|
45
|
+
EXCLUDED_TYPES = {"initiative"}
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def days_since_update(item):
    """Return the number of whole days since the item was last touched.

    Prefers ``updated_at`` and falls back to ``created_at``. Only the date
    portion (first 10 characters) of the ISO timestamp is parsed, so both
    plain dates and full datetimes are accepted. Returns ``None`` when no
    timestamp is present or it cannot be parsed.
    """
    timestamp = item.get("updated_at") or item.get("created_at")
    if timestamp:
        try:
            # Slice off any time-of-day suffix before parsing as a date.
            last_touched = date.fromisoformat(timestamp[:10])
        except (ValueError, TypeError):
            return None
        return (date.today() - last_touched).days
    return None
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def main():
    """Emit an <active-awareness> summary of active projects to stdout.

    Reads .base/data/projects.json, keeps items whose status is not in
    EXCLUDED_STATUSES and whose type is not in EXCLUDED_TYPES, groups them
    by priority, and prints one compact line-group per item. Exits 0 in
    every path (hooks must never fail the session).
    """
    # Drain the hook's stdin payload. The parsed value is not used by this
    # hook; the read just consumes the JSON Claude Code pipes in.
    try:
        input_data = json.loads(sys.stdin.read())
    except (json.JSONDecodeError, OSError):
        pass

    # No data file or unreadable/invalid JSON: stay silent, exit cleanly.
    if not DATA_FILE.exists():
        sys.exit(0)

    try:
        data = json.loads(DATA_FILE.read_text())
    except (json.JSONDecodeError, OSError):
        sys.exit(0)

    items = data.get("items", [])
    if not items:
        sys.exit(0)

    # Filter: only active items (not backlog, archived, completed), exclude initiatives
    active_items = [i for i in items if i.get("status") not in EXCLUDED_STATUSES and i.get("type") not in EXCLUDED_TYPES]
    if not active_items:
        sys.exit(0)

    # Group by priority (missing priority defaults to "medium").
    groups = {}
    for item in active_items:
        p = item.get("priority", "medium")
        groups.setdefault(p, []).append(item)

    # Workload balance header
    # NOTE(review): ongoing_count is keyed on *priority* == "ongoing" while
    # deferred_count is keyed on *status* == "deferred" (PRIORITY_ORDER also
    # contains "deferred") — confirm this asymmetry is intentional.
    blocked_count = sum(1 for i in active_items if i.get("blocked_by"))
    ongoing_count = sum(1 for i in active_items if i.get("priority") == "ongoing")
    deferred_count = sum(1 for i in active_items if i.get("status") == "deferred")
    working_count = len(active_items) - ongoing_count - deferred_count
    lines = [f"Load: {working_count} active | {blocked_count} blocked | {ongoing_count} ongoing | {deferred_count} deferred"]

    # Render each priority bucket in fixed PRIORITY_ORDER; buckets with
    # priorities outside that list are silently skipped.
    for priority in PRIORITY_ORDER:
        group = groups.get(priority, [])
        if not group:
            continue
        lines.append(f"[{priority.upper()}]")
        for item in group:
            item_id = item.get("id", "?")
            title = item.get("title", "untitled")
            status = item.get("status", "")
            category = item.get("category", "")
            cat_tag = f"({category}) " if category else ""
            # parts[0] is the headline line; later entries are detail lines.
            parts = [f"- [{item_id}] {cat_tag}{title}"]
            if status:
                parts[0] += f" ({status})"
            # PAUL signal (phase, loop, plan age, handoff) — only if paul data has real values
            paul_info = item.get("paul")
            if paul_info and paul_info.get("is_paul_project") and paul_info.get("phase"):
                paul_parts = []
                p_phase = paul_info.get("phase", "?")
                p_completed = paul_info.get("completed_phases", "?")
                p_total = paul_info.get("total_phases", "?")
                p_loop = paul_info.get("loop_position", "?")
                paul_parts.append(f"Phase {p_completed}/{p_total} ({p_phase})")
                paul_parts.append(str(p_loop))
                # Plan age
                last_plan = paul_info.get("last_plan_completed_at") or paul_info.get("last_update")
                if last_plan:
                    try:
                        # Normalize a UTC "Z" suffix for fromisoformat.
                        lp = last_plan.replace("Z", "+00:00")
                        if "T" in lp:
                            # NOTE(review): the hasattr check is dead on any
                            # Python 3.7+ runtime — fromisoformat always exists.
                            lp_date = datetime.fromisoformat(lp).date() if hasattr(datetime, 'fromisoformat') else date.fromisoformat(lp[:10])
                        else:
                            lp_date = date.fromisoformat(lp)
                        age = (date.today() - lp_date).days
                        paul_parts.append(f"plan {age}d ago")
                    except (ValueError, TypeError):
                        pass
                # Handoff flag
                # "handoff" may be a dict with a "present" flag or a bare bool.
                p_handoff = paul_info.get("handoff")
                if isinstance(p_handoff, dict) and p_handoff.get("present"):
                    paul_parts.append("HANDOFF")
                elif isinstance(p_handoff, bool) and p_handoff:
                    paul_parts.append("HANDOFF")
                parts.append(f" PAUL: {' | '.join(paul_parts)}")

            # Revenue signal
            rev = item.get("revenue")
            if rev and rev.get("amount"):
                rev_type = rev.get("type", "")
                parts.append(f" REV: {rev['amount']} ({rev_type})")

            blocked = item.get("blocked_by")
            if blocked:
                parts.append(f" BLOCKED: {blocked}")
            # "ongoing" items intentionally suppress the NEXT action line.
            next_action = item.get("next")
            if next_action and priority != "ongoing":
                parts.append(f" NEXT: {next_action}")
            deadline = item.get("due_date")
            if deadline:
                parts.append(f" DUE: {deadline}")
            # Staleness line: threshold varies per priority (default 7 days).
            days = days_since_update(item)
            threshold = STALE_THRESHOLDS.get(priority, 7)
            if days is not None:
                if days >= threshold:
                    parts.append(f" STALE: {days}d since update (threshold: {threshold}d)")
                else:
                    parts.append(f" updated: {days}d ago")
            lines.append("\n".join(parts))

    # lines always holds at least the Load header here, so this guard
    # always passes; kept for symmetry with the other hooks.
    if lines:
        count = len(active_items)
        summary = "\n".join(lines)
        print(f"""<{SURFACE_NAME}-awareness items="{count}">
{summary}

{BEHAVIOR_DIRECTIVE}
</{SURFACE_NAME}-awareness>""")

    sys.exit(0)
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
# Script entry point when invoked directly as a Claude Code hook.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
APEX Insights — On-demand workspace analytics
|
|
4
|
+
Computes velocity, stall detection, blocking analysis, workload, and dependency chains.
|
|
5
|
+
Invoked by /apex:insights slash command via !command injection.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import sys
|
|
10
|
+
from datetime import datetime, date
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from collections import defaultdict
|
|
13
|
+
|
|
14
|
+
WORKSPACE = Path(__file__).resolve().parent.parent.parent
|
|
15
|
+
PROJECTS_FILE = WORKSPACE / ".base" / "data" / "projects.json"
|
|
16
|
+
WORKSPACE_JSON = WORKSPACE / ".base" / "workspace.json"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def load_json(path):
    """Load and parse the JSON file at *path*.

    Returns the decoded object, or ``None`` if the file cannot be read
    or does not contain valid JSON.
    """
    try:
        raw = Path(path).read_text()
    except OSError:
        return None
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        return None
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def days_ago(iso_str):
    """Return whole days elapsed since an ISO date/datetime string.

    Accepts bare dates ("2024-01-31"), full datetimes, and a trailing
    "Z" UTC marker (normalized to "+00:00"). Returns ``None`` for empty
    or unparseable input.
    """
    if not iso_str:
        return None
    try:
        normalized = iso_str.replace("Z", "+00:00")
        if "T" in normalized:
            when = datetime.fromisoformat(normalized).date()
        else:
            when = date.fromisoformat(normalized[:10])
    except (ValueError, TypeError):
        return None
    return (date.today() - when).days
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def main():
    """Print a plain-text analytics report for the workspace to stdout.

    Sections: velocity of PAUL projects, stall detection, blocking
    analysis, cross-project dependencies, workload by category, revenue
    exposure, and pending satellite handoffs. All data comes from
    projects.json and workspace.json; output is consumed by the
    /apex:insights slash command.
    """
    projects = load_json(PROJECTS_FILE)
    workspace = load_json(WORKSPACE_JSON)

    # projects.json is mandatory; workspace.json is optional (satellites
    # default to an empty mapping below).
    if not projects:
        print("ERROR: Cannot read projects.json")
        sys.exit(0)

    items = projects.get("items", [])
    satellites = (workspace or {}).get("satellites", {})

    # --- VELOCITY ---
    print("## VELOCITY (PAUL Projects)")
    paul_projects = []
    for item in items:
        paul = item.get("paul")
        # Only items explicitly flagged as PAUL projects with a real phase.
        if paul and paul.get("is_paul_project") and paul.get("phase"):
            lp_age = days_ago(paul.get("last_plan_completed_at") or paul.get("last_update"))
            paul_projects.append({
                "id": item["id"],
                "title": item["title"][:35],
                "phase": f"{paul.get('completed_phases', '?')}/{paul.get('total_phases', '?')}",
                "loop": paul.get("loop_position", "?"),
                "last_plan_age": lp_age,
                "handoff": paul.get("handoff", False),
                "status": item.get("status"),
            })

    if paul_projects:
        # Oldest plan first; ageless entries (None) sort as 0, i.e. freshest.
        for p in sorted(paul_projects, key=lambda x: (x["last_plan_age"] or 0), reverse=True):
            age_str = f"{p['last_plan_age']}d ago" if p["last_plan_age"] is not None else "never"
            # "handoff" may be a dict ({"present": ...}) or a bare bool.
            hf = " [HANDOFF]" if (isinstance(p["handoff"], dict) and p["handoff"].get("present")) or p["handoff"] is True else ""
            print(f" {p['id']} {p['title']:35s} Phase {p['phase']:8s} {p['loop']:5s} plan: {age_str}{hf}")
    else:
        print(" No PAUL projects found")
    print()

    # --- STALLS (active projects with plan age > 14d) ---
    print("## STALLS (plan age > 14 days, not completed/deferred)")
    stalls = [p for p in paul_projects
              if p["last_plan_age"] is not None
              and p["last_plan_age"] > 14
              and p["status"] not in ("completed", "deferred", "archived")]
    if stalls:
        for s in sorted(stalls, key=lambda x: x["last_plan_age"], reverse=True):
            print(f" {s['id']} {s['title']:35s} STALLED {s['last_plan_age']}d")
    else:
        print(" No stalls detected")
    print()

    # --- BLOCKING ANALYSIS ---
    print("## BLOCKING ANALYSIS")
    blocked = [i for i in items if i.get("blocked_by") and i.get("status") not in ("completed", "archived")]
    if blocked:
        # Group by blocker
        blockers = defaultdict(list)
        for item in blocked:
            blockers[item["blocked_by"]].append(item)

        for blocker, items_blocked in blockers.items():
            # Revenue at risk: any blocked item carrying a revenue record.
            rev_items = [i for i in items_blocked if i.get("revenue")]
            rev_str = ""
            if rev_items:
                rev_str = f" | Revenue at risk: {', '.join(i['revenue']['amount'] for i in rev_items)}"
            print(f" Blocker: {blocker}")
            for i in items_blocked:
                print(f" {i['id']} {i['title'][:40]}")
            if rev_str:
                print(f" {rev_str}")
            print()
    else:
        print(" No blocked projects")
        print()

    # --- DEPENDENCIES ---
    print("## CROSS-PROJECT DEPENDENCIES")
    has_deps = [i for i in items if i.get("dependencies")]
    if has_deps:
        for item in has_deps:
            for dep in item["dependencies"]:
                # Resolve the dependency target's title; fall back to its id.
                dep_project = next((i for i in items if i["id"] == dep["project_id"]), None)
                dep_title = dep_project["title"][:30] if dep_project else dep["project_id"]
                print(f" {item['id']} {item['title'][:30]} --{dep['type']}--> {dep_title}")
                if dep.get("notes"):
                    print(f" Note: {dep['notes']}")
    else:
        print(" No cross-project dependencies defined")
    print()

    # --- WORKLOAD BY CATEGORY ---
    print("## WORKLOAD BY CATEGORY")
    # Same active filter the active-hook uses: exclude parked/done/initiatives.
    active = [i for i in items if i.get("status") not in ("backlog", "archived", "completed") and i.get("type") != "initiative"]
    cats = defaultdict(int)
    for item in active:
        cats[item.get("category", "uncategorized")] += 1
    # Busiest category first.
    for cat, count in sorted(cats.items(), key=lambda x: -x[1]):
        print(f" {cat}: {count} projects")
    print()

    # --- REVENUE SUMMARY ---
    print("## REVENUE EXPOSURE")
    rev_projects = [i for i in items if i.get("revenue") and i.get("status") not in ("completed", "archived")]
    if rev_projects:
        for item in rev_projects:
            rev = item["revenue"]
            # NOTE(review): status is computed but never used in the output.
            status = item.get("status", "?")
            blocked_flag = " [BLOCKED]" if item.get("blocked_by") else ""
            print(f" {item['id']} {item['title'][:35]} | {rev['amount']} ({rev['type']}){blocked_flag}")
    else:
        print(" No revenue projects active")
    print()

    # --- HANDOFFS ---
    print("## PENDING HANDOFFS")
    handoff_sats = [(name, sat) for name, sat in satellites.items() if sat.get("handoff")]
    if handoff_sats:
        for name, sat in handoff_sats:
            phase = sat.get("phase_name", "?")
            print(f" {name}: Phase {phase} — has HANDOFF waiting")
    else:
        print(" No pending handoffs")
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
if __name__ == "__main__":
    # Never let an analytics failure break the session: surface the error
    # text in the report and still exit 0.
    try:
        main()
    except Exception as e:
        print(f"ERROR: {e}")
    sys.exit(0)
|
|
@@ -0,0 +1,115 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
BASE Hook v2: backlog-hook-v2.py
|
|
4
|
+
Source: .base/data/projects.json (APEX unified project management)
|
|
5
|
+
Output: <backlog-awareness> compact summary grouped by priority
|
|
6
|
+
Filters: only items with status "backlog"
|
|
7
|
+
|
|
8
|
+
Drop-in replacement for backlog-hook.py. Swap in settings.json when ready.
|
|
9
|
+
Legacy backlog-hook.py reads from .base/data/backlog.json (unchanged).
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import sys
|
|
13
|
+
import json
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from datetime import date
|
|
16
|
+
|
|
17
|
+
SURFACE_NAME = "backlog"
|
|
18
|
+
|
|
19
|
+
HOOK_DIR = Path(__file__).resolve().parent
|
|
20
|
+
WORKSPACE_ROOT = HOOK_DIR.parent.parent
|
|
21
|
+
DATA_FILE = WORKSPACE_ROOT / ".base" / "data" / "projects.json"
|
|
22
|
+
|
|
23
|
+
BEHAVIOR_DIRECTIVE = f"""BEHAVIOR: This context is PASSIVE AWARENESS ONLY.
|
|
24
|
+
Do NOT proactively mention these items unless:
|
|
25
|
+
- User explicitly asks (e.g., "what's in the backlog?", "what's queued?")
|
|
26
|
+
- A review_by date has passed AND user hasn't acknowledged it this session
|
|
27
|
+
For details on any item, use base_get_project(id)."""
|
|
28
|
+
|
|
29
|
+
PRIORITY_ORDER = ["high", "medium", "low"]
|
|
30
|
+
|
|
31
|
+
# Staleness thresholds (days since last update)
|
|
32
|
+
STALE_THRESHOLDS = {
|
|
33
|
+
"high": 7,
|
|
34
|
+
"medium": 14,
|
|
35
|
+
"low": 30,
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def days_since_update(item):
    """Days since the item's last update, or ``None`` if unknown.

    Uses ``updated_at`` when present, otherwise ``created_at``; only the
    leading date portion of the ISO timestamp is parsed.
    """
    stamp = item.get("updated_at") or item.get("created_at")
    if not stamp:
        return None
    try:
        # Truncate any "T..." time suffix; parse just the date.
        return (date.today() - date.fromisoformat(stamp[:10])).days
    except (ValueError, TypeError):
        return None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def main():
    """Emit a <backlog-awareness> summary of backlog items to stdout.

    Reads .base/data/projects.json, keeps only items with status
    "backlog", groups them by priority (high/medium/low), and prints a
    compact one-line entry per item. Exits 0 in every path.
    """
    # Drain the hook's stdin payload; the parsed value is unused here.
    try:
        input_data = json.loads(sys.stdin.read())
    except (json.JSONDecodeError, OSError):
        pass

    # Missing or unreadable data file: stay silent.
    if not DATA_FILE.exists():
        sys.exit(0)

    try:
        data = json.loads(DATA_FILE.read_text())
    except (json.JSONDecodeError, OSError):
        sys.exit(0)

    items = data.get("items", [])
    if not items:
        sys.exit(0)

    # Filter: only backlog items
    backlog_items = [i for i in items if i.get("status") == "backlog"]
    if not backlog_items:
        sys.exit(0)

    # Group by priority (missing priority defaults to "medium").
    groups = {}
    for item in backlog_items:
        p = item.get("priority", "medium")
        groups.setdefault(p, []).append(item)

    lines = []
    # NOTE(review): items whose priority is outside PRIORITY_ORDER are
    # counted in the items="N" attribute below but never listed — confirm
    # this is intentional.
    for priority in PRIORITY_ORDER:
        group = groups.get(priority, [])
        if not group:
            continue
        lines.append(f"[{priority.upper()}]")
        for item in group:
            item_id = item.get("id", "?")
            title = item.get("title", "untitled")
            review_by = item.get("review_by")
            entry = f"- [{item_id}] {title}"
            if review_by:
                entry += f" [review by: {review_by}]"
            # Staleness marker with per-priority threshold (default 14 days).
            days = days_since_update(item)
            threshold = STALE_THRESHOLDS.get(priority, 14)
            if days is not None:
                if days >= threshold:
                    entry += f" STALE: {days}d"
                else:
                    entry += f" ({days}d ago)"
            lines.append(entry)

    if lines:
        count = len(backlog_items)
        summary = "\n".join(lines)
        print(f"""<{SURFACE_NAME}-awareness items="{count}">
{summary}

{BEHAVIOR_DIRECTIVE}
</{SURFACE_NAME}-awareness>""")

    sys.exit(0)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
# Script entry point when invoked directly as a Claude Code hook.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
BASE Hook v2: base-pulse-check-v2.py
|
|
4
|
+
Purpose: Workspace health check on session start.
|
|
5
|
+
Reads .base/data/state.json (pre-calculated drift, areas, groom config).
|
|
6
|
+
Much simpler than v1 which parsed STATE.md text + computed drift from file mtimes.
|
|
7
|
+
Triggers: UserPromptSubmit (session context)
|
|
8
|
+
Output: <base-pulse> workspace health status or groom reminder
|
|
9
|
+
|
|
10
|
+
Drop-in replacement for base-pulse-check.py. Swap in settings.json when ready.
|
|
11
|
+
Legacy base-pulse-check.py reads STATE.md + workspace.json (unchanged).
|
|
12
|
+
"""
|
|
13
|
+
|
|
14
|
+
import sys
|
|
15
|
+
import json
|
|
16
|
+
from datetime import datetime, date
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
|
|
19
|
+
HOOK_DIR = Path(__file__).resolve().parent
|
|
20
|
+
WORKSPACE_ROOT = HOOK_DIR.parent.parent
|
|
21
|
+
BASE_DIR = WORKSPACE_ROOT / ".base"
|
|
22
|
+
STATE_FILE = BASE_DIR / "data" / "state.json"
|
|
23
|
+
PROJECTS_FILE = BASE_DIR / "data" / "projects.json"
|
|
24
|
+
CARL_DIR = WORKSPACE_ROOT / ".carl"
|
|
25
|
+
CARL_JSON = CARL_DIR / "carl.json"
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def recalculate_drift(state):
    """Recalculate drift indicators from live data and update state.json.

    This ensures drift score is always fresh on session start, not stale
    from the last time base_update_drift was manually called.

    Reads projects.json for project/backlog staleness and review_by
    overruns, probes each satellite's .paul/paul.json for a stale
    timestamp, writes the refreshed drift block (score + indicators)
    back into *state*, persists it to STATE_FILE (best effort), and
    returns the mutated *state* dict.
    """
    now = date.today()

    def _iso_to_date(ts):
        """Parse an ISO date/datetime string (tolerating a UTC 'Z' suffix)
        into a date; raises ValueError/TypeError on bad input.

        Fix: the original called datetime.fromisoformat(ts) directly, which
        rejects a trailing 'Z' on Python < 3.11 — UTC-suffixed timestamps
        were silently treated as age 0 / never stale. Normalizing here
        matches how the sibling hooks (days_ago, active-hook) parse dates.
        """
        return datetime.fromisoformat(ts.replace("Z", "+00:00")).date()

    # Calculate indicators from projects.json; all default to 0 so a
    # missing/broken data file yields a zero drift contribution.
    indicators = {
        "active_age_days": 0,
        "backlog_age_days": 0,
        "backlog_past_review": 0,
        "orphaned_sessions": 0,
        "untracked_root_files": 0,
        "stale_satellites": 0,
    }

    if PROJECTS_FILE.exists():
        try:
            projects = json.loads(PROJECTS_FILE.read_text())
            items = projects.get("items", [])

            # Active staleness: max days since update for active/in_progress/blocked/in_review projects
            active_statuses = {"in_progress", "blocked", "in_review", "todo"}
            active_ages = []
            backlog_ages = []
            past_review = 0

            for item in items:
                # Only project-type items feed the drift indicators.
                if item.get("type") != "project":
                    continue

                updated = item.get("updated_at")
                if updated:
                    try:
                        age = (now - _iso_to_date(updated)).days
                    except (ValueError, TypeError):
                        # Unparseable timestamp: treat as fresh rather than
                        # inflating the drift score.
                        age = 0
                else:
                    age = 0

                status = item.get("status", "")
                if status in active_statuses:
                    active_ages.append(age)
                elif status == "backlog":
                    backlog_ages.append(age)

                # Check review_by dates
                review_by = item.get("review_by")
                if review_by:
                    try:
                        review_date = date.fromisoformat(review_by)
                        if now > review_date:
                            past_review += 1
                    except (ValueError, TypeError):
                        pass

            indicators["active_age_days"] = max(active_ages) if active_ages else 0
            indicators["backlog_age_days"] = max(backlog_ages) if backlog_ages else 0
            indicators["backlog_past_review"] = past_review

        except (json.JSONDecodeError, OSError):
            pass

    # Stale satellites: check paul.json timestamps (>14 days without update).
    satellites = state.get("satellites", {})
    stale_sats = 0
    for name, sat in satellites.items():
        sat_path = WORKSPACE_ROOT / sat.get("path", "") / ".paul" / "paul.json"
        if sat_path.exists():
            try:
                paul = json.loads(sat_path.read_text())
                ts = paul.get("timestamps", {}).get("updated_at")
                if ts:
                    if (now - _iso_to_date(ts)).days > 14:
                        stale_sats += 1
            except (json.JSONDecodeError, OSError, ValueError, TypeError):
                pass
    indicators["stale_satellites"] = stale_sats

    # Compute score as sum of indicators
    score = sum(v for v in indicators.values() if isinstance(v, (int, float)))

    # Write back to state
    if "drift" not in state:
        state["drift"] = {}
    state["drift"]["score"] = score
    state["drift"]["indicators"] = indicators

    # Best-effort persistence: a read-only filesystem must not kill the hook.
    try:
        state["last_modified"] = datetime.now().isoformat()
        STATE_FILE.write_text(json.dumps(state, indent=2))
    except OSError:
        pass

    return state
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
def main():
    """Emit a <base-pulse> workspace-health message on session start.

    Reads state.json, refreshes the drift block via recalculate_drift,
    then collects up to four notices: groom overdue, drift/stale areas,
    CARL hygiene cadence, and pending staged proposals from carl.json.
    Prints nothing when there is nothing to say; always exits 0.
    """
    if not STATE_FILE.exists():
        sys.exit(0)

    try:
        state = json.loads(STATE_FILE.read_text())
    except (json.JSONDecodeError, OSError):
        sys.exit(0)

    # Self-heal: recalculate drift from live data every session start
    state = recalculate_drift(state)

    output_parts = []
    now = date.today()

    # Check groom overdue
    groom = state.get("groom", {})
    next_due = groom.get("next_groom_due")
    if next_due:
        try:
            due_date = date.fromisoformat(next_due)
            if now > due_date:
                last_groom = groom.get("last_groom", "unknown")
                overdue_days = (now - due_date).days
                output_parts.append(
                    f"BASE: Workspace groom overdue by {overdue_days} days "
                    f"(last groom: {last_groom}). "
                    f"Run /base:groom to maintain workspace health."
                )
        except ValueError:
            pass

    # Drift score and stale areas
    drift = state.get("drift", {})
    drift_score = drift.get("score", 0)
    areas = state.get("areas", {})
    stale_areas = [name for name, area in areas.items() if area.get("status") in ("stale", "critical")]

    if stale_areas:
        output_parts.append(
            f"BASE drift score: {drift_score} | Stale areas: {', '.join(stale_areas)}"
        )
    elif drift_score == 0:
        # All-clear line so a healthy workspace still gets a heartbeat.
        last_groom = groom.get("last_groom", "unknown")
        output_parts.append(
            f"BASE: Drift 0 | Last groom: {last_groom} | All areas current"
        )

    # CARL hygiene reminder (only when the user opted into proactive nags).
    carl_hygiene = state.get("carl_hygiene", {})
    if carl_hygiene.get("proactive", False):
        hygiene_cadence = {"weekly": 7, "bi-weekly": 14, "monthly": 30}.get(
            carl_hygiene.get("cadence", "monthly"), 30
        )
        last_run = carl_hygiene.get("last_run")
        if last_run:
            try:
                last_run_date = date.fromisoformat(last_run)
                days_since = (now - last_run_date).days
                if days_since > hygiene_cadence:
                    output_parts.append(
                        f"CARL hygiene overdue ({days_since}d since last run). Run /base:carl-hygiene"
                    )
            except ValueError:
                output_parts.append("CARL hygiene: last_run date invalid. Run /base:carl-hygiene")
        else:
            output_parts.append("CARL hygiene never run. Run /base:carl-hygiene when ready")

    # Check staging proposals in carl.json
    if CARL_JSON.exists():
        try:
            carl_data = json.loads(CARL_JSON.read_text())
            pending = [p for p in carl_data.get("staging", []) if p.get("status") == "pending"]
            if pending:
                # Fix: the original did `output_parts[-1] += ...`
                # unconditionally, which raises IndexError (crashing the
                # hook) when no earlier section produced a notice. Append
                # a standalone part in that case instead.
                note = f"{len(pending)} staged proposals pending"
                if output_parts:
                    output_parts[-1] += f" | {note}"
                else:
                    output_parts.append(note)
        except (json.JSONDecodeError, OSError):
            pass

    if output_parts:
        print(f"""<base-pulse>
{chr(10).join(output_parts)}
</base-pulse>""")

    sys.exit(0)
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
# Script entry point when invoked directly as a Claude Code hook.
if __name__ == "__main__":
    main()
|