agileflow 2.44.0 → 2.45.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,116 @@
1
#!/bin/bash

# compress-status.sh
# Removes verbose fields from status.json to reduce file size.
# Keeps only essential tracking metadata (the KEEP_FIELDS whitelist in
# the embedded Node script below).

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Default paths (relative to project root)
DOCS_DIR="docs"
STATUS_FILE="$DOCS_DIR/09-agents/status.json"

# Find project root: nearest ancestor directory containing .agileflow
PROJECT_ROOT="$(pwd)"
while [[ ! -d "$PROJECT_ROOT/.agileflow" ]] && [[ "$PROJECT_ROOT" != "/" ]]; do
  PROJECT_ROOT="$(dirname "$PROJECT_ROOT")"
done

if [[ "$PROJECT_ROOT" == "/" ]]; then
  echo -e "${RED}Error: Not in an AgileFlow project (no .agileflow directory found)${NC}"
  exit 1
fi

# Update paths to absolute
STATUS_FILE="$PROJECT_ROOT/$STATUS_FILE"

# A missing status.json is not an error: nothing to compress
if [[ ! -f "$STATUS_FILE" ]]; then
  echo -e "${YELLOW}No status.json found at $STATUS_FILE${NC}"
  exit 0
fi

echo -e "${BLUE}Compressing status.json...${NC}"

# Get original size in bytes
ORIGINAL_SIZE=$(wc -c < "$STATUS_FILE")

# Compress using Node.js. The heredoc delimiter is quoted ('EOF') so the
# shell expands nothing inside it; the path is passed via the environment.
if command -v node &> /dev/null; then
  STATUS_FILE="$STATUS_FILE" node <<'EOF'
const fs = require('fs');

const statusFile = process.env.STATUS_FILE;

// Read status.json
const status = JSON.parse(fs.readFileSync(statusFile, 'utf8'));
const stories = status.stories || {};

// Fields to keep (essential tracking metadata only)
const KEEP_FIELDS = [
  'id',
  'title',
  'status',
  'owner',
  'created_at',
  'updated_at',
  'completed_at',
  'epic',
  'dependencies',
  'blocked_by',
  'blocks',
  'pr_url',
  'test_status',
  'priority',
  'tags'
];

// Compress each story: copy only the whitelisted fields
let removedFields = 0;
const compressed = {};

for (const [storyId, story] of Object.entries(stories)) {
  compressed[storyId] = {};

  for (const field of KEEP_FIELDS) {
    if (story[field] !== undefined) {
      compressed[storyId][field] = story[field];
    }
  }

  removedFields += Object.keys(story).length - Object.keys(compressed[storyId]).length;
}

// Update status.json atomically: write a temp file, then rename over the
// original, so a crash mid-write cannot leave a truncated status.json.
status.stories = compressed;
status.updated = new Date().toISOString();
const tmpFile = statusFile + '.tmp';
fs.writeFileSync(tmpFile, JSON.stringify(status, null, 2));
fs.renameSync(tmpFile, statusFile);

console.log(`\x1b[32m✓ Removed ${removedFields} verbose fields\x1b[0m`);
console.log(`\x1b[34mStories processed: ${Object.keys(compressed).length}\x1b[0m`);
EOF

  # Get new size and report savings. Guard the percentage against a
  # zero-byte original file (division by zero in shell arithmetic).
  NEW_SIZE=$(wc -c < "$STATUS_FILE")
  SAVED=$((ORIGINAL_SIZE - NEW_SIZE))
  if [[ "$ORIGINAL_SIZE" -gt 0 ]]; then
    PERCENT=$((SAVED * 100 / ORIGINAL_SIZE))
  else
    PERCENT=0
  fi

  echo -e "${GREEN}Compression complete!${NC}"
  echo -e "${BLUE}Original size: ${ORIGINAL_SIZE} bytes${NC}"
  echo -e "${BLUE}New size: ${NEW_SIZE} bytes${NC}"
  echo -e "${BLUE}Saved: ${SAVED} bytes (${PERCENT}%)${NC}"
else
  echo -e "${RED}Error: Node.js not found. Cannot compress status.json.${NC}"
  exit 1
fi

exit 0
@@ -0,0 +1,264 @@
1
+ #!/bin/bash
2
+ #
3
+ # expertise-metrics.sh - Metrics dashboard for Agent Expert system
4
+ #
5
+ # Purpose: Track the health and activity of Agent Expert expertise files
6
+ #
7
+ # Metrics tracked:
8
+ # 1. Total experts count
9
+ # 2. Experts with learnings (self-improved at least once)
10
+ # 3. Average file size (lines)
11
+ # 4. Staleness distribution (how old are the files)
12
+ # 5. Recent activity (updated in last 7 days)
13
+ #
14
+ # Usage:
15
+ # ./scripts/expertise-metrics.sh # Show metrics dashboard
16
+ # ./scripts/expertise-metrics.sh --json # Output as JSON (for logging)
17
+ # ./scripts/expertise-metrics.sh --csv # Output as CSV
18
+ # ./scripts/expertise-metrics.sh --help # Show help
19
+ #
20
+
21
set -e

# Configuration: each expert domain is a subdirectory under this path,
# holding an expertise.yaml file
EXPERTS_DIR="packages/cli/src/core/experts"

# Colors for dashboard output
BLUE='\033[0;34m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'
31
+
32
# Print usage/help text for this script.
show_help() {
  cat <<HELP
Usage: $0 [--json | --csv | --help]

Metrics dashboard for Agent Expert expertise files

Options:
  --json    Output metrics as JSON
  --csv     Output metrics as CSV
  --help    Show this help message

Metrics tracked:
  - Total experts count
  - Experts with learnings (self-improved)
  - Average file size (lines)
  - Staleness distribution
  - Recent activity (last 7 days)
HELP
}
50
+
51
# get_yaml_field FILE FIELD
# Print the value of a top-level "FIELD: value" line, with surrounding
# double quotes stripped. Prints nothing when the field is absent.
get_yaml_field() {
  local file="$1"
  local field="$2"
  sed -n "s/^${field}:[[:space:]]*//p" "$file" 2>/dev/null | tr -d '"'
}
57
+
58
# has_learnings FILE
# Return 0 (true) when the expertise file has at least one recorded
# learning, 1 (false) when the learnings list is empty or missing.
has_learnings() {
  local file="$1"
  # Fast path: an explicitly empty inline list means no learnings
  if grep -q "^learnings: \[\]" "$file" 2>/dev/null; then
    return 1 # Empty
  fi
  # Otherwise scan the block between "learnings:" and the next top-level
  # key (a line starting with a lowercase letter), skipping comments and
  # blank lines, and look for at least one list item.
  local after_learnings
  after_learnings=$(sed -n '/^learnings:/,/^[a-z]/p' "$file" | grep -v "^#" | grep -v "^learnings:" | grep -v "^$" | grep "^ -" | head -1)
  if [ -n "$after_learnings" ]; then
    return 0 # Has content
  fi
  return 1 # Empty
}
73
+
74
# get_line_count FILE
# Print the line count of FILE without wc's leading space padding.
get_line_count() {
  local n
  n=$(wc -l < "$1")
  echo $((n))
}
78
+
79
# days_since YYYY-MM-DD
# Print the whole number of days between the given date and now.
# Prints 999 (treated as "very stale") for missing or unparseable dates.
days_since() {
  local date_str="$1"
  local date_epoch now_epoch

  if [[ "$date_str" =~ ^[0-9]{4}-[0-9]{2}-[0-9]{2}$ ]]; then
    # Try GNU date (-d) first, then BSD/macOS date (-j -f). The trailing
    # "|| date_epoch=" prevents set -e from aborting when both fail.
    date_epoch=$(date -d "$date_str" +%s 2>/dev/null || date -j -f "%Y-%m-%d" "$date_str" +%s 2>/dev/null) || date_epoch=""
  else
    echo "999"
    return
  fi

  # A date that matches the regex can still be invalid (e.g. 2024-13-40),
  # leaving date_epoch empty; the arithmetic below would then be a syntax
  # error and kill the whole script under set -e. Report "very stale".
  if [[ -z "$date_epoch" ]]; then
    echo "999"
    return
  fi

  now_epoch=$(date +%s)
  echo $(( (now_epoch - date_epoch) / 86400 ))
}
94
+
95
# count_learnings FILE
# Print the number of learnings entries (lines starting with " - date:").
# Prints 0 when there are no entries or the file is unreadable.
count_learnings() {
  local file="$1"
  local count
  if ! count=$(grep -c "^ - date:" "$file" 2>/dev/null); then
    count=0
  fi
  echo "$count"
}
103
+
104
# collect_metrics [json|csv|dashboard]
# Scan every expertise.yaml under EXPERTS_DIR and print the metrics in
# the requested format (default: human-readable dashboard).
collect_metrics() {
  local script_dir
  script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  # Run from the repository root so the relative EXPERTS_DIR resolves
  cd "$script_dir/.."

  # Initialize counters
  local total=0
  local with_learnings=0
  local total_lines=0
  local recent_updates=0
  local stale_7d=0    # 8-30 days old
  local stale_30d=0   # 31-90 days old
  local stale_90d=0   # more than 90 days old
  local total_learnings=0

  # Collect data ("domains"/"details" keep per-expert rows; currently
  # only the aggregates are reported)
  local domains=()
  local details=()

  for dir in "$EXPERTS_DIR"/*/; do
    local domain
    domain=$(basename "$dir")
    [ "$domain" = "templates" ] && continue

    local file="$dir/expertise.yaml"
    [ ! -f "$file" ] && continue

    total=$((total + 1))
    domains+=("$domain")

    # File size
    local lines
    lines=$(get_line_count "$file")
    total_lines=$((total_lines + lines))

    # Learnings
    local learnings_count
    learnings_count=$(count_learnings "$file")
    total_learnings=$((total_learnings + learnings_count))
    if [ "$learnings_count" -gt 0 ]; then
      with_learnings=$((with_learnings + 1))
    fi

    # Staleness bucket, derived from the last_updated YAML field
    local last_updated days_old
    last_updated=$(get_yaml_field "$file" "last_updated")
    days_old=$(days_since "$last_updated")

    if [ "$days_old" -le 7 ]; then
      recent_updates=$((recent_updates + 1))
    elif [ "$days_old" -le 30 ]; then
      stale_7d=$((stale_7d + 1))
    elif [ "$days_old" -le 90 ]; then
      stale_30d=$((stale_30d + 1))
    else
      stale_90d=$((stale_90d + 1))
    fi

    # Store detail
    details+=("$domain,$lines,$learnings_count,$days_old")
  done

  # Calculate averages and rates. Guard every division: when no experts
  # are found (total=0) awk aborts with "division by zero" and emits an
  # empty value, which used to produce malformed JSON/CSV output.
  local avg_lines=0
  local rate_1dp="0.0"
  local rate_0dp="0"
  if [ "$total" -gt 0 ]; then
    avg_lines=$((total_lines / total))
    rate_1dp=$(awk -v w="$with_learnings" -v t="$total" 'BEGIN {printf "%.1f", (w / t) * 100}')
    rate_0dp=$(awk -v w="$with_learnings" -v t="$total" 'BEGIN {printf "%.0f", (w / t) * 100}')
  fi

  # Output based on format
  local format="${1:-dashboard}"
  local timestamp
  timestamp=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

  case "$format" in
    json)
      echo "{"
      echo "  \"timestamp\": \"$timestamp\","
      echo "  \"total_experts\": $total,"
      echo "  \"with_learnings\": $with_learnings,"
      echo "  \"self_improve_rate\": $rate_1dp,"
      echo "  \"total_learnings\": $total_learnings,"
      echo "  \"avg_file_lines\": $avg_lines,"
      echo "  \"staleness\": {"
      echo "    \"recent_7d\": $recent_updates,"
      echo "    \"stale_8_30d\": $stale_7d,"
      echo "    \"stale_31_90d\": $stale_30d,"
      echo "    \"stale_90d_plus\": $stale_90d"
      echo "  }"
      echo "}"
      ;;
    csv)
      echo "timestamp,total_experts,with_learnings,self_improve_rate,total_learnings,avg_file_lines,recent_7d,stale_8_30d,stale_31_90d,stale_90d_plus"
      echo "$timestamp,$total,$with_learnings,$rate_1dp,$total_learnings,$avg_lines,$recent_updates,$stale_7d,$stale_30d,$stale_90d"
      ;;
    dashboard)
      echo ""
      echo -e "${BLUE}╔════════════════════════════════════════════════╗${NC}"
      echo -e "${BLUE}║        Agent Expert Metrics Dashboard          ║${NC}"
      echo -e "${BLUE}╚════════════════════════════════════════════════╝${NC}"
      echo ""
      echo -e "${GREEN}Summary${NC}"
      echo "─────────────────────────────────────────────────"
      printf "%-30s %s\n" "Total Experts:" "$total"
      printf "%-30s %s (%s%%)\n" "With Learnings:" "$with_learnings" "$rate_0dp"
      printf "%-30s %s\n" "Total Learnings Recorded:" "$total_learnings"
      printf "%-30s %s lines\n" "Avg File Size:" "$avg_lines"
      echo ""
      echo -e "${YELLOW}Staleness Distribution${NC}"
      echo "─────────────────────────────────────────────────"
      printf "%-30s %s\n" "Updated in last 7 days:" "$recent_updates"
      printf "%-30s %s\n" "8-30 days old:" "$stale_7d"
      printf "%-30s %s\n" "31-90 days old:" "$stale_30d"
      printf "%-30s %s\n" "90+ days old:" "$stale_90d"
      echo ""
      echo -e "${BLUE}Self-Improve Health${NC}"
      echo "─────────────────────────────────────────────────"
      if [ "$with_learnings" -eq 0 ]; then
        echo "No agents have self-improved yet"
        echo "  Run agents and they will update expertise.yaml"
      elif [ "$with_learnings" -lt $((total / 2)) ]; then
        echo "$with_learnings/$total agents have started learning"
        echo "  Self-improve rate: ${rate_0dp}%"
      else
        # total > 0 here (with_learnings > 0 implies at least one expert)
        local avg
        avg=$(awk -v l="$total_learnings" -v t="$total" 'BEGIN {printf "%.1f", l / t}')
        echo "Good adoption: $with_learnings/$total agents learning"
        echo "  Average learnings per expert: $avg"
      fi
      echo ""
      echo "Generated: $timestamp"
      ;;
  esac
}
244
+
245
# Entry point: dispatch on the first CLI argument.
main() {
  local mode="dashboard"
  case "${1:-}" in
    --help|-h)
      show_help
      exit 0
      ;;
    --json)
      mode="json"
      ;;
    --csv)
      mode="csv"
      ;;
  esac
  collect_metrics "$mode"
}

main "$@"
@@ -0,0 +1,209 @@
1
+ #!/usr/bin/env node
2
+
3
+ /**
4
+ * get-env.js - Helper script to output environment information
5
+ *
6
+ * This script can be called from hooks or other automation to get
7
+ * consistent environment information about the AgileFlow project.
8
+ *
9
+ * Usage:
10
+ * node scripts/get-env.js [--json] [--compact]
11
+ *
12
+ * Flags:
13
+ * --json Output as JSON
14
+ * --compact Minimal output for status line
15
+ */
16
+
17
+ const fs = require('fs');
18
+ const path = require('path');
19
+ const os = require('os');
20
+ const { execSync } = require('child_process');
21
+
22
/**
 * Read and parse a JSON file, best-effort.
 * @param {string} filePath - Absolute path to a JSON file.
 * @returns {object} Parsed object, or {} when missing/unreadable/malformed.
 */
function readJsonSafe(filePath) {
  try {
    return JSON.parse(fs.readFileSync(filePath, 'utf8'));
  } catch (err) {
    // Ignore if not found or invalid; callers fall back to defaults
    return {};
  }
}

/**
 * Collect git branch, short commit hash, and recent commit subjects.
 * Best-effort: returns 'unknown' placeholders when git is unavailable.
 * @param {string} rootDir - Directory to run git commands in.
 * @returns {{branch: string, commit: string, recentCommits: string[]}}
 */
function getGitInfo(rootDir) {
  let branch = 'unknown';
  let commit = 'unknown';
  let recentCommits = [];

  try {
    const opts = { cwd: rootDir, encoding: 'utf8' };
    branch = execSync('git branch --show-current', opts).trim();
    commit = execSync('git rev-parse --short HEAD', opts).trim();

    // Get recent commits (last 5), one "hash subject" line each
    const commitLog = execSync('git log --oneline -5 2>/dev/null', opts).trim();
    recentCommits = commitLog.split('\n').filter(Boolean);
  } catch (err) {
    // Ignore if git not available or not a repository
  }

  return { branch, commit, recentCommits };
}

/**
 * Summarize AgileFlow tracking state from docs/09-agents/status.json.
 * Best-effort: returns empty/zero values when the file is absent or bad.
 * @param {string} rootDir - Project root directory.
 * @returns {{activeStories: Array, wipCount: number, blockedCount: number, activeEpics: Array}}
 */
function getAgileflowStatus(rootDir) {
  const result = { activeStories: [], wipCount: 0, blockedCount: 0, activeEpics: [] };

  try {
    const statusPath = path.join(rootDir, 'docs/09-agents/status.json');
    if (!fs.existsSync(statusPath)) {
      return result;
    }
    const status = JSON.parse(fs.readFileSync(statusPath, 'utf8'));

    // In-progress stories count toward WIP; blocked ones are tallied
    if (status.stories) {
      Object.entries(status.stories).forEach(([id, story]) => {
        if (story.status === 'in_progress') {
          result.activeStories.push({ id, title: story.title, owner: story.owner });
          result.wipCount++;
        }
        if (story.status === 'blocked') {
          result.blockedCount++;
        }
      });
    }

    // Any epic not yet complete is considered active
    if (status.epics) {
      Object.entries(status.epics).forEach(([id, epic]) => {
        if (epic.status !== 'complete') {
          result.activeEpics.push({ id, title: epic.title });
        }
      });
    }
  } catch (err) {
    // Ignore if status.json not available or malformed
  }

  return result;
}

/**
 * Gather environment information about the AgileFlow project:
 * package metadata, git state, AgileFlow story status, and system info.
 * All sub-collectors are best-effort and never throw for missing inputs.
 * @returns {object} Structured environment info.
 */
function getProjectInfo() {
  const rootDir = path.resolve(__dirname, '..');

  // Package metadata: CLI package takes precedence over the repo root
  const cliPackage = readJsonSafe(path.join(rootDir, 'packages/cli/package.json'));
  const rootPackage = readJsonSafe(path.join(rootDir, 'package.json'));

  const git = getGitInfo(rootDir);
  const agileflow = getAgileflowStatus(rootDir);

  return {
    project: {
      name: cliPackage.name || rootPackage.name || 'AgileFlow',
      version: cliPackage.version || rootPackage.version || 'unknown',
      description: cliPackage.description || rootPackage.description || '',
      rootDir: rootDir,
    },
    git: {
      branch: git.branch,
      commit: git.commit,
      recentCommits: git.recentCommits,
    },
    agileflow: {
      activeStories: agileflow.activeStories,
      wipCount: agileflow.wipCount,
      blockedCount: agileflow.blockedCount,
      activeEpics: agileflow.activeEpics,
    },
    system: {
      node: process.version,
      platform: os.platform(),
      arch: os.arch(),
      hostname: os.hostname(),
      user: os.userInfo().username,
    },
    timestamp: new Date().toISOString(),
  };
}
135
+
136
/**
 * Render project info in one of three formats.
 * @param {object} info - Result of getProjectInfo().
 * @param {boolean} [asJson=false] - Pretty-printed JSON when true.
 * @param {boolean} [compact=false] - Single-line status-line output when true.
 * @returns {string} Formatted output (dashboard format uses ANSI colors).
 */
function formatOutput(info, asJson = false, compact = false) {
  if (asJson) {
    return JSON.stringify(info, null, 2);
  }

  if (compact) {
    // Minimal single-line output for a status line
    const [story] = info.agileflow.activeStories;
    const storyStr = story
      ? `${story.id}: ${story.title.substring(0, 30)}`
      : 'No active story';
    return `[${info.git.branch}] ${storyStr} | WIP: ${info.agileflow.wipCount}`;
  }

  // ANSI colors (including brand color #e8683a as RGB)
  const palette = {
    reset: '\x1b[0m',
    bold: '\x1b[1m',
    dim: '\x1b[2m',
    green: '\x1b[32m',
    yellow: '\x1b[33m',
    blue: '\x1b[34m',
    cyan: '\x1b[36m',
    red: '\x1b[31m',
    brand: '\x1b[38;2;232;104;58m', // #e8683a - AgileFlow brand orange
  };

  const out = [];

  // Header line: brand-colored name, dim version, branch (green on main,
  // cyan otherwise), dim commit hash
  const branchColor = info.git.branch === 'main' ? palette.green : palette.cyan;
  out.push(
    `${palette.brand}${palette.bold}${info.project.name}${palette.reset} ` +
    `${palette.dim}v${info.project.version}${palette.reset} | ` +
    `${branchColor}${info.git.branch}${palette.reset} ` +
    `${palette.dim}(${info.git.commit})${palette.reset}`
  );

  // Status line: yellow WIP count (dim placeholder when idle), red blocked
  const parts = [];
  if (info.agileflow.wipCount > 0) {
    parts.push(`${palette.yellow}WIP: ${info.agileflow.wipCount}${palette.reset}`);
  } else {
    parts.push(`${palette.dim}No active work${palette.reset}`);
  }
  if (info.agileflow.blockedCount > 0) {
    parts.push(`${palette.red}Blocked: ${info.agileflow.blockedCount}${palette.reset}`);
  }
  out.push(parts.join(' | '));

  // First active story (if any) with a blue label
  if (info.agileflow.activeStories.length > 0) {
    const current = info.agileflow.activeStories[0];
    out.push(`${palette.blue}Current:${palette.reset} ${current.id} - ${current.title}`);
  }

  // Most recent commit, dimmed
  if (info.git.recentCommits.length > 0) {
    out.push(`${palette.dim}Last: ${info.git.recentCommits[0]}${palette.reset}`);
  }

  return out.join('\n');
}
191
+
192
// Main execution: when run as a CLI (not require()d), print environment
// info in the format selected by --json / --compact flags.
if (require.main === module) {
  const args = process.argv.slice(2);

  try {
    const info = getProjectInfo();
    const rendered = formatOutput(info, args.includes('--json'), args.includes('--compact'));
    console.log(rendered);
    process.exit(0);
  } catch (err) {
    console.error('Error getting environment info:', err.message);
    process.exit(1);
  }
}

// Export for use as module
module.exports = { getProjectInfo, formatOutput };