claude-flow-novice 2.10.8 → 2.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/README.md +157 -0
- package/.claude/hooks/cfn-invoke-post-edit.sh +1 -1
- package/.claude/hooks/cfn-invoke-pre-edit.sh +88 -0
- package/.claude/skills/cfn-agent-spawning/spawn-worker.sh +176 -0
- package/.claude/skills/cfn-backlog-management/SKILL.md +199 -0
- package/.claude/skills/cfn-backlog-management/add-backlog-item.sh +210 -0
- package/claude-assets/agents/cfn-dev-team/architecture/base-template-generator.md +7 -21
- package/claude-assets/agents/cfn-dev-team/testing/test-validation-agent.md +312 -0
- package/claude-assets/agents/cfn-dev-team/utility/agent-builder.md +480 -115
- package/claude-assets/agents/csuite/cto-agent.md +371 -0
- package/claude-assets/agents/marketing_hybrid/cost_tracker.md +13 -0
- package/claude-assets/agents/marketing_hybrid/docker_deployer.md +13 -0
- package/claude-assets/agents/marketing_hybrid/zai_worker_spawner.md +13 -0
- package/claude-assets/hooks/cfn-invoke-post-edit.sh +1 -1
- package/claude-assets/hooks/cfn-invoke-pre-edit.sh +88 -0
- package/claude-assets/skills/cfn-agent-spawning/spawn-worker.sh +176 -0
- package/claude-assets/skills/cfn-backlog-management/SKILL.md +199 -0
- package/claude-assets/skills/cfn-backlog-management/add-backlog-item.sh +210 -0
- package/claude-assets/skills/pre-edit-backup/backup.sh +130 -0
- package/claude-assets/skills/pre-edit-backup/cleanup.sh +155 -0
- package/claude-assets/skills/pre-edit-backup/restore.sh +128 -0
- package/claude-assets/skills/pre-edit-backup/revert-file.sh +168 -0
- package/dist/agents/agent-loader.js +315 -0
- package/dist/agents/agent-loader.js.map +1 -1
- package/package.json +3 -2
- package/scripts/init-project.js +38 -3
- package/scripts/marketing_hybrid_deployment.sh +45 -0
- package/scripts/redis-prometheus-exporter.sh +33 -0
- package/scripts/track-zai-costs.sh +19 -0
- package/claude-assets/commands/agents/README.md +0 -10
- package/claude-assets/commands/agents/agent-capabilities.md +0 -21
- package/claude-assets/commands/agents/agent-coordination.md +0 -28
- package/claude-assets/commands/agents/agent-spawning.md +0 -28
- package/claude-assets/commands/agents/agent-types.md +0 -26
- package/claude-assets/commands/analysis/COMMAND_COMPLIANCE_REPORT.md +0 -54
- package/claude-assets/commands/analysis/README.md +0 -9
- package/claude-assets/commands/analysis/bottleneck-detect.md +0 -162
- package/claude-assets/commands/analysis/performance-bottlenecks.md +0 -59
- package/claude-assets/commands/analysis/performance-report.md +0 -25
- package/claude-assets/commands/analysis/token-efficiency.md +0 -45
- package/claude-assets/commands/analysis/token-usage.md +0 -25
- package/claude-assets/commands/automation/README.md +0 -9
- package/claude-assets/commands/automation/auto-agent.md +0 -122
- package/claude-assets/commands/automation/self-healing.md +0 -106
- package/claude-assets/commands/automation/session-memory.md +0 -90
- package/claude-assets/commands/automation/smart-agents.md +0 -73
- package/claude-assets/commands/automation/smart-spawn.md +0 -25
- package/claude-assets/commands/automation/workflow-select.md +0 -25
- package/claude-assets/commands/claude-md.js +0 -237
- package/claude-assets/commands/claude-soul.js +0 -28
- package/claude-assets/commands/cli-integration.js +0 -216
- package/claude-assets/commands/coordination/README.md +0 -9
- package/claude-assets/commands/coordination/agent-spawn.md +0 -25
- package/claude-assets/commands/coordination/coordination-system.md +0 -88
- package/claude-assets/commands/coordination/init.md +0 -44
- package/claude-assets/commands/coordination/orchestrate.md +0 -43
- package/claude-assets/commands/coordination/spawn.md +0 -45
- package/claude-assets/commands/coordination/swarm-init.md +0 -85
- package/claude-assets/commands/coordination/task-orchestrate.md +0 -25
- package/claude-assets/commands/github/README.md +0 -11
- package/claude-assets/commands/github/code-review-swarm.md +0 -514
- package/claude-assets/commands/github/code-review.md +0 -25
- package/claude-assets/commands/github/github-modes.md +0 -147
- package/claude-assets/commands/github/github-swarm.md +0 -121
- package/claude-assets/commands/github/issue-tracker.md +0 -292
- package/claude-assets/commands/github/issue-triage.md +0 -25
- package/claude-assets/commands/github/multi-repo-swarm.md +0 -519
- package/claude-assets/commands/github/pr-enhance.md +0 -26
- package/claude-assets/commands/github/pr-manager.md +0 -170
- package/claude-assets/commands/github/project-board-sync.md +0 -471
- package/claude-assets/commands/github/release-manager.md +0 -338
- package/claude-assets/commands/github/release-swarm.md +0 -544
- package/claude-assets/commands/github/repo-analyze.md +0 -25
- package/claude-assets/commands/github/repo-architect.md +0 -367
- package/claude-assets/commands/github/swarm-issue.md +0 -482
- package/claude-assets/commands/github/swarm-pr.md +0 -285
- package/claude-assets/commands/github/sync-coordinator.md +0 -301
- package/claude-assets/commands/github/workflow-automation.md +0 -442
- package/claude-assets/commands/github.js +0 -638
- package/claude-assets/commands/hive-mind/README.md +0 -17
- package/claude-assets/commands/hive-mind/hive-mind-consensus.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-init.md +0 -18
- package/claude-assets/commands/hive-mind/hive-mind-memory.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-metrics.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-resume.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-sessions.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-spawn.md +0 -21
- package/claude-assets/commands/hive-mind/hive-mind-status.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-stop.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind-wizard.md +0 -8
- package/claude-assets/commands/hive-mind/hive-mind.md +0 -27
- package/claude-assets/commands/hooks/README.md +0 -11
- package/claude-assets/commands/hooks/overview.md +0 -58
- package/claude-assets/commands/hooks/post-edit.md +0 -117
- package/claude-assets/commands/hooks/post-task.md +0 -112
- package/claude-assets/commands/hooks/pre-edit.md +0 -113
- package/claude-assets/commands/hooks/pre-task.md +0 -111
- package/claude-assets/commands/hooks/session-end.md +0 -118
- package/claude-assets/commands/hooks/session-start.md +0 -9
- package/claude-assets/commands/hooks/setup.md +0 -103
- package/claude-assets/commands/hooks.js +0 -651
- package/claude-assets/commands/index.js +0 -119
- package/claude-assets/commands/memory/README.md +0 -9
- package/claude-assets/commands/memory/memory-bank.md +0 -58
- package/claude-assets/commands/memory/memory-persist.md +0 -25
- package/claude-assets/commands/memory/memory-search.md +0 -25
- package/claude-assets/commands/memory/memory-usage.md +0 -25
- package/claude-assets/commands/memory/neural.md +0 -47
- package/claude-assets/commands/memory/usage.md +0 -46
- package/claude-assets/commands/monitoring/README.md +0 -9
- package/claude-assets/commands/monitoring/agent-metrics.md +0 -25
- package/claude-assets/commands/monitoring/agents.md +0 -44
- package/claude-assets/commands/monitoring/real-time-view.md +0 -25
- package/claude-assets/commands/monitoring/status.md +0 -46
- package/claude-assets/commands/monitoring/swarm-monitor.md +0 -25
- package/claude-assets/commands/neural.js +0 -572
- package/claude-assets/commands/optimization/README.md +0 -9
- package/claude-assets/commands/optimization/auto-topology.md +0 -62
- package/claude-assets/commands/optimization/cache-manage.md +0 -25
- package/claude-assets/commands/optimization/parallel-execute.md +0 -25
- package/claude-assets/commands/optimization/parallel-execution.md +0 -50
- package/claude-assets/commands/optimization/topology-optimize.md +0 -25
- package/claude-assets/commands/pair/README.md +0 -261
- package/claude-assets/commands/pair/commands.md +0 -546
- package/claude-assets/commands/pair/config.md +0 -510
- package/claude-assets/commands/pair/examples.md +0 -512
- package/claude-assets/commands/pair/modes.md +0 -348
- package/claude-assets/commands/pair/session.md +0 -407
- package/claude-assets/commands/pair/start.md +0 -209
- package/claude-assets/commands/parse-epic.js +0 -180
- package/claude-assets/commands/performance.js +0 -582
- package/claude-assets/commands/register-all-commands.js +0 -320
- package/claude-assets/commands/register-claude-md.js +0 -82
- package/claude-assets/commands/register-claude-soul.js +0 -80
- package/claude-assets/commands/sparc/analyzer.md +0 -52
- package/claude-assets/commands/sparc/architect.md +0 -53
- package/claude-assets/commands/sparc/batch-executor.md +0 -54
- package/claude-assets/commands/sparc/coder.md +0 -54
- package/claude-assets/commands/sparc/debugger.md +0 -54
- package/claude-assets/commands/sparc/designer.md +0 -53
- package/claude-assets/commands/sparc/documenter.md +0 -54
- package/claude-assets/commands/sparc/innovator.md +0 -54
- package/claude-assets/commands/sparc/memory-manager.md +0 -54
- package/claude-assets/commands/sparc/optimizer.md +0 -54
- package/claude-assets/commands/sparc/orchestrator.md +0 -132
- package/claude-assets/commands/sparc/researcher.md +0 -54
- package/claude-assets/commands/sparc/reviewer.md +0 -54
- package/claude-assets/commands/sparc/sparc-modes.md +0 -174
- package/claude-assets/commands/sparc/swarm-coordinator.md +0 -54
- package/claude-assets/commands/sparc/tdd.md +0 -54
- package/claude-assets/commands/sparc/tester.md +0 -54
- package/claude-assets/commands/sparc/workflow-manager.md +0 -54
- package/claude-assets/commands/sparc.js +0 -110
- package/claude-assets/commands/stream-chain/pipeline.md +0 -121
- package/claude-assets/commands/stream-chain/run.md +0 -70
- package/claude-assets/commands/swarm/README.md +0 -15
- package/claude-assets/commands/swarm/analysis.md +0 -95
- package/claude-assets/commands/swarm/development.md +0 -96
- package/claude-assets/commands/swarm/examples.md +0 -168
- package/claude-assets/commands/swarm/maintenance.md +0 -102
- package/claude-assets/commands/swarm/optimization.md +0 -117
- package/claude-assets/commands/swarm/research.md +0 -136
- package/claude-assets/commands/swarm/swarm-analysis.md +0 -8
- package/claude-assets/commands/swarm/swarm-background.md +0 -8
- package/claude-assets/commands/swarm/swarm-init.md +0 -19
- package/claude-assets/commands/swarm/swarm-modes.md +0 -8
- package/claude-assets/commands/swarm/swarm-monitor.md +0 -8
- package/claude-assets/commands/swarm/swarm-spawn.md +0 -19
- package/claude-assets/commands/swarm/swarm-status.md +0 -8
- package/claude-assets/commands/swarm/swarm-strategies.md +0 -8
- package/claude-assets/commands/swarm/swarm.md +0 -27
- package/claude-assets/commands/swarm/testing.md +0 -131
- package/claude-assets/commands/swarm.js +0 -423
- package/claude-assets/commands/testing/playwright-e2e.md +0 -288
- package/claude-assets/commands/training/README.md +0 -9
- package/claude-assets/commands/training/model-update.md +0 -25
- package/claude-assets/commands/training/neural-patterns.md +0 -74
- package/claude-assets/commands/training/neural-train.md +0 -25
- package/claude-assets/commands/training/pattern-learn.md +0 -25
- package/claude-assets/commands/training/specialization.md +0 -63
- package/claude-assets/commands/truth/start.md +0 -143
- package/claude-assets/commands/validate-commands.js +0 -223
- package/claude-assets/commands/verify/check.md +0 -50
- package/claude-assets/commands/verify/start.md +0 -128
- package/claude-assets/commands/workflow.js +0 -606
- package/claude-assets/commands/workflows/README.md +0 -9
- package/claude-assets/commands/workflows/development.md +0 -78
- package/claude-assets/commands/workflows/research.md +0 -63
- package/claude-assets/commands/workflows/workflow-create.md +0 -25
- package/claude-assets/commands/workflows/workflow-execute.md +0 -25
- package/claude-assets/commands/workflows/workflow-export.md +0 -25
- package/claude-assets/hooks/post-edit.config.json +0 -12
- package/claude-assets/skills/team-provider-routing/spawn-worker.sh +0 -91
cfn-backlog-management/SKILL.md (new file; listed above under both .claude/skills/ and claude-assets/skills/):

````diff
@@ -0,0 +1,199 @@
+---
+skill_id: cfn-backlog-management
+name: CFN Backlog Management
+version: 1.0.0
+category: coordination
+tags: [backlog, documentation, sprint-planning, technical-debt]
+dependencies: []
+---
+
+# CFN Backlog Management Skill
+
+## Purpose
+Systematically capture and track backlogged items during CFN sprints to prevent work from being forgotten. Provides centralized documentation of deferred tasks with context, rationale, and proposed solutions.
+
+## Problem Solved
+During CFN Loop execution, agents frequently identify improvements, optimizations, or edge cases that should be addressed but are out of scope for the current sprint. Without systematic capture, these items are lost in chat history or forgotten entirely.
+
+## When to Use
+- **During CFN sprints** when identifying work that should be deferred
+- **After consensus** when validators identify future improvements
+- **During retrospectives** when documenting technical debt
+- **Architecture reviews** when noting long-term refactoring needs
+
+## Interface
+
+### Primary Script: `add-backlog-item.sh`
+
+**Required Parameters:**
+- `--item`: Brief description of backlogged work (1-2 sentences)
+- `--why`: Rationale for deferring (why not now?)
+- `--solution`: Proposed implementation approach
+
+**Optional Parameters:**
+- `--sprint`: Sprint identifier (default: auto-detected from context)
+- `--priority`: P0-P3 (default: P2)
+- `--tags`: Comma-separated tags (e.g., "optimization,redis,testing")
+- `--category`: Feature/Bug/Technical-Debt/Optimization (default: Technical-Debt)
+
+**Usage:**
+```bash
+./.claude/skills/cfn-backlog-management/add-backlog-item.sh \
+  --sprint "Sprint 10" \
+  --item "Implement Redis connection pooling for multi-agent coordination" \
+  --why "Current single-connection model causes bottlenecks with 10+ agents, but Sprint 10 scope limited to 3-agent validation" \
+  --solution "Use ioredis library with configurable pool size (min: 5, max: 20). Add pool metrics to monitoring dashboard" \
+  --priority "P2" \
+  --tags "optimization,redis,performance" \
+  --category "Optimization"
+```
+
+### Output Location
+All backlog items are appended to: `readme/BACKLOG.md`
+
+## Backlog File Structure
+
+```markdown
+# Claude Flow Novice - Backlog
+
+Last Updated: 2025-10-31
+
+## Active Items
+
+### P0 - Critical
+[Items requiring immediate attention in next sprint]
+
+### P1 - High Priority
+[Items to address within 2-3 sprints]
+
+### P2 - Medium Priority
+[Items to address when capacity allows]
+
+### P3 - Low Priority / Nice-to-Have
+[Items for future consideration]
+
+## Completed Items
+[Moved here when implemented, with resolution sprint noted]
+
+---
+
+## Item Template
+
+**[PRIORITY] - [Item Title]**
+- **Sprint Backlogged**: Sprint X
+- **Category**: Feature/Bug/Technical-Debt/Optimization
+- **Description**: What needs to be done
+- **Rationale**: Why it was deferred
+- **Proposed Solution**: How to implement
+- **Tags**: `tag1`, `tag2`, `tag3`
+- **Status**: Backlogged | In Progress | Completed
+- **Date Added**: YYYY-MM-DD
+```
+
+## Validation Rules
+
+The skill enforces:
+1. **All required fields present** (item, why, solution)
+2. **Item description clarity** (≥10 characters, ≤500 characters)
+3. **Rationale specificity** (must explain deferral reason, not just "out of scope")
+4. **Solution actionability** (must include concrete implementation approach)
+5. **No duplicates** (checks existing BACKLOG.md for similar items)
+
+## Integration with CFN Loops
+
+### Loop 2 Validators
+When validators identify improvements outside current scope:
+```bash
+# In validator agent
+./.claude/skills/cfn-backlog-management/add-backlog-item.sh \
+  --item "Add integration tests for Redis failure scenarios" \
+  --why "Current sprint validates happy path only; failure testing requires additional test infrastructure" \
+  --solution "Create test-redis-failures.sh with Docker-based Redis crash simulation" \
+  --tags "testing,redis,edge-cases"
+```
+
+### Product Owner Decision
+When Product Owner defers work for future sprint:
+```bash
+# In product-owner agent
+./.claude/skills/cfn-backlog-management/add-backlog-item.sh \
+  --item "Migrate coordination from Redis to etcd for production scale" \
+  --why "Redis sufficient for current 10-agent limit; etcd needed for 100+ agent deployments" \
+  --solution "Abstract coordination layer behind interface, implement etcd adapter" \
+  --priority "P3" \
+  --category "Technical-Debt"
+```
+
+### Coordinator Context
+Coordinators can query backlog for related items before spawning agents:
+```bash
+# Check if backlog contains relevant context
+grep -i "redis pooling" readme/BACKLOG.md
+# Use results to inform agent context injection
+```
+
+## Query Interface
+
+**Search by tag:**
+```bash
+grep -A 10 "Tags:.*redis" readme/BACKLOG.md
+```
+
+**Filter by priority:**
+```bash
+sed -n '/^### P1/,/^### P2/p' readme/BACKLOG.md
+```
+
+**List all optimization items:**
+```bash
+grep -B 2 "Category: Optimization" readme/BACKLOG.md
+```
+
+## Maintenance
+
+**Weekly Review**: Product Owner reviews P0-P1 items for sprint planning
+**Monthly Cleanup**: Archive completed items, reassess P3 priorities
+**Quarterly Audit**: Remove stale items (>6 months old, no activity)
+
+## Best Practices
+
+1. **Be specific**: "Add caching" → "Implement Redis LRU cache for agent context with 1h TTL"
+2. **Explain constraints**: "Not enough time" → "Requires 8h estimation work; current sprint has 2h budget"
+3. **Provide actionable solutions**: "Fix later" → "Refactor using Strategy pattern from planning/PATTERNS.md"
+4. **Tag appropriately**: Enables filtering and sprint planning
+5. **Update status**: Move to "Completed" when resolved, note resolution sprint
+
+## Anti-Patterns
+
+❌ **Vague items**: "Improve performance" (What component? How much improvement?)
+❌ **No rationale**: "Backlog this" (Why defer? What's the blocker?)
+❌ **Solution-less**: "Fix Redis issues" (What's the approach? What research is needed?)
+❌ **Duplicate entries**: Check BACKLOG.md before adding
+❌ **Scope creep**: Backlog is for deferred work, not scope expansion
+
+## Example Backlog Item
+
+```markdown
+**[P1] - Implement Adaptive Validator Scaling**
+- **Sprint Backlogged**: Sprint 9 - CFN v3 Implementation
+- **Category**: Optimization
+- **Description**: Dynamically adjust number of Loop 2 validators (2-5) based on task complexity. Currently fixed at 3-4 validators regardless of task size.
+- **Rationale**: Sprint 9 focused on dual-mode architecture validation. Adaptive scaling requires task complexity classifier (NLP or heuristic-based), estimated 12h implementation vs 4h sprint budget.
+- **Proposed Solution**: Create task-classifier skill that analyzes task description (file count, domain keywords, integration points) and returns complexity score (0.0-1.0). Map score to validator count: <0.3 → 2 validators, 0.3-0.7 → 3-4 validators, >0.7 → 5 validators. Reference: CFN_LOOP_TASK_MODE.md section on adaptive validator scaling.
+- **Tags**: `optimization`, `cfn-loop`, `validation`, `adaptive-scaling`
+- **Status**: Backlogged
+- **Date Added**: 2025-10-31
+```
+
+## Success Metrics
+
+- **Backlog utilization**: ≥30% of backlog items addressed within 3 sprints
+- **Item clarity**: 0 items missing required fields
+- **Discovery rate**: ≥50% of technical debt captured vs lost in chat
+- **Sprint planning efficiency**: Backlog queries reduce planning time by 20%
+
+## References
+
+- **STRAT-025**: Explicit Deliverable Tracking (adaptive context)
+- **CFN Loop Documentation**: `.claude/commands/cfn/CFN_LOOP_TASK_MODE.md`
+- **Sprint Execution**: CLAUDE.md Section 6 - Sprint Context Injection
````
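The query snippets documented in the skill compose well for sprint planning. A minimal sketch, not part of the package, that lists only the P0 and P1 item headers, assuming BACKLOG.md follows the section layout and item template defined above:

```bash
# Sketch only: relies on the "### P0" / "### P2" headings and the "**[Pn] - Title**"
# item format documented above; adjust the path if BACKLOG.md lives elsewhere.
sed -n '/^### P0/,/^### P2/p' readme/BACKLOG.md | grep -E '^\*\*\[P[01]\]'
```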
cfn-backlog-management/add-backlog-item.sh (new file; listed above under both .claude/skills/ and claude-assets/skills/):

````diff
@@ -0,0 +1,210 @@
+#!/bin/bash
+set -e
+
+# cfn-backlog-management/add-backlog-item.sh
+# Adds structured backlog items to readme/BACKLOG.md
+
+# Default values
+PRIORITY="P2"
+CATEGORY="Technical-Debt"
+SPRINT="Unknown"
+TAGS=""
+ITEM=""
+WHY=""
+SOLUTION=""
+
+# Parse arguments
+while [[ $# -gt 0 ]]; do
+  case $1 in
+    --item)
+      ITEM="$2"
+      shift 2
+      ;;
+    --why)
+      WHY="$2"
+      shift 2
+      ;;
+    --solution)
+      SOLUTION="$2"
+      shift 2
+      ;;
+    --sprint)
+      SPRINT="$2"
+      shift 2
+      ;;
+    --priority)
+      PRIORITY="$2"
+      shift 2
+      ;;
+    --tags)
+      TAGS="$2"
+      shift 2
+      ;;
+    --category)
+      CATEGORY="$2"
+      shift 2
+      ;;
+    *)
+      echo "Unknown argument: $1" >&2
+      exit 1
+      ;;
+  esac
+done
+
+# Validation
+if [[ -z "$ITEM" ]]; then
+  echo "Error: --item is required" >&2
+  exit 1
+fi
+
+if [[ -z "$WHY" ]]; then
+  echo "Error: --why is required" >&2
+  exit 1
+fi
+
+if [[ -z "$SOLUTION" ]]; then
+  echo "Error: --solution is required" >&2
+  exit 1
+fi
+
+# Validate item length
+ITEM_LENGTH=${#ITEM}
+if (( ITEM_LENGTH < 10 )); then
+  echo "Error: --item must be at least 10 characters (got $ITEM_LENGTH)" >&2
+  exit 1
+fi
+
+if (( ITEM_LENGTH > 500 )); then
+  echo "Error: --item must be at most 500 characters (got $ITEM_LENGTH)" >&2
+  exit 1
+fi
+
+# Validate priority
+if [[ ! "$PRIORITY" =~ ^P[0-3]$ ]]; then
+  echo "Error: --priority must be P0, P1, P2, or P3 (got: $PRIORITY)" >&2
+  exit 1
+fi
+
+# Validate category
+VALID_CATEGORIES="Feature|Bug|Technical-Debt|Optimization"
+if [[ ! "$CATEGORY" =~ ^($VALID_CATEGORIES)$ ]]; then
+  echo "Error: --category must be one of: Feature, Bug, Technical-Debt, Optimization (got: $CATEGORY)" >&2
+  exit 1
+fi
+
+# Path to backlog file
+BACKLOG_FILE="readme/BACKLOG.md"
+PROJECT_ROOT="/mnt/c/Users/masha/Documents/claude-flow-novice"
+BACKLOG_PATH="$PROJECT_ROOT/$BACKLOG_FILE"
+
+# Create backlog file if it doesn't exist
+if [[ ! -f "$BACKLOG_PATH" ]]; then
+  echo "Creating $BACKLOG_FILE..."
+  mkdir -p "$(dirname "$BACKLOG_PATH")"
+  cat > "$BACKLOG_PATH" <<'EOF'
+# Claude Flow Novice - Backlog
+
+Last Updated: $(date +%Y-%m-%d)
+
+## Active Items
+
+### P0 - Critical
+
+### P1 - High Priority
+
+### P2 - Medium Priority
+
+### P3 - Low Priority / Nice-to-Have
+
+## Completed Items
+
+---
+
+## Item Template
+
+**[PRIORITY] - [Item Title]**
+- **Sprint Backlogged**: Sprint X
+- **Category**: Feature/Bug/Technical-Debt/Optimization
+- **Description**: What needs to be done
+- **Rationale**: Why it was deferred
+- **Proposed Solution**: How to implement
+- **Tags**: `tag1`, `tag2`, `tag3`
+- **Status**: Backlogged
+- **Date Added**: YYYY-MM-DD
+EOF
+fi
+
+# Check for duplicates (simple substring match)
+if grep -qi "$ITEM" "$BACKLOG_PATH" 2>/dev/null; then
+  echo "Warning: Similar item may already exist in backlog" >&2
+  echo "Existing matches:" >&2
+  grep -i "$ITEM" "$BACKLOG_PATH" | head -3 >&2
+  read -p "Continue anyway? (y/n) " -n 1 -r
+  echo
+  if [[ ! $REPLY =~ ^[Yy]$ ]]; then
+    echo "Aborted" >&2
+    exit 1
+  fi
+fi
+
+# Format tags
+FORMATTED_TAGS=""
+if [[ -n "$TAGS" ]]; then
+  IFS=',' read -ra TAG_ARRAY <<< "$TAGS"
+  for tag in "${TAG_ARRAY[@]}"; do
+    FORMATTED_TAGS="${FORMATTED_TAGS}\`${tag}\`, "
+  done
+  FORMATTED_TAGS="${FORMATTED_TAGS%, }" # Remove trailing comma
+fi
+
+# Generate item title (first 60 chars of description)
+ITEM_TITLE="${ITEM:0:60}"
+if (( ${#ITEM} > 60 )); then
+  ITEM_TITLE="${ITEM_TITLE}..."
+fi
+
+# Current date
+CURRENT_DATE=$(date +%Y-%m-%d)
+
+# Create backlog entry
+BACKLOG_ENTRY=$(cat <<EOF
+
+**[$PRIORITY] - $ITEM_TITLE**
+- **Sprint Backlogged**: $SPRINT
+- **Category**: $CATEGORY
+- **Description**: $ITEM
+- **Rationale**: $WHY
+- **Proposed Solution**: $SOLUTION
+- **Tags**: $FORMATTED_TAGS
+- **Status**: Backlogged
+- **Date Added**: $CURRENT_DATE
+
+EOF
+)
+
+# Insert into appropriate priority section
+SECTION_MARKER="### $PRIORITY"
+
+# Use awk to insert after section marker
+awk -v section="$SECTION_MARKER" -v entry="$BACKLOG_ENTRY" '
+$0 ~ section {
+  print
+  print entry
+  next
+}
+{print}
+' "$BACKLOG_PATH" > "${BACKLOG_PATH}.tmp"
+
+mv "${BACKLOG_PATH}.tmp" "$BACKLOG_PATH"
+
+# Update "Last Updated" timestamp
+sed -i "s/Last Updated: .*/Last Updated: $CURRENT_DATE/" "$BACKLOG_PATH"
+
+echo "✅ Backlog item added successfully"
+echo "   Priority: $PRIORITY"
+echo "   Category: $CATEGORY"
+echo "   Sprint: $SPRINT"
+echo "   Location: $BACKLOG_FILE"
+
+# Output path for scripting
+echo "$BACKLOG_PATH"
````
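Since add-backlog-item.sh prints status lines first and the absolute BACKLOG.md path as its final line ("Output path for scripting"), callers can capture that path. A minimal sketch with illustrative argument values:

```bash
# Sketch only: argument values are illustrative, and the script will prompt on stdin
# if it finds a similar existing item. The last line of stdout is the backlog path.
OUTPUT=$(./.claude/skills/cfn-backlog-management/add-backlog-item.sh \
  --item "Add integration tests for Redis failure scenarios" \
  --why "Current sprint validates the happy path only" \
  --solution "Docker-based Redis crash simulation in CI" \
  --tags "testing,redis")
BACKLOG_PATH=$(printf '%s\n' "$OUTPUT" | tail -n 1)
echo "Backlog updated at: $BACKLOG_PATH"
```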
pre-edit-backup/backup.sh (new file):

````diff
@@ -0,0 +1,130 @@
+#!/bin/bash
+
+# Pre-Edit Backup Script
+# Creates timestamped backup with SHA-256 hash and JSON metadata
+#
+# Usage: backup.sh FILE_PATH AGENT_ID
+#
+# Arguments:
+#   FILE_PATH - Absolute path to file to backup
+#   AGENT_ID  - Unique identifier for the agent creating the backup
+#
+# Returns:
+#   Backup directory path on success
+#   Exit code 1 on failure
+#
+# Example:
+#   ./.claude/skills/pre-edit-backup/backup.sh "/path/to/file.txt" "backend-dev-1"
+
+set -euo pipefail
+
+# === Input Validation ===
+
+FILE_PATH="$1"
+AGENT_ID="$2"
+
+if [[ -z "$FILE_PATH" ]]; then
+  echo "Error: No file path provided" >&2
+  echo "Usage: backup.sh FILE_PATH AGENT_ID" >&2
+  exit 1
+fi
+
+if [[ -z "$AGENT_ID" ]]; then
+  echo "Error: No agent ID provided" >&2
+  echo "Usage: backup.sh FILE_PATH AGENT_ID" >&2
+  exit 1
+fi
+
+if [[ ! -f "$FILE_PATH" ]]; then
+  echo "Error: File does not exist: $FILE_PATH" >&2
+  exit 1
+fi
+
+# === Configuration ===
+
+BACKUP_BASE_DIR=".backups"
+DEFAULT_TTL=86400 # 24 hours in seconds
+
+# === Tool Availability Checks ===
+
+# Check for sha256sum (with fallback to shasum on macOS)
+if command -v sha256sum &>/dev/null; then
+  HASH_TOOL="sha256sum"
+elif command -v shasum &>/dev/null; then
+  HASH_TOOL="shasum -a 256"
+else
+  echo "Error: Neither sha256sum nor shasum found. Cannot generate file hash." >&2
+  exit 1
+fi
+
+# Check for jq (graceful degradation)
+if ! command -v jq &>/dev/null; then
+  echo "Warning: jq not found. Metadata will be created using basic shell." >&2
+  USE_JQ=false
+else
+  USE_JQ=true
+fi
+
+# === Generate Backup Metadata ===
+
+TIMESTAMP=$(date +%s%3N 2>/dev/null || date +%s) # Milliseconds if supported, else seconds
+FILE_HASH=$($HASH_TOOL "$FILE_PATH" | cut -d' ' -f1)
+
+# === Create Backup Directory ===
+
+BACKUP_DIR="${BACKUP_BASE_DIR}/${AGENT_ID}/${TIMESTAMP}_${FILE_HASH}"
+
+if ! mkdir -p "$BACKUP_DIR" 2>/dev/null; then
+  echo "Error: Failed to create backup directory: $BACKUP_DIR" >&2
+  exit 1
+fi
+
+# Set secure permissions (owner read/write/execute only)
+chmod 700 "$BACKUP_DIR" 2>/dev/null || true
+
+# === Copy Original File ===
+
+if ! cp "$FILE_PATH" "${BACKUP_DIR}/original_file" 2>/dev/null; then
+  echo "Error: Failed to copy file to backup directory" >&2
+  rm -rf "$BACKUP_DIR"
+  exit 1
+fi
+
+# === Generate Metadata ===
+
+METADATA_FILE="${BACKUP_DIR}/backup_metadata.json"
+
+if [[ "$USE_JQ" == true ]]; then
+  # Use jq for structured JSON generation
+  jq -n \
+    --arg agent_id "$AGENT_ID" \
+    --arg original_path "$FILE_PATH" \
+    --arg timestamp "$TIMESTAMP" \
+    --arg file_hash "$FILE_HASH" \
+    --arg ttl "$DEFAULT_TTL" \
+    '{
+      agent_id: $agent_id,
+      original_path: $original_path,
+      backup_timestamp: ($timestamp | tonumber),
+      file_hash: $file_hash,
+      backup_ttl: ($ttl | tonumber),
+      backup_status: "active"
+    }' > "$METADATA_FILE"
+else
+  # Fallback: Manual JSON generation
+  cat > "$METADATA_FILE" <<EOF
+{
+  "agent_id": "$AGENT_ID",
+  "original_path": "$FILE_PATH",
+  "backup_timestamp": $TIMESTAMP,
+  "file_hash": "$FILE_HASH",
+  "backup_ttl": $DEFAULT_TTL,
+  "backup_status": "active"
+}
+EOF
+fi
+
+# === Return Backup Path ===
+
+echo "$BACKUP_DIR"
+exit 0
````
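For orientation, a hedged sketch of calling backup.sh directly; the file path and agent ID are placeholders, and the shipped cfn-invoke-pre-edit.sh hook may wire this up differently:

```bash
# Sketch only: FILE and AGENT are placeholders. backup.sh prints the backup directory,
# which contains original_file plus backup_metadata.json.
FILE="/absolute/path/to/src/server.ts"
AGENT="backend-dev-1"
BACKUP_DIR=$(./.claude/skills/pre-edit-backup/backup.sh "$FILE" "$AGENT")
cat "$BACKUP_DIR/backup_metadata.json"
# Expected shape (values vary):
# {"agent_id":"backend-dev-1","original_path":"/absolute/path/to/src/server.ts",
#  "backup_timestamp":1730380800000,"file_hash":"<sha256>","backup_ttl":86400,
#  "backup_status":"active"}
```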
pre-edit-backup/cleanup.sh (new file):

````diff
@@ -0,0 +1,155 @@
+#!/bin/bash
+
+# Pre-Edit Backup Cleanup Script
+# Removes expired backups based on TTL configuration
+# Runs as background process or cron-style with flock for concurrency control
+#
+# Usage: cleanup.sh [--dry-run] [--log-file FILE]
+#
+# Options:
+#   --dry-run  - Show what would be deleted without actually deleting
+#   --log-file - Path to log file (default: none, outputs to stdout)
+#
+# Returns:
+#   Exit code 0 on success
+#   Exit code 1 if cleanup already in progress
+#
+# Example:
+#   ./.claude/skills/pre-edit-backup/cleanup.sh
+#   ./.claude/skills/pre-edit-backup/cleanup.sh --dry-run
+#   ./.claude/skills/pre-edit-backup/cleanup.sh --log-file /tmp/backup-cleanup.log
+
+set -euo pipefail
+
+# === Configuration ===
+
+BACKUP_BASE_DIR=".backups"
+CURRENT_TIME=$(date +%s)
+DRY_RUN=false
+LOG_FILE=""
+
+# === Parse Options ===
+
+while [[ "$#" -gt 0 ]]; do
+  case $1 in
+    --dry-run)
+      DRY_RUN=true
+      shift
+      ;;
+    --log-file)
+      LOG_FILE="$2"
+      shift 2
+      ;;
+    *)
+      echo "Error: Unknown option: $1" >&2
+      echo "Usage: cleanup.sh [--dry-run] [--log-file FILE]" >&2
+      exit 1
+      ;;
+  esac
+done
+
+# === Logging Function ===
+
+log() {
+  local message="$1"
+  local timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+  local log_line="[$timestamp] $message"
+
+  if [[ -n "$LOG_FILE" ]]; then
+    echo "$log_line" >> "$LOG_FILE"
+  else
+    echo "$log_line"
+  fi
+}
+
+# === Prevent Concurrent Cleanup ===
+
+if [[ ! -d "$BACKUP_BASE_DIR" ]]; then
+  log "Backup directory does not exist: $BACKUP_BASE_DIR"
+  exit 0
+fi
+
+LOCKFILE="${BACKUP_BASE_DIR}/cleanup.lock"
+
+# Ensure lock file directory exists
+mkdir -p "$(dirname "$LOCKFILE")" 2>/dev/null || true
+
+# Acquire lock (non-blocking)
+exec 9>"$LOCKFILE"
+if ! flock -n 9; then
+  log "Cleanup already in progress (lock held)"
+  exit 1
+fi
+
+log "Cleanup started (dry-run: $DRY_RUN)"
+
+# === Cleanup Logic ===
+
+REMOVED_COUNT=0
+SKIPPED_COUNT=0
+ERROR_COUNT=0
+
+# Check for jq availability
+if ! command -v jq &>/dev/null; then
+  log "Error: jq is required for cleanup operations"
+  exit 1
+fi
+
+# Iterate through agent directories
+for agent_dir in "$BACKUP_BASE_DIR"/*; do
+  # Skip if not a directory or if it's the lockfile
+  [[ -d "$agent_dir" ]] || continue
+  [[ "$(basename "$agent_dir")" == "cleanup.lock" ]] && continue
+
+  # Iterate through backup directories for this agent
+  for backup_dir in "$agent_dir"/*; do
+    [[ -d "$backup_dir" ]] || continue
+
+    metadata_file="${backup_dir}/backup_metadata.json"
+
+    if [[ ! -f "$metadata_file" ]]; then
+      log "Warning: Metadata missing for backup: ${backup_dir}"
+      SKIPPED_COUNT=$((SKIPPED_COUNT + 1))
+      continue
+    fi
+
+    # Extract backup timestamp and TTL
+    backup_timestamp=$(jq -r '.backup_timestamp' "$metadata_file" 2>/dev/null || echo "")
+    backup_ttl=$(jq -r '.backup_ttl' "$metadata_file" 2>/dev/null || echo "")
+
+    if [[ -z "$backup_timestamp" ]] || [[ "$backup_timestamp" == "null" ]] || \
+       [[ -z "$backup_ttl" ]] || [[ "$backup_ttl" == "null" ]]; then
+      log "Warning: Invalid metadata in: ${metadata_file}"
+      SKIPPED_COUNT=$((SKIPPED_COUNT + 1))
+      continue
+    fi
+
+    # Convert milliseconds to seconds if needed (timestamp > 10 digits = milliseconds)
+    if [[ ${#backup_timestamp} -gt 10 ]]; then
+      backup_timestamp=$((backup_timestamp / 1000))
+    fi
+
+    # Check if backup has expired
+    age=$((CURRENT_TIME - backup_timestamp))
+    if (( age > backup_ttl )); then
+      if [[ "$DRY_RUN" == true ]]; then
+        log "Would remove expired backup (age: ${age}s, ttl: ${backup_ttl}s): ${backup_dir}"
+        REMOVED_COUNT=$((REMOVED_COUNT + 1))
+      else
+        if rm -rf "$backup_dir" 2>/dev/null; then
+          log "Removed expired backup (age: ${age}s, ttl: ${backup_ttl}s): ${backup_dir}"
+          REMOVED_COUNT=$((REMOVED_COUNT + 1))
+        else
+          log "Error: Failed to remove backup: ${backup_dir}"
+          ERROR_COUNT=$((ERROR_COUNT + 1))
+        fi
+      fi
+    fi
+  done
+done
+
+# === Summary ===
+
+log "Cleanup completed: removed=$REMOVED_COUNT, skipped=$SKIPPED_COUNT, errors=$ERROR_COUNT"
+
+exit 0
````
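cleanup.sh can be run repeatedly; the flock guard makes a second concurrent run exit early. A minimal scheduling sketch follows; the cron entry and paths are assumptions, not shipped in the package:

```bash
# Preview deletions, then run for real; the log file path is illustrative.
./.claude/skills/pre-edit-backup/cleanup.sh --dry-run
./.claude/skills/pre-edit-backup/cleanup.sh --log-file /tmp/backup-cleanup.log

# Hypothetical hourly cron entry, assuming the project root as the working directory:
# 0 * * * * cd /path/to/project && ./.claude/skills/pre-edit-backup/cleanup.sh --log-file /tmp/backup-cleanup.log
```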