shipwright-cli 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +926 -0
- package/claude-code/CLAUDE.md.shipwright +125 -0
- package/claude-code/hooks/notify-idle.sh +35 -0
- package/claude-code/hooks/pre-compact-save.sh +57 -0
- package/claude-code/hooks/task-completed.sh +170 -0
- package/claude-code/hooks/teammate-idle.sh +68 -0
- package/claude-code/settings.json.template +184 -0
- package/completions/_shipwright +140 -0
- package/completions/shipwright.bash +89 -0
- package/completions/shipwright.fish +107 -0
- package/docs/KNOWN-ISSUES.md +199 -0
- package/docs/TIPS.md +331 -0
- package/docs/definition-of-done.example.md +16 -0
- package/docs/patterns/README.md +139 -0
- package/docs/patterns/audit-loop.md +149 -0
- package/docs/patterns/bug-hunt.md +183 -0
- package/docs/patterns/feature-implementation.md +159 -0
- package/docs/patterns/refactoring.md +183 -0
- package/docs/patterns/research-exploration.md +144 -0
- package/docs/patterns/test-generation.md +173 -0
- package/package.json +49 -0
- package/scripts/adapters/docker-deploy.sh +50 -0
- package/scripts/adapters/fly-deploy.sh +41 -0
- package/scripts/adapters/iterm2-adapter.sh +122 -0
- package/scripts/adapters/railway-deploy.sh +34 -0
- package/scripts/adapters/tmux-adapter.sh +87 -0
- package/scripts/adapters/vercel-deploy.sh +35 -0
- package/scripts/adapters/wezterm-adapter.sh +103 -0
- package/scripts/cct +242 -0
- package/scripts/cct-cleanup.sh +172 -0
- package/scripts/cct-cost.sh +590 -0
- package/scripts/cct-daemon.sh +3189 -0
- package/scripts/cct-doctor.sh +328 -0
- package/scripts/cct-fix.sh +478 -0
- package/scripts/cct-fleet.sh +904 -0
- package/scripts/cct-init.sh +282 -0
- package/scripts/cct-logs.sh +273 -0
- package/scripts/cct-loop.sh +1332 -0
- package/scripts/cct-memory.sh +1148 -0
- package/scripts/cct-pipeline.sh +3844 -0
- package/scripts/cct-prep.sh +1352 -0
- package/scripts/cct-ps.sh +168 -0
- package/scripts/cct-reaper.sh +390 -0
- package/scripts/cct-session.sh +284 -0
- package/scripts/cct-status.sh +169 -0
- package/scripts/cct-templates.sh +242 -0
- package/scripts/cct-upgrade.sh +422 -0
- package/scripts/cct-worktree.sh +405 -0
- package/scripts/postinstall.mjs +96 -0
- package/templates/pipelines/autonomous.json +71 -0
- package/templates/pipelines/cost-aware.json +95 -0
- package/templates/pipelines/deployed.json +79 -0
- package/templates/pipelines/enterprise.json +114 -0
- package/templates/pipelines/fast.json +63 -0
- package/templates/pipelines/full.json +104 -0
- package/templates/pipelines/hotfix.json +63 -0
- package/templates/pipelines/standard.json +91 -0
- package/tmux/claude-teams-overlay.conf +109 -0
- package/tmux/templates/architecture.json +19 -0
- package/tmux/templates/bug-fix.json +24 -0
- package/tmux/templates/code-review.json +24 -0
- package/tmux/templates/devops.json +19 -0
- package/tmux/templates/documentation.json +19 -0
- package/tmux/templates/exploration.json +19 -0
- package/tmux/templates/feature-dev.json +24 -0
- package/tmux/templates/full-stack.json +24 -0
- package/tmux/templates/migration.json +24 -0
- package/tmux/templates/refactor.json +19 -0
- package/tmux/templates/security-audit.json +24 -0
- package/tmux/templates/testing.json +24 -0
- package/tmux/tmux.conf +167 -0
|
@@ -0,0 +1,1148 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# ╔═══════════════════════════════════════════════════════════════════════════╗
# ║ shipwright memory — Persistent Learning & Context System ║
# ║ Captures learnings · Injects context · Searches memory · Tracks metrics║
# ╚═══════════════════════════════════════════════════════════════════════════╝
# Sourced/executed by the other cct-* scripts; defines memory capture,
# injection, and metrics helpers plus a small CLI (see memory_show below).
set -euo pipefail

VERSION="1.7.0"
# Absolute directory of this script, and the package root one level up.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"

# ─── Colors (matches Seth's tmux theme) ─────────────────────────────────────
# 24-bit ANSI escape sequences; consumed by the echo -e output helpers below.
CYAN='\033[38;2;0;212;255m' # #00d4ff — primary accent
PURPLE='\033[38;2;124;58;237m' # #7c3aed — secondary
BLUE='\033[38;2;0;102;255m' # #0066ff — tertiary
GREEN='\033[38;2;74;222;128m' # success
YELLOW='\033[38;2;250;204;21m' # warning
RED='\033[38;2;248;113;113m' # error
DIM='\033[2m'
BOLD='\033[1m'
RESET='\033[0m'
|
|
22
|
+
|
|
23
|
+
# ─── Output Helpers ─────────────────────────────────────────────────────────
# Each helper prefixes its arguments with a colored status glyph.
info() {
  echo -e "${CYAN}${BOLD}▸${RESET} $*"
}

success() {
  echo -e "${GREEN}${BOLD}✓${RESET} $*"
}

warn() {
  echo -e "${YELLOW}${BOLD}⚠${RESET} $*"
}

# Errors go to stderr so captured stdout stays clean.
error() {
  echo -e "${RED}${BOLD}✗${RESET} $*" >&2
}

# Timestamps: UTC ISO-8601 and Unix epoch seconds.
now_iso() {
  date -u +"%Y-%m-%dT%H:%M:%SZ"
}

now_epoch() {
  date +%s
}
|
|
31
|
+
|
|
32
|
+
# ─── Structured Event Log ──────────────────────────────────────────────────
EVENTS_FILE="${HOME}/.claude-teams/events.jsonl"

# emit_event <type> [key=value ...]
# Appends one JSON object per call to $EVENTS_FILE (JSON Lines).
# Values that look like plain numbers are written as JSON numbers;
# everything else is written as a JSON string with backslashes and
# double quotes escaped.
# NOTE(review): embedded newlines/control characters in a value would
# still break the line-oriented format — callers appear to pass short
# single-line values only.
emit_event() {
  local event_type="$1"
  shift
  local json_fields=""
  for kv in "$@"; do
    local key="${kv%%=*}"
    local val="${kv#*=}"
    # Require at least one digit after the decimal point: a bare "5."
    # is NOT valid JSON (the old regex ^-?[0-9]+\.?[0-9]*$ accepted it).
    if [[ "$val" =~ ^-?[0-9]+(\.[0-9]+)?$ ]]; then
      json_fields="${json_fields},\"${key}\":${val}"
    else
      # Escape backslashes FIRST, then quotes — previously backslashes
      # were not escaped at all, so a value like 'C:\tmp' produced an
      # invalid JSON line.
      val="${val//\\/\\\\}"
      val="${val//\"/\\\"}"
      json_fields="${json_fields},\"${key}\":\"${val}\""
    fi
  done
  mkdir -p "${HOME}/.claude-teams"
  echo "{\"ts\":\"$(now_iso)\",\"ts_epoch\":$(now_epoch),\"type\":\"${event_type}\"${json_fields}}" >> "$EVENTS_FILE"
}
|
|
52
|
+
|
|
53
|
+
# ─── Memory Storage Paths ──────────────────────────────────────────────────
|
|
54
|
+
MEMORY_ROOT="${HOME}/.shipwright/memory"
|
|
55
|
+
GLOBAL_MEMORY="${MEMORY_ROOT}/global.json"
|
|
56
|
+
|
|
57
|
+
# Get a deterministic hash for the current repo
|
|
58
|
+
repo_hash() {
|
|
59
|
+
local origin
|
|
60
|
+
origin=$(git config --get remote.origin.url 2>/dev/null || echo "local")
|
|
61
|
+
echo -n "$origin" | shasum -a 256 | cut -c1-12
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
# repo_name — best-effort "owner/repo" label for the current checkout.
# Handles both "...host[:/]owner/repo.git" and "...host[:/]owner/repo"
# origin URL shapes; prints "local" when the origin cannot be read.
repo_name() {
  local url
  if ! url=$(git config --get remote.origin.url 2>/dev/null); then
    echo "local"
    return 0
  fi
  # First sed strips a trailing .git form; second handles URLs without it.
  printf '%s\n' "$url" \
    | sed 's|.*[:/]\([^/]*/[^/]*\)\.git$|\1|' \
    | sed 's|.*[:/]\([^/]*/[^/]*\)$|\1|'
}
|
|
70
|
+
|
|
71
|
+
# repo_memory_dir — print the per-repo memory directory path
# ($MEMORY_ROOT/<12-char repo hash>).
repo_memory_dir() {
  local hash
  hash="$(repo_hash)"
  printf '%s/%s\n' "$MEMORY_ROOT" "$hash"
}
|
|
74
|
+
|
|
75
|
+
# ensure_memory_dir — create the per-repo memory directory (plus the
# global memory root) and seed any missing JSON files with empty
# documents. Existing files are never overwritten.
ensure_memory_dir() {
  local dir
  dir="$(repo_memory_dir)"
  mkdir -p "$dir"

  # filename=seed pairs for the per-repo store; seeds contain no '='.
  local spec name seed
  for spec in \
    'patterns.json={}' \
    'failures.json={"failures":[]}' \
    'decisions.json={"decisions":[]}' \
    'metrics.json={"baselines":{}}'
  do
    name="${spec%%=*}"
    seed="${spec#*=}"
    if [[ ! -f "$dir/$name" ]]; then
      printf '%s\n' "$seed" > "$dir/$name"
    fi
  done

  # Global (cross-repo) memory lives directly under MEMORY_ROOT.
  mkdir -p "$MEMORY_ROOT"
  if [[ ! -f "$GLOBAL_MEMORY" ]]; then
    printf '%s\n' '{"common_patterns":[],"cross_repo_learnings":[]}' > "$GLOBAL_MEMORY"
  fi
}
|
|
90
|
+
|
|
91
|
+
# ─── Memory Capture Functions ──────────────────────────────────────────────

# memory_capture_pipeline <state_file> <artifacts_dir>
# Called after every pipeline completes. Reads state + artifacts → writes learnings.
# The state file is expected to contain "status:", "goal:" and a "stages:"
# section terminated by "---" (inferred from the sed patterns below — the
# exact format is defined by the pipeline writer; confirm against cct-pipeline).
# Side effects: appends review statistics to $GLOBAL_MEMORY, may record a
# test failure via memory_capture_failure, and emits a "memory.capture" event.
memory_capture_pipeline() {
  local state_file="${1:-}"
  local artifacts_dir="${2:-}"

  if [[ -z "$state_file" || ! -f "$state_file" ]]; then
    warn "State file not found: ${state_file:-<empty>}"
    return 1
  fi

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"

  local repo
  repo="$(repo_name)"
  local captured_at
  captured_at="$(now_iso)"

  info "Capturing pipeline learnings for ${CYAN}${repo}${RESET}..."

  # Extract pipeline result from state file
  local pipeline_status=""
  pipeline_status=$(sed -n 's/^status: *//p' "$state_file" | head -1)

  # NOTE(review): goal is parsed but never used later in this function;
  # kept for parity with the state format (or future use).
  local goal=""
  goal=$(sed -n 's/^goal: *"*\([^"]*\)"*/\1/p' "$state_file" | head -1)

  # Capture stage results (the block between "stages:" and "---").
  local stages_section=""
  stages_section=$(sed -n '/^stages:/,/^---/p' "$state_file" 2>/dev/null || true)

  # Track which stages passed/failed — comma-separated stage names derived
  # from lines like "name: complete" / "name: failed".
  local passed_stages=""
  local failed_stages=""
  if [[ -n "$stages_section" ]]; then
    passed_stages=$(echo "$stages_section" | grep "complete" | sed 's/: *complete//' | tr -d ' ' | tr '\n' ',' | sed 's/,$//' || true)
    failed_stages=$(echo "$stages_section" | grep "failed" | sed 's/: *failed//' | tr -d ' ' | tr '\n' ',' | sed 's/,$//' || true)
  fi

  # Capture test failures if test artifacts exist
  if [[ -n "$artifacts_dir" && -f "$artifacts_dir/test-results.log" ]]; then
    local test_output
    test_output=$(cat "$artifacts_dir/test-results.log" 2>/dev/null || true)
    if echo "$test_output" | grep -qiE "FAIL|ERROR|failed"; then
      memory_capture_failure "test" "$test_output"
    fi
  fi

  # Capture review feedback patterns: count "**[Bug]" / "**[Warning]"
  # markers in review.md (grep -c counts matching lines, not occurrences).
  if [[ -n "$artifacts_dir" && -f "$artifacts_dir/review.md" ]]; then
    local review_output
    review_output=$(cat "$artifacts_dir/review.md" 2>/dev/null || true)
    local bug_count warning_count
    bug_count=$(echo "$review_output" | grep -ciE '\*\*\[Bug\]' || true)
    warning_count=$(echo "$review_output" | grep -ciE '\*\*\[Warning\]' || true)

    if [[ "${bug_count:-0}" -gt 0 || "${warning_count:-0}" -gt 0 ]]; then
      # Record review patterns to global memory for cross-repo learning,
      # keeping only the most recent 50 entries. Atomic write via mktemp+mv.
      local tmp_global
      tmp_global=$(mktemp)
      jq --arg repo "$repo" \
        --arg ts "$captured_at" \
        --argjson bugs "${bug_count:-0}" \
        --argjson warns "${warning_count:-0}" \
        '.cross_repo_learnings += [{
          repo: $repo,
          type: "review_feedback",
          bugs: $bugs,
          warnings: $warns,
          captured_at: $ts
        }] | .cross_repo_learnings = (.cross_repo_learnings | .[-50:])' \
        "$GLOBAL_MEMORY" > "$tmp_global" && mv "$tmp_global" "$GLOBAL_MEMORY"
    fi
  fi

  emit_event "memory.capture" \
    "repo=${repo}" \
    "result=${pipeline_status}" \
    "passed_stages=${passed_stages}" \
    "failed_stages=${failed_stages}"

  success "Captured pipeline learnings (status: ${pipeline_status})"
}
|
|
178
|
+
|
|
179
|
+
# memory_capture_failure <stage> <error_output>
# Captures and deduplicates failure patterns.
# A short "pattern" line is extracted from the error text; if an identical
# pattern already exists in failures.json its seen_count is incremented,
# otherwise a new entry is appended (history capped at 100 entries).
# Emits a "memory.failure" event. Returns 0 silently when no pattern can
# be extracted from empty output.
memory_capture_failure() {
  local stage="${1:-unknown}"
  local error_output="${2:-}"

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local failures_file="$mem_dir/failures.json"

  # Extract a short pattern from the error (first significant line):
  # first line mentioning an error-ish keyword, whitespace-trimmed,
  # truncated to 200 chars.
  local pattern=""
  pattern=$(echo "$error_output" \
    | grep -iE "error|fail|cannot|not found|undefined|exception|missing" \
    | head -1 \
    | sed 's/^[[:space:]]*//' \
    | cut -c1-200)

  # Fall back to the very first line when no keyword line was found.
  if [[ -z "$pattern" ]]; then
    pattern=$(echo "$error_output" | head -1 | cut -c1-200)
  fi

  [[ -z "$pattern" ]] && return 0

  # Check for duplicate — increment seen_count if pattern already exists.
  # existing_idx is the array index of the first matching entry, or -1.
  local existing_idx
  existing_idx=$(jq --arg pat "$pattern" \
    '[.failures[]] | to_entries | map(select(.value.pattern == $pat)) | .[0].key // -1' \
    "$failures_file" 2>/dev/null || echo "-1")

  local tmp_file
  tmp_file=$(mktemp)

  if [[ "$existing_idx" != "-1" && "$existing_idx" != "null" ]]; then
    # Update existing entry (atomic write via mktemp+mv).
    jq --argjson idx "$existing_idx" \
      --arg ts "$(now_iso)" \
      '.failures[$idx].seen_count += 1 | .failures[$idx].last_seen = $ts' \
      "$failures_file" > "$tmp_file" && mv "$tmp_file" "$failures_file"
  else
    # Add new failure entry; root_cause/fix start empty and are filled
    # in later by memory_analyze_failure. Keep only the last 100 entries.
    jq --arg stage "$stage" \
      --arg pattern "$pattern" \
      --arg ts "$(now_iso)" \
      '.failures += [{
        stage: $stage,
        pattern: $pattern,
        root_cause: "",
        fix: "",
        seen_count: 1,
        last_seen: $ts
      }] | .failures = (.failures | .[-100:])' \
      "$failures_file" > "$tmp_file" && mv "$tmp_file" "$failures_file"
  fi

  emit_event "memory.failure" "stage=${stage}" "pattern=${pattern:0:80}"
}
|
|
237
|
+
|
|
238
|
+
# memory_analyze_failure <log_file> <stage>
# Uses Claude to analyze a pipeline failure and fill in root_cause/fix/category.
# Reads the last 200 log lines, asks the `claude` CLI for a strict-JSON
# verdict, validates the category against a fixed allow-list, and patches
# the MOST RECENT entry in failures.json. Requires the `claude` binary on
# PATH; returns 1 when logs are missing or the response cannot be parsed.
memory_analyze_failure() {
  local log_file="${1:-}"
  local stage="${2:-unknown}"

  if [[ -z "$log_file" ]]; then
    warn "No log file specified for failure analysis"
    return 1
  fi

  # Gather log context — use the specific log file if it exists,
  # otherwise glob for any logs in the artifacts directory
  local log_tail=""
  if [[ -f "$log_file" ]]; then
    log_tail=$(tail -200 "$log_file" 2>/dev/null || true)
  else
    # Try to find stage-specific logs in the same directory
    local log_dir
    log_dir=$(dirname "$log_file" 2>/dev/null || echo ".")
    log_tail=$(tail -200 "$log_dir"/*.log 2>/dev/null || true)
  fi

  if [[ -z "$log_tail" ]]; then
    warn "No log content found for analysis"
    return 1
  fi

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local failures_file="$mem_dir/failures.json"

  # Check that failures.json has at least one entry — the analysis result
  # is written onto .failures[-1], so there must be something to patch.
  local entry_count
  entry_count=$(jq '.failures | length' "$failures_file" 2>/dev/null || echo "0")
  if [[ "$entry_count" -eq 0 ]]; then
    warn "No failure entries to analyze"
    return 0
  fi

  local last_pattern
  last_pattern=$(jq -r '.failures[-1].pattern // ""' "$failures_file" 2>/dev/null)

  info "Analyzing failure in ${CYAN}${stage}${RESET} stage..."

  # Build the analysis prompt (asks for a strict JSON object, no fences).
  local prompt
  prompt="Analyze this pipeline failure. The stage was: ${stage}.
The error pattern is: ${last_pattern}

Log output (last 200 lines):
${log_tail}

Return ONLY a JSON object with exactly these fields:
{\"root_cause\": \"one-line root cause\", \"fix\": \"one-line fix suggestion\", \"category\": \"one of: test_failure, build_error, lint_error, timeout, dependency, flaky, config\"}

Return JSON only, no markdown fences, no explanation."

  # Call Claude for analysis
  local analysis
  analysis=$(claude -p "$prompt" --model sonnet 2>/dev/null) || {
    warn "Claude analysis failed"
    return 1
  }

  # Extract JSON — strip markdown fences if present, then flatten to one
  # line so jq sees a single document.
  analysis=$(echo "$analysis" | sed 's/^```json//; s/^```//; s/```$//' | tr -d '\n')

  # Parse the fields; each falls back to a safe default on jq failure.
  local root_cause fix category
  root_cause=$(echo "$analysis" | jq -r '.root_cause // ""' 2>/dev/null) || root_cause=""
  fix=$(echo "$analysis" | jq -r '.fix // ""' 2>/dev/null) || fix=""
  category=$(echo "$analysis" | jq -r '.category // "unknown"' 2>/dev/null) || category="unknown"

  if [[ -z "$root_cause" || "$root_cause" == "null" ]]; then
    warn "Could not parse analysis response"
    return 1
  fi

  # Validate category against allowed values
  case "$category" in
    test_failure|build_error|lint_error|timeout|dependency|flaky|config) ;;
    *) category="unknown" ;;
  esac

  # Update the most recent failure entry with root_cause, fix, category
  # (atomic write via mktemp+mv).
  local tmp_file
  tmp_file=$(mktemp)
  jq --arg rc "$root_cause" \
    --arg fix "$fix" \
    --arg cat "$category" \
    '.failures[-1].root_cause = $rc | .failures[-1].fix = $fix | .failures[-1].category = $cat' \
    "$failures_file" > "$tmp_file" && mv "$tmp_file" "$failures_file"

  emit_event "memory.analyze" "stage=${stage}" "category=${category}"

  success "Failure analyzed: ${PURPLE}[${category}]${RESET} ${root_cause}"
}
|
|
337
|
+
|
|
338
|
+
# memory_capture_pattern <pattern_type> <pattern_data_json>
# Records codebase patterns (project type, framework, conventions).
# pattern_type "project": auto-detects stack attributes from marker files
#   in the CURRENT working directory and merges them into patterns.json.
# pattern_type "known_issue": appends pattern_data (a plain description
#   string, despite the parameter name) to .known_issues, deduplicated,
#   capped at 50 entries.
# Anything else warns and returns 1.
# NOTE(review): tmp_file is created before the case dispatch, so the
# unknown-type branch (and a known_issue call with empty data) leaves an
# unused mktemp file behind.
memory_capture_pattern() {
  local pattern_type="${1:-}"
  local pattern_data="${2:-}"

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local patterns_file="$mem_dir/patterns.json"

  local repo
  repo="$(repo_name)"
  local captured_at
  captured_at="$(now_iso)"

  local tmp_file
  tmp_file=$(mktemp)

  case "$pattern_type" in
    project)
      # Detect project attributes from marker files in $PWD.
      local proj_type="unknown" framework="" test_runner="" pkg_mgr="" language=""

      if [[ -f "package.json" ]]; then
        proj_type="node"
        # Package manager inferred from lockfile; later checks win, so a
        # repo with several lockfiles resolves to the last match (bun).
        pkg_mgr="npm"
        [[ -f "pnpm-lock.yaml" ]] && pkg_mgr="pnpm"
        [[ -f "yarn.lock" ]] && pkg_mgr="yarn"
        [[ -f "bun.lockb" ]] && pkg_mgr="bun"

        # First matching runtime dependency decides the framework label.
        framework=$(jq -r '
          if .dependencies.next then "next"
          elif .dependencies.express then "express"
          elif .dependencies.fastify then "fastify"
          elif .dependencies.react then "react"
          elif .dependencies.vue then "vue"
          elif .dependencies.svelte then "svelte"
          else ""
          end' package.json 2>/dev/null || echo "")

        test_runner=$(jq -r '
          if .devDependencies.jest then "jest"
          elif .devDependencies.vitest then "vitest"
          elif .devDependencies.mocha then "mocha"
          else ""
          end' package.json 2>/dev/null || echo "")

        [[ -f "tsconfig.json" ]] && language="typescript" || language="javascript"
      elif [[ -f "requirements.txt" || -f "pyproject.toml" || -f "setup.py" ]]; then
        proj_type="python"
        language="python"
        # assumes pyproject.toml implies poetry — TODO confirm (could be
        # any PEP 517 backend).
        [[ -f "pyproject.toml" ]] && pkg_mgr="poetry" || pkg_mgr="pip"
        test_runner="pytest"
      elif [[ -f "go.mod" ]]; then
        proj_type="go"
        language="go"
        test_runner="go test"
      elif [[ -f "Cargo.toml" ]]; then
        proj_type="rust"
        language="rust"
        test_runner="cargo test"
        pkg_mgr="cargo"
      fi

      # Conventional source directory; later checks win (app/ over src/).
      local source_dir=""
      [[ -d "src" ]] && source_dir="src/"
      [[ -d "lib" ]] && source_dir="lib/"
      [[ -d "app" ]] && source_dir="app/"

      # Test filename convention: first glob with at least one hit wins.
      local test_pattern=""
      if [[ -n "$(find . -maxdepth 3 -name '*.test.ts' 2>/dev/null | head -1)" ]]; then
        test_pattern="*.test.ts"
      elif [[ -n "$(find . -maxdepth 3 -name '*.test.js' 2>/dev/null | head -1)" ]]; then
        test_pattern="*.test.js"
      elif [[ -n "$(find . -maxdepth 3 -name '*_test.go' 2>/dev/null | head -1)" ]]; then
        test_pattern="*_test.go"
      elif [[ -n "$(find . -maxdepth 3 -name 'test_*.py' 2>/dev/null | head -1)" ]]; then
        test_pattern="test_*.py"
      fi

      # Module system: package.json "type": "module" means ESM.
      local import_style="commonjs"
      if [[ -f "package.json" ]]; then
        local pkg_type
        pkg_type=$(jq -r '.type // "commonjs"' package.json 2>/dev/null || echo "commonjs")
        [[ "$pkg_type" == "module" ]] && import_style="esm"
      fi

      # Merge detection results into patterns.json (atomic via mktemp+mv).
      jq --arg repo "$repo" \
        --arg ts "$captured_at" \
        --arg type "$proj_type" \
        --arg fw "$framework" \
        --arg tr "$test_runner" \
        --arg pm "$pkg_mgr" \
        --arg lang "$language" \
        --arg sd "$source_dir" \
        --arg tp "$test_pattern" \
        --arg is "$import_style" \
        '. + {
          repo: $repo,
          captured_at: $ts,
          project: {
            type: $type,
            framework: $fw,
            test_runner: $tr,
            package_manager: $pm,
            language: $lang
          },
          conventions: {
            source_dir: $sd,
            test_pattern: $tp,
            import_style: $is
          }
        }' "$patterns_file" > "$tmp_file" && mv "$tmp_file" "$patterns_file"

      emit_event "memory.pattern" "type=project" "proj_type=${proj_type}" "framework=${framework}"
      success "Captured project patterns (${proj_type}/${framework:-none})"
      ;;

    known_issue)
      # pattern_data is the issue description string
      if [[ -n "$pattern_data" ]]; then
        # Append only when not already present; keep the last 50 issues.
        jq --arg issue "$pattern_data" \
          'if .known_issues then
            if (.known_issues | index($issue)) then .
            else .known_issues += [$issue]
            end
          else . + {known_issues: [$issue]}
          end | .known_issues = (.known_issues | .[-50:])' \
          "$patterns_file" > "$tmp_file" && mv "$tmp_file" "$patterns_file"
        emit_event "memory.pattern" "type=known_issue"
      fi
      ;;

    *)
      warn "Unknown pattern type: ${pattern_type}"
      return 1
      ;;
  esac
}
|
|
478
|
+
|
|
479
|
+
# memory_inject_context <stage_id>
# Returns a text block of relevant memory for a given pipeline stage.
# Writes markdown-flavoured text to stdout — presumably spliced into a
# stage prompt by the caller; verify against cct-pipeline. The selection
# of sections depends on the stage id; unknown stages get a generic dump.
# Emits a "memory.inject" event after printing.
memory_inject_context() {
  local stage_id="${1:-}"

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"

  # Check that we have memory to inject: any store file bigger than the
  # empty-seed size (> 5 bytes) counts.
  local has_memory=false
  for f in "$mem_dir/patterns.json" "$mem_dir/failures.json" "$mem_dir/decisions.json"; do
    if [[ -f "$f" ]] && [[ "$(wc -c < "$f")" -gt 5 ]]; then
      has_memory=true
      break
    fi
  done

  if [[ "$has_memory" == "false" ]]; then
    echo "# No memory available for this repository yet."
    return 0
  fi

  echo "# Shipwright Memory Context"
  echo "# Injected at: $(now_iso)"
  echo "# Stage: ${stage_id}"
  echo ""

  case "$stage_id" in
    plan|design)
      # Past design decisions + codebase patterns
      echo "## Codebase Patterns"
      if [[ -f "$mem_dir/patterns.json" ]]; then
        local proj_type framework lang
        proj_type=$(jq -r '.project.type // "unknown"' "$mem_dir/patterns.json" 2>/dev/null)
        framework=$(jq -r '.project.framework // ""' "$mem_dir/patterns.json" 2>/dev/null)
        lang=$(jq -r '.project.language // ""' "$mem_dir/patterns.json" 2>/dev/null)
        echo "- Project: ${proj_type} / ${framework:-no framework} / ${lang:-unknown}"

        local src_dir test_pat
        src_dir=$(jq -r '.conventions.source_dir // ""' "$mem_dir/patterns.json" 2>/dev/null)
        test_pat=$(jq -r '.conventions.test_pattern // ""' "$mem_dir/patterns.json" 2>/dev/null)
        [[ -n "$src_dir" ]] && echo "- Source directory: ${src_dir}"
        [[ -n "$test_pat" ]] && echo "- Test file pattern: ${test_pat}"
      fi

      echo ""
      echo "## Past Design Decisions"
      if [[ -f "$mem_dir/decisions.json" ]]; then
        # Last five decisions only.
        jq -r '.decisions[-5:][] | "- [\(.type // "decision")] \(.summary // .description // "no description")"' \
          "$mem_dir/decisions.json" 2>/dev/null || echo "- No decisions recorded yet."
      fi

      echo ""
      echo "## Known Issues"
      if [[ -f "$mem_dir/patterns.json" ]]; then
        jq -r '.known_issues // [] | .[] | "- \(.)"' "$mem_dir/patterns.json" 2>/dev/null || true
      fi
      ;;

    build)
      # Failure patterns to avoid + code conventions
      echo "## Failure Patterns to Avoid"
      if [[ -f "$mem_dir/failures.json" ]]; then
        # Top ten failures ordered by how often they've been seen.
        jq -r '.failures | sort_by(-.seen_count) | .[:10][] |
          "- [\(.stage)] \(.pattern) (seen \(.seen_count)x)" +
          if .fix != "" then "\n Fix: \(.fix)" else "" end' \
          "$mem_dir/failures.json" 2>/dev/null || echo "- No failures recorded."
      fi

      echo ""
      echo "## Known Fixes"
      if [[ -f "$mem_dir/failures.json" ]]; then
        # Only build-stage entries that memory_analyze_failure has filled in.
        jq -r '.failures[] | select(.root_cause != "" and .fix != "" and .stage == "build") |
          "- [\(.category // "unknown")] \(.root_cause)\n Fix: \(.fix)"' \
          "$mem_dir/failures.json" 2>/dev/null || echo "- No analyzed fixes yet."
      else
        echo "- No analyzed fixes yet."
      fi

      echo ""
      echo "## Code Conventions"
      if [[ -f "$mem_dir/patterns.json" ]]; then
        local import_style
        import_style=$(jq -r '.conventions.import_style // ""' "$mem_dir/patterns.json" 2>/dev/null)
        [[ -n "$import_style" ]] && echo "- Import style: ${import_style}"
        local test_runner
        test_runner=$(jq -r '.project.test_runner // ""' "$mem_dir/patterns.json" 2>/dev/null)
        [[ -n "$test_runner" ]] && echo "- Test runner: ${test_runner}"
      fi
      ;;

    test)
      # Known flaky tests + coverage baselines
      echo "## Known Test Failures"
      if [[ -f "$mem_dir/failures.json" ]]; then
        jq -r '.failures[] | select(.stage == "test") |
          "- \(.pattern) (seen \(.seen_count)x)" +
          if .fix != "" then "\n Fix: \(.fix)" else "" end' \
          "$mem_dir/failures.json" 2>/dev/null || echo "- No test failures recorded."
      fi

      echo ""
      echo "## Known Fixes"
      if [[ -f "$mem_dir/failures.json" ]]; then
        jq -r '.failures[] | select(.root_cause != "" and .fix != "" and .stage == "test") |
          "- [\(.category // "unknown")] \(.root_cause)\n Fix: \(.fix)"' \
          "$mem_dir/failures.json" 2>/dev/null || echo "- No analyzed fixes yet."
      else
        echo "- No analyzed fixes yet."
      fi

      echo ""
      echo "## Performance Baselines"
      if [[ -f "$mem_dir/metrics.json" ]]; then
        local test_dur coverage
        test_dur=$(jq -r '.baselines.test_duration_s // "not tracked"' "$mem_dir/metrics.json" 2>/dev/null)
        coverage=$(jq -r '.baselines.coverage_pct // "not tracked"' "$mem_dir/metrics.json" 2>/dev/null)
        echo "- Test duration baseline: ${test_dur}s"
        echo "- Coverage baseline: ${coverage}%"
      fi
      ;;

    review|compound_quality)
      # Past review feedback patterns
      echo "## Common Review Feedback"
      if [[ -f "$mem_dir/failures.json" ]]; then
        jq -r '.failures[] | select(.stage == "review") |
          "- \(.pattern)"' \
          "$mem_dir/failures.json" 2>/dev/null || echo "- No review patterns recorded."
      fi

      echo ""
      echo "## Cross-Repo Learnings"
      if [[ -f "$GLOBAL_MEMORY" ]]; then
        jq -r '.cross_repo_learnings[-5:][] |
          "- [\(.repo)] \(.type): \(.bugs // 0) bugs, \(.warnings // 0) warnings"' \
          "$GLOBAL_MEMORY" 2>/dev/null || true
      fi
      ;;

    *)
      # Generic context for any other stage — dump patterns.json minus
      # the known_issues array.
      echo "## Repository Patterns"
      if [[ -f "$mem_dir/patterns.json" ]]; then
        jq -r 'to_entries | map(select(.key != "known_issues")) | from_entries' \
          "$mem_dir/patterns.json" 2>/dev/null || true
      fi
      ;;
  esac

  echo ""
  emit_event "memory.inject" "stage=${stage_id}"
}
|
|
633
|
+
|
|
634
|
+
# memory_update_metrics <metric_name> <value>
# Track performance baselines and flag regressions.
# Numeric values are stored as JSON numbers; any other value is stored as
# a JSON string. (Previously a non-numeric value made `jq --argjson` fail,
# which under the script's `set -e` aborted the whole script and leaked
# the mktemp file.) Duration/time metrics that grow more than 20% over
# the stored baseline emit a "memory.regression" event before the
# baseline is updated. Returns 1 on missing arguments or jq failure.
memory_update_metrics() {
  local metric_name="${1:-}"
  local value="${2:-}"

  [[ -z "$metric_name" || -z "$value" ]] && return 1

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local metrics_file="$mem_dir/metrics.json"

  # Read previous baseline (0 when the metric has never been recorded).
  local previous
  previous=$(jq -r --arg m "$metric_name" '.baselines[$m] // 0' "$metrics_file" 2>/dev/null || echo "0")

  local is_numeric=0
  [[ "$value" =~ ^-?[0-9]+(\.[0-9]+)?$ ]] && is_numeric=1

  # Check for regression (>20% increase) — only meaningful for numeric
  # duration/time metrics with an existing baseline.
  if [[ "$is_numeric" -eq 1 && "$previous" != "0" && "$previous" != "null" ]]; then
    if [[ "${metric_name}" == *"duration"* || "${metric_name}" == *"time"* ]]; then
      local threshold
      threshold=$(echo "$previous" | awk '{printf "%.0f", $1 * 1.2}')
      if [[ "$(echo "$value $threshold" | awk '{print ($1 > $2)}')" == "1" ]]; then
        warn "Regression detected: ${metric_name} increased from ${previous} to ${value} (>20%)"
        emit_event "memory.regression" "metric=${metric_name}" "previous=${previous}" "current=${value}"
      fi
    fi
  fi

  # Update baseline using an atomic write (temp file then rename).
  local tmp_file
  tmp_file=$(mktemp)
  local jq_status=0
  if [[ "$is_numeric" -eq 1 ]]; then
    jq --arg m "$metric_name" \
      --argjson v "$value" \
      --arg ts "$(now_iso)" \
      '.baselines[$m] = $v | .last_updated = $ts' \
      "$metrics_file" > "$tmp_file" || jq_status=$?
  else
    # Non-numeric values are stored as strings instead of crashing jq.
    jq --arg m "$metric_name" \
      --arg v "$value" \
      --arg ts "$(now_iso)" \
      '.baselines[$m] = $v | .last_updated = $ts' \
      "$metrics_file" > "$tmp_file" || jq_status=$?
  fi
  if [[ "$jq_status" -eq 0 ]]; then
    mv "$tmp_file" "$metrics_file"
  else
    rm -f "$tmp_file"  # don't leak the temp file or clobber good data
    return 1
  fi

  emit_event "memory.metric" "metric=${metric_name}" "value=${value}"
}
|
|
674
|
+
|
|
675
|
+
# memory_capture_decision <type> <summary> <detail>
# Record a design decision / ADR.
#
# Appends an entry {type, summary, detail, recorded_at} to decisions.json,
# keeping only the 100 most recent decisions. The file is updated
# atomically (tmp file + rename). Returns 1 if the write fails, in which
# case no event is emitted and no success message is printed.
memory_capture_decision() {
  local dec_type="${1:-decision}"
  local summary="${2:-}"
  local detail="${3:-}"

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local decisions_file="$mem_dir/decisions.json"

  # Seed the file if absent so the jq append below cannot fail on a missing
  # input (which previously dropped the decision but still reported success).
  [[ -f "$decisions_file" ]] || printf '{"decisions":[]}\n' > "$decisions_file"

  local tmp_file
  tmp_file=$(mktemp)
  if jq --arg type "$dec_type" \
    --arg summary "$summary" \
    --arg detail "$detail" \
    --arg ts "$(now_iso)" \
    '.decisions += [{
      type: $type,
      summary: $summary,
      detail: $detail,
      recorded_at: $ts
    }] | .decisions = (.decisions | .[-100:])' \
    "$decisions_file" > "$tmp_file"; then
    mv "$tmp_file" "$decisions_file"
  else
    # Leave the original file untouched; clean up the temp file.
    rm -f "$tmp_file"
    return 1
  fi

  # Event payload truncates the summary to 80 characters.
  emit_event "memory.decision" "type=${dec_type}" "summary=${summary:0:80}"
  success "Recorded decision: ${summary}"
}
|
|
704
|
+
|
|
705
|
+
# ─── CLI Display Commands ──────────────────────────────────────────────────

# memory_show [--global]
# Pretty-print the memory for the current repo: detected project facts,
# top failure patterns, recent decisions, and performance baselines.
# With --global, instead show the 10 most recent cross-repo learnings from
# $GLOBAL_MEMORY and return.
# Outputs: formatted report on stdout (uses PURPLE/BOLD/CYAN/DIM/RESET
# color variables defined earlier in the file). Returns: 0.
memory_show() {
  local show_global=false

  # Only --global is recognized; any other argument is silently ignored.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --global) show_global=true; shift ;;
      *) shift ;;
    esac
  done

  # Global view: cross-repo learnings, then return without touching
  # the per-repo memory directory.
  if [[ "$show_global" == "true" ]]; then
    echo ""
    echo -e "${PURPLE}${BOLD}━━━ Global Memory ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
    if [[ -f "$GLOBAL_MEMORY" ]]; then
      local learning_count
      learning_count=$(jq '.cross_repo_learnings | length' "$GLOBAL_MEMORY" 2>/dev/null || echo 0)
      echo -e " Cross-repo learnings: ${CYAN}${learning_count}${RESET}"
      echo ""
      # Only the 10 most recent learnings are listed.
      if [[ "$learning_count" -gt 0 ]]; then
        jq -r '.cross_repo_learnings[-10:][] |
          " \(.repo) — \(.type) (\(.captured_at // "unknown"))"' \
          "$GLOBAL_MEMORY" 2>/dev/null || true
      fi
    else
      echo -e " ${DIM}No global memory yet.${RESET}"
    fi
    echo -e "${PURPLE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
    echo ""
    return 0
  fi

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local repo
  repo="$(repo_name)"

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Memory: ${repo} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""

  # Patterns — detected project facts (type, framework, language, tooling).
  echo -e "${BOLD} PROJECT${RESET}"
  if [[ -f "$mem_dir/patterns.json" ]]; then
    local proj_type framework lang pkg_mgr test_runner
    proj_type=$(jq -r '.project.type // "unknown"' "$mem_dir/patterns.json" 2>/dev/null)
    framework=$(jq -r '.project.framework // "-"' "$mem_dir/patterns.json" 2>/dev/null)
    lang=$(jq -r '.project.language // "-"' "$mem_dir/patterns.json" 2>/dev/null)
    pkg_mgr=$(jq -r '.project.package_manager // "-"' "$mem_dir/patterns.json" 2>/dev/null)
    test_runner=$(jq -r '.project.test_runner // "-"' "$mem_dir/patterns.json" 2>/dev/null)
    printf " %-18s %s\n" "Type:" "$proj_type"
    printf " %-18s %s\n" "Framework:" "$framework"
    printf " %-18s %s\n" "Language:" "$lang"
    printf " %-18s %s\n" "Package manager:" "$pkg_mgr"
    printf " %-18s %s\n" "Test runner:" "$test_runner"
  else
    echo -e " ${DIM}No patterns captured yet.${RESET}"
  fi
  echo ""

  # Failures — top 5 failure patterns, most frequently seen first.
  echo -e "${BOLD} FAILURE PATTERNS${RESET}"
  if [[ -f "$mem_dir/failures.json" ]]; then
    local failure_count
    failure_count=$(jq '.failures | length' "$mem_dir/failures.json" 2>/dev/null || echo 0)
    if [[ "$failure_count" -gt 0 ]]; then
      jq -r '.failures | sort_by(-.seen_count) | .[:5][] |
        " [\(.stage)] \(.pattern[:80]) — seen \(.seen_count)x"' \
        "$mem_dir/failures.json" 2>/dev/null || true
    else
      echo -e " ${DIM}No failures recorded.${RESET}"
    fi
  else
    echo -e " ${DIM}No failures recorded.${RESET}"
  fi
  echo ""

  # Decisions — the 5 most recently recorded design decisions.
  echo -e "${BOLD} DECISIONS${RESET}"
  if [[ -f "$mem_dir/decisions.json" ]]; then
    local decision_count
    decision_count=$(jq '.decisions | length' "$mem_dir/decisions.json" 2>/dev/null || echo 0)
    if [[ "$decision_count" -gt 0 ]]; then
      jq -r '.decisions[-5:][] |
        " [\(.type)] \(.summary)"' \
        "$mem_dir/decisions.json" 2>/dev/null || true
    else
      echo -e " ${DIM}No decisions recorded.${RESET}"
    fi
  else
    echo -e " ${DIM}No decisions recorded.${RESET}"
  fi
  echo ""

  # Metrics — every tracked performance baseline, printed "name: value".
  echo -e "${BOLD} BASELINES${RESET}"
  if [[ -f "$mem_dir/metrics.json" ]]; then
    local baseline_count
    baseline_count=$(jq '.baselines | length' "$mem_dir/metrics.json" 2>/dev/null || echo 0)
    if [[ "$baseline_count" -gt 0 ]]; then
      jq -r '.baselines | to_entries[] | " \(.key): \(.value)"' \
        "$mem_dir/metrics.json" 2>/dev/null || true
    else
      echo -e " ${DIM}No baselines tracked yet.${RESET}"
    fi
  else
    echo -e " ${DIM}No baselines tracked yet.${RESET}"
  fi

  echo ""
  echo -e "${PURPLE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""
}
|
|
820
|
+
|
|
821
|
+
# memory_search <keyword>
# Case-insensitively search every memory store for a keyword and print the
# matching entries: patterns and global memory are searched as raw text
# (grep), failures and decisions as structured JSON (jq test()).
# Returns 1 when no keyword is given.
memory_search() {
  local query="${1:-}"

  if [[ -z "$query" ]]; then
    error "Usage: shipwright memory search <keyword>"
    return 1
  fi

  ensure_memory_dir
  local mem_dir repo
  mem_dir="$(repo_memory_dir)"
  repo="$(repo_name)"

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Memory Search: \"${query}\" ━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""

  # Count how many sections produced at least one hit.
  local sections_hit=0
  local hits

  # Patterns: plain case-insensitive grep over the raw JSON text.
  if [[ -f "$mem_dir/patterns.json" ]]; then
    hits=$(grep -i "$query" "$mem_dir/patterns.json" 2>/dev/null || true)
    if [[ -n "$hits" ]]; then
      echo -e " ${BOLD}Patterns:${RESET}"
      head -5 <<<"$hits" | sed 's/^/ /'
      echo ""
      (( ++sections_hit ))
    fi
  fi

  # Failures: structured match against the recorded pattern text.
  if [[ -f "$mem_dir/failures.json" ]]; then
    hits=$(jq -r --arg kw "$query" \
      '.failures[] | select(.pattern | test($kw; "i")) |
      " [\(.stage)] \(.pattern[:80]) — seen \(.seen_count)x"' \
      "$mem_dir/failures.json" 2>/dev/null || true)
    if [[ -n "$hits" ]]; then
      echo -e " ${BOLD}Failures:${RESET}"
      head -5 <<<"$hits"
      echo ""
      (( ++sections_hit ))
    fi
  fi

  # Decisions: structured match against the summary.
  if [[ -f "$mem_dir/decisions.json" ]]; then
    hits=$(jq -r --arg kw "$query" \
      '.decisions[] | select((.summary // "") | test($kw; "i")) |
      " [\(.type)] \(.summary)"' \
      "$mem_dir/decisions.json" 2>/dev/null || true)
    if [[ -n "$hits" ]]; then
      echo -e " ${BOLD}Decisions:${RESET}"
      head -5 <<<"$hits"
      echo ""
      (( ++sections_hit ))
    fi
  fi

  # Global memory: raw grep again, showing fewer lines.
  if [[ -f "$GLOBAL_MEMORY" ]]; then
    hits=$(grep -i "$query" "$GLOBAL_MEMORY" 2>/dev/null || true)
    if [[ -n "$hits" ]]; then
      echo -e " ${BOLD}Global Memory:${RESET}"
      head -3 <<<"$hits" | sed 's/^/ /'
      echo ""
      (( ++sections_hit ))
    fi
  fi

  if (( sections_hit == 0 )); then
    echo -e " ${DIM}No matches found for \"${query}\".${RESET}"
  fi

  echo -e "${PURPLE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""
}
|
|
902
|
+
|
|
903
|
+
# memory_forget [--all]
# Delete every persisted memory file for the current repository.
#
# The --all flag is required as explicit confirmation; without it a usage
# message is printed and 1 is returned. Unknown arguments are ignored.
memory_forget() {
  local forget_all=false

  while [[ $# -gt 0 ]]; do
    case "$1" in
      --all) forget_all=true; shift ;;
      *) shift ;;
    esac
  done

  # Guard clause: refuse to delete anything without explicit confirmation.
  if [[ "$forget_all" != "true" ]]; then
    error "Usage: shipwright memory forget --all"
    echo -e " ${DIM}Use --all to confirm clearing memory for this repo.${RESET}"
    return 1
  fi

  local mem_dir repo
  mem_dir="$(repo_memory_dir)"
  repo="$(repo_name)"  # cache: used twice below

  if [[ -d "$mem_dir" ]]; then
    # ${mem_dir:?} aborts instead of expanding empty — guards the rm -rf
    # from ever running with an empty path; -- stops option parsing.
    rm -rf -- "${mem_dir:?}"
    success "Cleared all memory for ${repo}"
    emit_event "memory.forget" "repo=${repo}" "scope=all"
  else
    warn "No memory found for this repository."
  fi
}
|
|
929
|
+
|
|
930
|
+
# memory_export
# Print the repo's entire memory (patterns, failures, decisions, metrics)
# as one JSON document on stdout, suitable for `shipwright memory import`.
memory_export() {
  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"

  # jq --slurpfile aborts on a missing input file, which made export fail
  # for a fresh repo. Seed any absent file with its empty default — the
  # same defaults memory_import uses. (ensure_memory_dir may already seed
  # these — TODO confirm; the seeding below is a no-op in that case.)
  [[ -f "$mem_dir/patterns.json" ]] || printf '{}\n' > "$mem_dir/patterns.json"
  [[ -f "$mem_dir/failures.json" ]] || printf '{"failures":[]}\n' > "$mem_dir/failures.json"
  [[ -f "$mem_dir/decisions.json" ]] || printf '{"decisions":[]}\n' > "$mem_dir/decisions.json"
  [[ -f "$mem_dir/metrics.json" ]] || printf '{"baselines":{}}\n' > "$mem_dir/metrics.json"

  # Merge all memory files into a single JSON export.
  local export_json
  export_json=$(jq -n \
    --arg repo "$(repo_name)" \
    --arg hash "$(repo_hash)" \
    --arg ts "$(now_iso)" \
    --slurpfile patterns "$mem_dir/patterns.json" \
    --slurpfile failures "$mem_dir/failures.json" \
    --slurpfile decisions "$mem_dir/decisions.json" \
    --slurpfile metrics "$mem_dir/metrics.json" \
    '{
      exported_at: $ts,
      repo: $repo,
      repo_hash: $hash,
      patterns: $patterns[0],
      failures: $failures[0],
      decisions: $decisions[0],
      metrics: $metrics[0]
    }')

  echo "$export_json"
  emit_event "memory.export" "repo=$(repo_name)"
}
|
|
958
|
+
|
|
959
|
+
# memory_import <file.json>
# Restore memory for the current repo from a JSON export produced by
# memory_export. Each section falls back to its empty default when absent
# from the import file, and is written atomically (tmp file + rename).
# Returns 1 on a missing argument or invalid JSON.
memory_import() {
  local import_file="${1:-}"

  if [[ -z "$import_file" || ! -f "$import_file" ]]; then
    error "Usage: shipwright memory import <file.json>"
    return 1
  fi

  # Validate JSON before touching any memory files.
  if ! jq empty "$import_file" 2>/dev/null; then
    error "Invalid JSON file: $import_file"
    return 1
  fi

  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"

  # Extract each section to its own file. The spec format is
  # "<section>|<default>"; the section name doubles as the target filename.
  local spec key default tmp_file
  for spec in \
    'patterns|{}' \
    'failures|{"failures":[]}' \
    'decisions|{"decisions":[]}' \
    'metrics|{"baselines":{}}'; do
    key="${spec%%|*}"
    default="${spec#*|}"
    tmp_file=$(mktemp)
    # Filter is built from trusted constants above, never from user input.
    if jq ".${key} // ${default}" "$import_file" > "$tmp_file"; then
      mv "$tmp_file" "$mem_dir/${key}.json"
    else
      # Leave the existing section untouched; drop the temp file.
      rm -f "$tmp_file"
    fi
  done

  success "Imported memory from ${import_file}"
  emit_event "memory.import" "repo=$(repo_name)" "file=${import_file}"
}
|
|
989
|
+
|
|
990
|
+
# memory_stats
# Print storage size, entry counts, age, and usage counters for the
# current repo's memory. Output goes to stdout; always returns 0.
memory_stats() {
  ensure_memory_dir
  local mem_dir
  mem_dir="$(repo_memory_dir)"
  local repo
  repo="$(repo_name)"

  echo ""
  echo -e "${PURPLE}${BOLD}━━━ Memory Stats: ${repo} ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""

  # Total on-disk size of all memory JSON files. The -f test guards the
  # literal-glob case when no *.json files exist.
  local total_size=0
  for f in "$mem_dir"/*.json; do
    if [[ -f "$f" ]]; then
      local fsize
      fsize=$(wc -c < "$f" | tr -d ' ')
      total_size=$((total_size + fsize))
    fi
  done

  # Human-readable size (B / KB / MB).
  local size_human
  if [[ "$total_size" -ge 1048576 ]]; then
    size_human="$(echo "$total_size" | awk '{printf "%.1fMB", $1/1048576}')"
  elif [[ "$total_size" -ge 1024 ]]; then
    size_human="$(echo "$total_size" | awk '{printf "%.1fKB", $1/1024}')"
  else
    size_human="${total_size}B"
  fi

  echo -e " ${BOLD}Storage${RESET}"
  printf " %-18s %s\n" "Total size:" "$size_human"
  printf " %-18s %s\n" "Location:" "$mem_dir"
  echo ""

  # Entry counts per store (0 when a file is missing).
  local failure_count decision_count baseline_count known_issue_count
  failure_count=$(jq '.failures | length' "$mem_dir/failures.json" 2>/dev/null || echo 0)
  decision_count=$(jq '.decisions | length' "$mem_dir/decisions.json" 2>/dev/null || echo 0)
  baseline_count=$(jq '.baselines | length' "$mem_dir/metrics.json" 2>/dev/null || echo 0)
  known_issue_count=$(jq '.known_issues // [] | length' "$mem_dir/patterns.json" 2>/dev/null || echo 0)

  echo -e " ${BOLD}Contents${RESET}"
  printf " %-18s %s\n" "Failure patterns:" "$failure_count"
  printf " %-18s %s\n" "Decisions:" "$decision_count"
  printf " %-18s %s\n" "Baselines:" "$baseline_count"
  printf " %-18s %s\n" "Known issues:" "$known_issue_count"
  echo ""

  # Age — when patterns were first captured.
  local captured_at
  captured_at=$(jq -r '.captured_at // ""' "$mem_dir/patterns.json" 2>/dev/null || echo "")
  if [[ -n "$captured_at" && "$captured_at" != "null" ]]; then
    printf " %-18s %s\n" "First captured:" "$captured_at"
  fi

  # Event-based usage counters.
  local inject_count capture_count
  if [[ -f "$EVENTS_FILE" ]]; then
    # BUG FIX: grep -c prints "0" *and* exits non-zero when there are no
    # matches, so the old `$(grep -c … || echo 0)` captured "0\n0" and
    # corrupted the printf output. Capture with `|| true` and only default
    # when grep produced no output at all (e.g. unreadable file).
    inject_count=$(grep -c '"memory.inject"' "$EVENTS_FILE" 2>/dev/null || true)
    inject_count=${inject_count:-0}
    capture_count=$(grep -c '"memory.capture"' "$EVENTS_FILE" 2>/dev/null || true)
    capture_count=${capture_count:-0}
    echo ""
    echo -e " ${BOLD}Usage${RESET}"
    printf " %-18s %s\n" "Context injections:" "$inject_count"
    printf " %-18s %s\n" "Pipeline captures:" "$capture_count"
  fi

  echo ""
  echo -e "${PURPLE}${BOLD}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${RESET}"
  echo ""
}
|
|
1061
|
+
|
|
1062
|
+
# ─── Help ──────────────────────────────────────────────────────────────────

# show_help
# Print the full usage/help text for `shipwright memory` to stdout.
# Relies on the color variables (CYAN, BOLD, DIM, RESET) and VERSION
# defined earlier in the file.
show_help() {
  echo -e "${CYAN}${BOLD}shipwright memory${RESET} ${DIM}v${VERSION}${RESET} — Persistent Learning & Context System"
  echo ""
  echo -e "${BOLD}USAGE${RESET}"
  echo -e " ${CYAN}shipwright memory${RESET} <command> [options]"
  echo ""
  echo -e "${BOLD}COMMANDS${RESET}"
  echo -e " ${CYAN}show${RESET} Display memory for current repo"
  echo -e " ${CYAN}show${RESET} --global Display cross-repo learnings"
  echo -e " ${CYAN}search${RESET} <keyword> Search memory for keyword"
  echo -e " ${CYAN}forget${RESET} --all Clear memory for current repo"
  echo -e " ${CYAN}export${RESET} Export memory as JSON"
  echo -e " ${CYAN}import${RESET} <file> Import memory from JSON"
  echo -e " ${CYAN}stats${RESET} Show memory size, age, hit rate"
  echo ""
  echo -e "${BOLD}PIPELINE INTEGRATION${RESET}"
  echo -e " ${CYAN}capture${RESET} <state> <artifacts> Capture pipeline learnings"
  echo -e " ${CYAN}inject${RESET} <stage_id> Inject context for a stage"
  echo -e " ${CYAN}pattern${RESET} <type> [data] Record a codebase pattern"
  echo -e " ${CYAN}metric${RESET} <name> <value> Update a performance baseline"
  echo -e " ${CYAN}decision${RESET} <type> <summary> Record a design decision"
  echo -e " ${CYAN}analyze-failure${RESET} <log> <stage> Analyze failure root cause via AI"
  echo ""
  echo -e "${BOLD}EXAMPLES${RESET}"
  echo -e " ${DIM}shipwright memory show${RESET} # View repo memory"
  echo -e " ${DIM}shipwright memory show --global${RESET} # View cross-repo learnings"
  echo -e " ${DIM}shipwright memory search \"auth\"${RESET} # Find auth-related memories"
  echo -e " ${DIM}shipwright memory export > backup.json${RESET} # Export memory"
  echo -e " ${DIM}shipwright memory import backup.json${RESET} # Import memory"
  echo -e " ${DIM}shipwright memory capture .claude/pipeline-state.md .claude/pipeline-artifacts${RESET}"
  echo -e " ${DIM}shipwright memory inject build${RESET} # Get context for build stage"
}
|
|
1096
|
+
|
|
1097
|
+
# ─── Command Router ─────────────────────────────────────────────────────────

# Dispatch the first CLI argument to its handler; every remaining argument
# is forwarded untouched. No argument at all falls through to help.
SUBCOMMAND="${1:-help}"
if [[ $# -gt 0 ]]; then shift; fi

case "$SUBCOMMAND" in
  show)            memory_show "$@" ;;
  search)          memory_search "$@" ;;
  forget)          memory_forget "$@" ;;
  export)          memory_export ;;
  import)          memory_import "$@" ;;
  stats)           memory_stats ;;
  capture)         memory_capture_pipeline "$@" ;;
  inject)          memory_inject_context "$@" ;;
  pattern)         memory_capture_pattern "$@" ;;
  metric)          memory_update_metrics "$@" ;;
  decision)        memory_capture_decision "$@" ;;
  analyze-failure) memory_analyze_failure "$@" ;;
  help|--help|-h)  show_help ;;
  *)
    error "Unknown command: ${SUBCOMMAND}"
    echo ""
    show_help
    exit 1
    ;;
esac