@ekkos/cli 1.0.33 → 1.0.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/capture/jsonl-rewriter.js +72 -7
- package/dist/commands/dashboard.js +186 -557
- package/dist/commands/init.js +3 -15
- package/dist/commands/run.js +221 -259
- package/dist/commands/setup.js +0 -47
- package/dist/commands/swarm-dashboard.js +4 -13
- package/dist/deploy/instructions.d.ts +2 -5
- package/dist/deploy/instructions.js +8 -11
- package/dist/deploy/settings.js +21 -15
- package/dist/deploy/skills.d.ts +0 -8
- package/dist/deploy/skills.js +0 -26
- package/dist/index.js +2 -2
- package/dist/lib/usage-parser.js +1 -2
- package/dist/utils/platform.d.ts +0 -3
- package/dist/utils/platform.js +1 -4
- package/dist/utils/session-binding.d.ts +1 -1
- package/dist/utils/session-binding.js +2 -3
- package/package.json +4 -2
- package/templates/CLAUDE.md +23 -135
- package/templates/agents/README.md +182 -0
- package/templates/agents/code-reviewer.md +166 -0
- package/templates/agents/debug-detective.md +169 -0
- package/templates/agents/ekkOS_Vercel.md +99 -0
- package/templates/agents/extension-manager.md +229 -0
- package/templates/agents/git-companion.md +185 -0
- package/templates/agents/github-test-agent.md +321 -0
- package/templates/agents/railway-manager.md +179 -0
- package/templates/ekkos-manifest.json +8 -8
- package/templates/hooks/assistant-response.ps1 +160 -256
- package/templates/hooks/assistant-response.sh +66 -130
- package/templates/hooks/hooks.json +0 -6
- package/templates/hooks/lib/contract.sh +31 -43
- package/templates/hooks/lib/count-tokens.cjs +0 -0
- package/templates/hooks/lib/ekkos-reminders.sh +0 -0
- package/templates/hooks/lib/state.sh +1 -53
- package/templates/hooks/session-start.ps1 +391 -91
- package/templates/hooks/session-start.sh +166 -201
- package/templates/hooks/stop.ps1 +341 -202
- package/templates/hooks/stop.sh +948 -275
- package/templates/hooks/user-prompt-submit.ps1 +548 -224
- package/templates/hooks/user-prompt-submit.sh +456 -382
- package/templates/plan-template.md +0 -0
- package/templates/spec-template.md +0 -0
- package/templates/windsurf-hooks/before-submit-prompt.sh +238 -0
- package/templates/windsurf-hooks/hooks.json +2 -9
- package/templates/windsurf-hooks/install.sh +0 -0
- package/templates/windsurf-hooks/lib/contract.sh +0 -2
- package/templates/windsurf-hooks/post-cascade-response.sh +0 -0
- package/templates/windsurf-hooks/pre-user-prompt.sh +0 -0
- package/templates/windsurf-skills/ekkos-memory/SKILL.md +219 -0
- package/README.md +0 -57
package/templates/hooks/stop.sh
CHANGED
|
@@ -1,15 +1,11 @@
|
|
|
1
1
|
#!/bin/bash
|
|
2
2
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
3
|
-
# ekkOS_ Hook: Stop -
|
|
4
|
-
# NO jq dependency - uses Node.js for all JSON parsing
|
|
3
|
+
# ekkOS_ Hook: Stop - FULL CONTEXT CAPTURE
|
|
5
4
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
6
|
-
#
|
|
7
|
-
#
|
|
8
|
-
#
|
|
9
|
-
#
|
|
10
|
-
# NO compliance checking - skills handle that
|
|
11
|
-
# NO PatternGuard validation - skills handle that
|
|
12
|
-
# NO verbose output - just capture silently
|
|
5
|
+
# Captures FULL turn content to L2 (episodic memory):
|
|
6
|
+
# - Full user query
|
|
7
|
+
# - Full assistant response (no truncation)
|
|
8
|
+
# - Complete file changes with edit content (old_string → new_string)
|
|
13
9
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
14
10
|
|
|
15
11
|
set +e
|
|
@@ -18,341 +14,1018 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
|
18
14
|
PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"
|
|
19
15
|
STATE_DIR="$PROJECT_ROOT/.claude/state"
|
|
20
16
|
|
|
21
|
-
mkdir -p "$STATE_DIR" 2>/dev/null
|
|
22
|
-
|
|
23
|
-
# ═══════════════════════════════════════════════════════════════════════════
|
|
24
|
-
# CONFIG PATHS - No jq dependency (v1.2 spec)
|
|
25
|
-
# Session words live in ~/.ekkos/ so they work in ANY project
|
|
26
|
-
# ═══════════════════════════════════════════════════════════════════════════
|
|
27
|
-
EKKOS_CONFIG_DIR="${EKKOS_CONFIG_DIR:-$HOME/.ekkos}"
|
|
28
|
-
SESSION_WORDS_JSON="$EKKOS_CONFIG_DIR/session-words.json"
|
|
29
|
-
SESSION_WORDS_DEFAULT="$EKKOS_CONFIG_DIR/.defaults/session-words.json"
|
|
30
|
-
JSON_PARSE_HELPER="$EKKOS_CONFIG_DIR/.helpers/json-parse.cjs"
|
|
31
|
-
|
|
32
17
|
INPUT=$(cat)
|
|
33
18
|
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
local path="$2"
|
|
38
|
-
echo "$json" | node -e "
|
|
39
|
-
const data = JSON.parse(require('fs').readFileSync('/dev/stdin', 'utf8') || '{}');
|
|
40
|
-
const path = '$path'.replace(/^\./,'').split('.').filter(Boolean);
|
|
41
|
-
let result = data;
|
|
42
|
-
for (const p of path) {
|
|
43
|
-
if (result === undefined || result === null) { result = undefined; break; }
|
|
44
|
-
result = result[p];
|
|
45
|
-
}
|
|
46
|
-
if (result !== undefined && result !== null) console.log(result);
|
|
47
|
-
" 2>/dev/null || echo ""
|
|
48
|
-
}
|
|
19
|
+
RAW_SESSION_ID=$(echo "$INPUT" | jq -r '.session_id // "unknown"')
|
|
20
|
+
TRANSCRIPT_PATH=$(echo "$INPUT" | jq -r '.transcript_path // ""')
|
|
21
|
+
MODEL_USED=$(echo "$INPUT" | jq -r '.model // "claude-sonnet-4-5"')
|
|
49
22
|
|
|
50
|
-
|
|
51
|
-
[
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
[ -z "$MODEL_USED" ] && MODEL_USED="claude-sonnet-4-5"
|
|
23
|
+
# DEBUG: Log hook input (full INPUT for debugging)
|
|
24
|
+
echo "[ekkOS DEBUG] $(date -u +%H:%M:%S) stop.sh: session=$RAW_SESSION_ID, transcript_path=$TRANSCRIPT_PATH" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
25
|
+
echo "[ekkOS DEBUG] $(date -u +%H:%M:%S) stop.sh: transcript exists=$([ -f "$TRANSCRIPT_PATH" ] && echo 'yes' || echo 'no')" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
26
|
+
echo "[ekkOS DEBUG] INPUT keys: $(echo "$INPUT" | jq -r 'keys | join(",")')" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
55
27
|
|
|
56
28
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
57
|
-
# Session ID
|
|
29
|
+
# Session ID - Try Claude's input first, fallback to state file
|
|
58
30
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
59
31
|
SESSION_ID="$RAW_SESSION_ID"
|
|
60
32
|
|
|
33
|
+
# Fallback: Read from state file if input doesn't have valid session_id
|
|
61
34
|
if [ -z "$SESSION_ID" ] || [ "$SESSION_ID" = "unknown" ] || [ "$SESSION_ID" = "null" ]; then
|
|
62
|
-
|
|
35
|
+
STATE_FILE="$HOME/.claude/state/current-session.json"
|
|
36
|
+
if [ -f "$STATE_FILE" ]; then
|
|
37
|
+
SESSION_ID=$(jq -r '.session_id // ""' "$STATE_FILE" 2>/dev/null || echo "")
|
|
38
|
+
fi
|
|
63
39
|
fi
|
|
64
40
|
|
|
65
|
-
#
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
if [[ "$SESSION_ID" =~ $UUID_REGEX ]]; then
|
|
69
|
-
IS_UUID=true
|
|
41
|
+
# Skip if still no valid session ID
|
|
42
|
+
if [ -z "$SESSION_ID" ] || [ "$SESSION_ID" = "unknown" ] || [ "$SESSION_ID" = "null" ]; then
|
|
43
|
+
exit 0
|
|
70
44
|
fi
|
|
71
45
|
|
|
72
46
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
73
|
-
#
|
|
47
|
+
# Load auth
|
|
74
48
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
SESSION_WORDS_LOADED=false
|
|
49
|
+
EKKOS_CONFIG="$HOME/.ekkos/config.json"
|
|
50
|
+
AUTH_TOKEN=""
|
|
51
|
+
USER_ID=""
|
|
79
52
|
|
|
80
|
-
|
|
81
|
-
|
|
53
|
+
if [ -f "$EKKOS_CONFIG" ]; then
|
|
54
|
+
AUTH_TOKEN=$(jq -r '.hookApiKey // .apiKey // ""' "$EKKOS_CONFIG" 2>/dev/null || echo "")
|
|
55
|
+
USER_ID=$(jq -r '.userId // ""' "$EKKOS_CONFIG" 2>/dev/null || echo "")
|
|
56
|
+
fi
|
|
82
57
|
|
|
83
|
-
|
|
84
|
-
|
|
58
|
+
if [ -z "$AUTH_TOKEN" ] && [ -f "$PROJECT_ROOT/.env.local" ]; then
|
|
59
|
+
AUTH_TOKEN=$(grep -E "^SUPABASE_SECRET_KEY=" "$PROJECT_ROOT/.env.local" | cut -d'=' -f2- | tr -d '"' | tr -d "'" | tr -d '\r')
|
|
60
|
+
fi
|
|
85
61
|
|
|
86
|
-
|
|
87
|
-
ADJECTIVES=("unknown"); NOUNS=("session"); VERBS=("starts")
|
|
88
|
-
return 1
|
|
89
|
-
fi
|
|
62
|
+
[ -z "$AUTH_TOKEN" ] && exit 0
|
|
90
63
|
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
64
|
+
MEMORY_API_URL="https://mcp.ekkos.dev"
|
|
65
|
+
|
|
66
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
67
|
+
# WORD-BASED SESSION NAMES: Convert UUID to memorable 3-word name
|
|
68
|
+
# Format: adj-noun-verb (e.g., "cosmic-penguin-runs")
|
|
69
|
+
# 100 × 100 × 100 = 1,000,000 combinations (vs 10,000 with 2-word)
|
|
70
|
+
# Matches server-side session-names.ts algorithm
|
|
71
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
72
|
+
ADJECTIVES=(
|
|
73
|
+
"cosmic" "turbo" "mega" "hyper" "quantum" "atomic" "stellar" "epic"
|
|
74
|
+
"mighty" "groovy" "zippy" "snappy" "jazzy" "funky" "zesty" "peppy"
|
|
75
|
+
"spicy" "crispy" "fluffy" "sparkly" "chunky" "bouncy" "bubbly" "sassy"
|
|
76
|
+
"slick" "sleek" "bold" "nifty" "perky" "plucky" "witty" "nimble"
|
|
77
|
+
"dapper" "fancy" "quirky" "punchy" "swift" "brave" "clever" "dandy"
|
|
78
|
+
"eager" "fiery" "golden" "hasty" "icy" "jolly" "keen" "lively"
|
|
79
|
+
"merry" "noble" "odd" "plush" "quick" "royal" "silly" "tidy"
|
|
80
|
+
"ultra" "vivid" "wacky" "zany" "alpha" "beta" "cyber" "delta"
|
|
81
|
+
"electric" "foggy" "giga" "hazy" "ionic" "jumpy" "kinky" "lunar"
|
|
82
|
+
"magic" "nerdy" "omega" "pixel" "quaint" "retro" "solar" "techno"
|
|
83
|
+
"unified" "viral" "wonky" "xerox" "yappy" "zen" "agile" "binary"
|
|
84
|
+
"chrome" "disco" "elastic" "fizzy" "glossy" "humble" "itchy" "jiffy"
|
|
85
|
+
"kooky" "loopy" "moody" "noisy"
|
|
86
|
+
)
|
|
87
|
+
NOUNS=(
|
|
88
|
+
"penguin" "panda" "otter" "narwhal" "alpaca" "llama" "badger" "walrus"
|
|
89
|
+
"waffle" "pickle" "noodle" "pretzel" "muffin" "taco" "nugget" "biscuit"
|
|
90
|
+
"rocket" "comet" "nebula" "quasar" "meteor" "photon" "pulsar" "nova"
|
|
91
|
+
"ninja" "pirate" "wizard" "robot" "yeti" "phoenix" "sphinx" "kraken"
|
|
92
|
+
"thunder" "blizzard" "tornado" "avalanche" "mango" "kiwi" "banana" "coconut"
|
|
93
|
+
"donut" "espresso" "falafel" "gyro" "hummus" "icecream" "jambon" "kebab"
|
|
94
|
+
"latte" "mocha" "nachos" "olive" "pasta" "quinoa" "ramen" "sushi"
|
|
95
|
+
"tamale" "udon" "velvet" "wasabi" "xmas" "yogurt" "ziti" "anchor"
|
|
96
|
+
"beacon" "canyon" "drifter" "echo" "falcon" "glacier" "harbor" "island"
|
|
97
|
+
"jetpack" "kayak" "lagoon" "meadow" "nebula" "orbit" "parrot" "quest"
|
|
98
|
+
"rapids" "summit" "tunnel" "umbrella" "volcano" "whisper" "xylophone" "yacht"
|
|
99
|
+
"zephyr" "acorn" "bobcat" "cactus" "dolphin" "eagle" "ferret" "gopher"
|
|
100
|
+
"hedgehog" "iguana" "jackal" "koala"
|
|
101
|
+
)
|
|
102
|
+
VERBS=(
|
|
103
|
+
"runs" "jumps" "flies" "swims" "dives" "soars" "glides" "dashes"
|
|
104
|
+
"zooms" "zips" "spins" "twirls" "bounces" "floats" "drifts" "sails"
|
|
105
|
+
"climbs" "leaps" "hops" "skips" "rolls" "slides" "surfs" "rides"
|
|
106
|
+
"builds" "creates" "forges" "shapes" "crafts" "designs" "codes" "types"
|
|
107
|
+
"thinks" "dreams" "learns" "grows" "blooms" "shines" "glows" "sparks"
|
|
108
|
+
"sings" "hums" "calls" "beeps" "clicks" "taps" "pings" "chimes"
|
|
109
|
+
"wins" "leads" "helps" "saves" "guards" "shields" "heals" "fixes"
|
|
110
|
+
"starts" "begins" "launches" "ignites" "blazes" "flares" "bursts" "pops"
|
|
111
|
+
"waves" "nods" "winks" "grins" "smiles" "laughs" "cheers" "claps"
|
|
112
|
+
"seeks" "finds" "spots" "tracks" "hunts" "chases" "catches" "grabs"
|
|
113
|
+
"pushes" "pulls" "lifts" "throws" "kicks" "punts" "bats" "swings"
|
|
114
|
+
"reads" "writes" "draws" "paints" "sculpts" "carves" "molds" "weaves"
|
|
115
|
+
"cooks" "bakes" "grills" "fries"
|
|
116
|
+
)
|
|
113
117
|
|
|
118
|
+
# Convert UUID to 3-word name deterministically
|
|
114
119
|
uuid_to_words() {
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
120
|
+
local uuid="$1"
|
|
121
|
+
local hex="${uuid//-/}"
|
|
122
|
+
hex="${hex:0:12}"
|
|
123
|
+
|
|
124
|
+
if [[ ! "$hex" =~ ^[0-9a-fA-F]+$ ]]; then
|
|
125
|
+
echo "unknown-session-starts"
|
|
126
|
+
return
|
|
127
|
+
fi
|
|
128
|
+
|
|
129
|
+
local adj_seed=$((16#${hex:0:4}))
|
|
130
|
+
local noun_seed=$((16#${hex:4:4}))
|
|
131
|
+
local verb_seed=$((16#${hex:8:4}))
|
|
132
|
+
|
|
133
|
+
local adj_idx=$((adj_seed % ${#ADJECTIVES[@]}))
|
|
134
|
+
local noun_idx=$((noun_seed % ${#NOUNS[@]}))
|
|
135
|
+
local verb_idx=$((verb_seed % ${#VERBS[@]}))
|
|
136
|
+
|
|
137
|
+
echo "${ADJECTIVES[$adj_idx]}-${NOUNS[$noun_idx]}-${VERBS[$verb_idx]}"
|
|
129
138
|
}
|
|
130
139
|
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
140
|
+
# Generate session name from UUID
|
|
141
|
+
SESSION_NAME=""
|
|
142
|
+
if [ -n "$SESSION_ID" ] && [ "$SESSION_ID" != "unknown" ] && [ "$SESSION_ID" != "null" ]; then
|
|
143
|
+
SESSION_NAME=$(uuid_to_words "$SESSION_ID")
|
|
135
144
|
fi
|
|
136
145
|
|
|
137
146
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
138
|
-
#
|
|
147
|
+
# Get turn number from local counter
|
|
139
148
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
140
|
-
|
|
141
|
-
|
|
149
|
+
PROJECT_SESSION_DIR="$STATE_DIR/sessions"
|
|
150
|
+
TURN_COUNTER_FILE="$PROJECT_SESSION_DIR/${SESSION_ID}.turn"
|
|
142
151
|
TURN_NUMBER=1
|
|
143
|
-
[ -f "$
|
|
152
|
+
[ -f "$TURN_COUNTER_FILE" ] && TURN_NUMBER=$(cat "$TURN_COUNTER_FILE" 2>/dev/null || echo "1")
|
|
144
153
|
|
|
145
154
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
146
|
-
#
|
|
155
|
+
# AUTO-CLEAR DETECTION (EARLY): Must run BEFORE any early exits
|
|
156
|
+
# If context >= 92%, write flag for ekkos run wrapper immediately
|
|
147
157
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
USER_ID=""
|
|
151
|
-
|
|
152
|
-
if [ -f "$EKKOS_CONFIG" ] && [ -f "$JSON_PARSE_HELPER" ]; then
|
|
153
|
-
AUTH_TOKEN=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.hookApiKey' 2>/dev/null || echo "")
|
|
154
|
-
[ -z "$AUTH_TOKEN" ] && AUTH_TOKEN=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.apiKey' 2>/dev/null || echo "")
|
|
155
|
-
USER_ID=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.userId' 2>/dev/null || echo "")
|
|
156
|
-
fi
|
|
157
|
-
|
|
158
|
-
if [ -z "$AUTH_TOKEN" ] && [ -f "$PROJECT_ROOT/.env.local" ]; then
|
|
159
|
-
AUTH_TOKEN=$(grep -E "^SUPABASE_SECRET_KEY=" "$PROJECT_ROOT/.env.local" | cut -d'=' -f2- | tr -d '"' | tr -d "'" | tr -d '\r')
|
|
160
|
-
fi
|
|
158
|
+
if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
|
|
159
|
+
MAX_TOKENS=200000
|
|
161
160
|
|
|
162
|
-
|
|
161
|
+
# Calculate context percentage
|
|
162
|
+
if stat -f%z "$TRANSCRIPT_PATH" >/dev/null 2>&1; then
|
|
163
|
+
FILE_SIZE=$(stat -f%z "$TRANSCRIPT_PATH")
|
|
164
|
+
else
|
|
165
|
+
FILE_SIZE=$(stat -c%s "$TRANSCRIPT_PATH" 2>/dev/null || echo "0")
|
|
166
|
+
fi
|
|
167
|
+
ROUGH_TOKENS=$((FILE_SIZE / 4))
|
|
168
|
+
TOKEN_PERCENT=$((ROUGH_TOKENS * 100 / MAX_TOKENS))
|
|
163
169
|
|
|
164
|
-
|
|
170
|
+
# More accurate in high-context scenarios
|
|
171
|
+
if [ "$TOKEN_PERCENT" -gt 50 ]; then
|
|
172
|
+
WORD_COUNT=$(wc -w < "$TRANSCRIPT_PATH" 2>/dev/null | tr -d ' ' || echo "0")
|
|
173
|
+
TOKEN_PERCENT=$((WORD_COUNT * 13 / 10 * 100 / MAX_TOKENS))
|
|
174
|
+
fi
|
|
165
175
|
|
|
166
|
-
#
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
PENDING_SESSION_FOR_BIND="${EKKOS_PENDING_SESSION:-_pending}"
|
|
174
|
-
curl -s -X POST "$MEMORY_API_URL/proxy/session/bind" \
|
|
175
|
-
-H "Content-Type: application/json" \
|
|
176
|
-
-d "{\"userId\":\"$USER_ID\",\"realSession\":\"$SESSION_NAME\",\"projectPath\":\"$PROJECT_PATH_FOR_BIND\",\"pendingSession\":\"$PENDING_SESSION_FOR_BIND\"}" \
|
|
177
|
-
--connect-timeout 1 \
|
|
178
|
-
--max-time 2 >/dev/null 2>&1 &
|
|
176
|
+
# If context >= 92%, write flag file for ekkos run wrapper
|
|
177
|
+
if [ "$TOKEN_PERCENT" -ge 92 ]; then
|
|
178
|
+
AUTO_CLEAR_FLAG="$HOME/.ekkos/auto-clear.flag"
|
|
179
|
+
TIMESTAMP_EPOCH=$(date +%s)
|
|
180
|
+
echo "${TOKEN_PERCENT}:${SESSION_NAME}:${TIMESTAMP_EPOCH}" > "$AUTO_CLEAR_FLAG"
|
|
181
|
+
echo "[ekkOS] Context at ${TOKEN_PERCENT}% - auto-clear flag written (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
182
|
+
fi
|
|
179
183
|
fi
|
|
180
184
|
|
|
181
185
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
182
|
-
#
|
|
183
|
-
# No hook-side eviction needed — passthrough is default for cache stability.
|
|
186
|
+
# Check for interruption - skip capture if request was interrupted
|
|
184
187
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
188
|
+
IS_INTERRUPTED=$(echo "$INPUT" | jq -r '.interrupted // false' 2>/dev/null || echo "false")
|
|
189
|
+
STOP_REASON=$(echo "$INPUT" | jq -r '.stop_reason // ""' 2>/dev/null || echo "")
|
|
185
190
|
|
|
186
|
-
#
|
|
187
|
-
# Check for interruption - No jq
|
|
188
|
-
# ═══════════════════════════════════════════════════════════════════════════
|
|
189
|
-
IS_INTERRUPTED=$(parse_json_value "$INPUT" '.interrupted')
|
|
190
|
-
[ -z "$IS_INTERRUPTED" ] && IS_INTERRUPTED="false"
|
|
191
|
-
STOP_REASON=$(parse_json_value "$INPUT" '.stop_reason')
|
|
192
|
-
|
|
191
|
+
# Skip capture for interrupted/cancelled requests
|
|
193
192
|
if [ "$IS_INTERRUPTED" = "true" ] || [ "$STOP_REASON" = "user_cancelled" ] || [ "$STOP_REASON" = "interrupted" ]; then
|
|
194
193
|
exit 0
|
|
195
194
|
fi
|
|
196
195
|
|
|
197
196
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
198
|
-
# Extract conversation from transcript
|
|
197
|
+
# Extract conversation from transcript
|
|
199
198
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
200
199
|
LAST_USER=""
|
|
201
200
|
LAST_ASSISTANT=""
|
|
202
201
|
FILE_CHANGES="[]"
|
|
203
202
|
|
|
204
203
|
if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
} else if (Array.isArray(content)) {
|
|
218
|
-
const textPart = content.find(c => c.type === 'text' && !c.text?.startsWith('<'));
|
|
219
|
-
if (textPart) { lastUser = textPart.text; lastUserTime = e.timestamp || ''; break; }
|
|
220
|
-
}
|
|
221
|
-
}
|
|
222
|
-
}
|
|
204
|
+
# Extract user messages:
|
|
205
|
+
# Content can be STRING or ARRAY of {type: "text", text: "..."} objects
|
|
206
|
+
# Filter out system prefixes (<) and tool_results
|
|
207
|
+
LAST_USER=$(cat "$TRANSCRIPT_PATH" | jq -r '
|
|
208
|
+
select(.type == "user")
|
|
209
|
+
| .message.content
|
|
210
|
+
| if type == "string" then
|
|
211
|
+
if startswith("<") then empty else . end
|
|
212
|
+
elif type == "array" then
|
|
213
|
+
.[] | select(.type == "text") | .text | select(startswith("<") | not)
|
|
214
|
+
else empty end
|
|
215
|
+
' 2>/dev/null | tail -1 || echo "")
|
|
223
216
|
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
const e = entries[i];
|
|
227
|
-
if (e.type === 'assistant' && (!lastUserTime || e.timestamp >= lastUserTime)) {
|
|
228
|
-
const content = e.message?.content;
|
|
229
|
-
if (typeof content === 'string') { lastAssistant = content; break; }
|
|
230
|
-
else if (Array.isArray(content)) {
|
|
231
|
-
const parts = content.map(c => {
|
|
232
|
-
if (c.type === 'text') return c.text;
|
|
233
|
-
if (c.type === 'tool_use') return '[TOOL: ' + c.name + ']';
|
|
234
|
-
if (c.type === 'thinking') return '[THINKING]' + (c.thinking || c.text || '') + '[/THINKING]';
|
|
235
|
-
return '';
|
|
236
|
-
}).filter(Boolean);
|
|
237
|
-
lastAssistant = parts.join('\n'); break;
|
|
238
|
-
}
|
|
239
|
-
}
|
|
240
|
-
}
|
|
241
|
-
|
|
242
|
-
const fileChanges = [];
|
|
243
|
-
entries.filter(e => e.type === 'assistant').forEach(e => {
|
|
244
|
-
const content = e.message?.content;
|
|
245
|
-
if (Array.isArray(content)) {
|
|
246
|
-
content.filter(c => c.type === 'tool_use' && ['Edit', 'Write', 'Read'].includes(c.name)).forEach(c => {
|
|
247
|
-
fileChanges.push({tool: c.name, path: c.input?.file_path || c.input?.path, action: c.name.toLowerCase()});
|
|
248
|
-
});
|
|
249
|
-
}
|
|
250
|
-
});
|
|
251
|
-
|
|
252
|
-
console.log(JSON.stringify({
|
|
253
|
-
user: lastUser,
|
|
254
|
-
assistant: lastAssistant.substring(0, 50000),
|
|
255
|
-
fileChanges: fileChanges.slice(0, 20)
|
|
256
|
-
}));
|
|
257
|
-
" 2>/dev/null || echo '{"user":"","assistant":"","fileChanges":[]}')
|
|
258
|
-
|
|
259
|
-
LAST_USER=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.user||'')" 2>/dev/null || echo "")
|
|
260
|
-
LAST_ASSISTANT=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.assistant||'')" 2>/dev/null || echo "")
|
|
261
|
-
FILE_CHANGES=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(JSON.stringify(d.fileChanges||[]))" 2>/dev/null || echo "[]")
|
|
262
|
-
fi
|
|
217
|
+
# DEBUG: Show what we extracted
|
|
218
|
+
echo "[ekkOS DEBUG] LAST_USER length=${#LAST_USER}, first 50: '${LAST_USER:0:50}'" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
263
219
|
|
|
264
|
-
if
|
|
220
|
+
# Log if empty (but don't exit - we still want to try extracting assistant response)
|
|
221
|
+
if [ -z "$LAST_USER" ]; then
|
|
222
|
+
echo "[ekkOS] Turn $TURN_NUMBER: LAST_USER empty, will try to get assistant response anyway (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
223
|
+
echo "[ekkOS DEBUG] Transcript line count: $(wc -l < "$TRANSCRIPT_PATH" 2>/dev/null || echo 0)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
224
|
+
# Don't exit - continue to extract assistant response for local cache
|
|
225
|
+
fi
|
|
226
|
+
if [[ "$LAST_USER" == *"[Request interrupted"* ]] || \
|
|
227
|
+
[[ "$LAST_USER" == *"interrupted by user"* ]]; then
|
|
228
|
+
echo "[ekkOS] Turn $TURN_NUMBER skipped: interruption marker (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
265
229
|
exit 0
|
|
230
|
+
fi
|
|
231
|
+
|
|
232
|
+
# Get timestamp of last valid user message (handles both string and array content)
|
|
233
|
+
LAST_USER_TIME=$(cat "$TRANSCRIPT_PATH" | jq -r '
|
|
234
|
+
select(.type == "user")
|
|
235
|
+
| select(
|
|
236
|
+
(.message.content | type == "string" and (startswith("<") | not)) or
|
|
237
|
+
(.message.content | type == "array" and any(.[]; .type == "text" and (.text | startswith("<") | not)))
|
|
238
|
+
)
|
|
239
|
+
| .timestamp
|
|
240
|
+
' 2>/dev/null | tail -1 || echo "")
|
|
241
|
+
|
|
242
|
+
if [ -n "$LAST_USER_TIME" ]; then
|
|
243
|
+
# Get assistant response after user message - FULL CONTENT including tool calls
|
|
244
|
+
# Captures: text blocks, tool_use (with name + input), and extended_thinking
|
|
245
|
+
LAST_ASSISTANT=$(cat "$TRANSCRIPT_PATH" | jq -rs --arg time "$LAST_USER_TIME" '
|
|
246
|
+
[.[] | select(.type == "assistant" and .timestamp > $time)] | last |
|
|
247
|
+
.message.content |
|
|
248
|
+
if type == "string" then .
|
|
249
|
+
elif type == "array" then
|
|
250
|
+
[.[] |
|
|
251
|
+
if .type == "text" then .text
|
|
252
|
+
elif .type == "tool_use" then
|
|
253
|
+
"\n[TOOL: " + .name + "]\n" +
|
|
254
|
+
(if .name == "Bash" then "$ " + (.input.command // "") + "\n"
|
|
255
|
+
elif .name == "Read" then "Reading: " + (.input.file_path // "") + "\n"
|
|
256
|
+
elif .name == "Write" then "Writing: " + (.input.file_path // "") + "\n"
|
|
257
|
+
elif .name == "Edit" then "Editing: " + (.input.file_path // "") + "\n"
|
|
258
|
+
elif .name == "Grep" then "Searching: " + (.input.pattern // "") + "\n"
|
|
259
|
+
elif .name == "Glob" then "Finding: " + (.input.pattern // "") + "\n"
|
|
260
|
+
elif .name == "WebFetch" then "Fetching: " + (.input.url // "") + "\n"
|
|
261
|
+
elif .name == "Task" then "Agent: " + (.input.subagent_type // "") + " - " + (.input.description // "") + "\n"
|
|
262
|
+
else (.input | tostring | .[0:500]) + "\n"
|
|
263
|
+
end)
|
|
264
|
+
elif .type == "thinking" then "\n[THINKING]\n" + (.thinking // .text // "") + "\n[/THINKING]\n"
|
|
265
|
+
else empty
|
|
266
|
+
end
|
|
267
|
+
] | join("")
|
|
268
|
+
else empty end
|
|
269
|
+
' 2>/dev/null || echo "")
|
|
270
|
+
|
|
271
|
+
# Also capture tool_results that follow this assistant message
|
|
272
|
+
TOOL_RESULTS=$(cat "$TRANSCRIPT_PATH" | jq -rs --arg time "$LAST_USER_TIME" '
|
|
273
|
+
[.[] | select(.timestamp > $time)] |
|
|
274
|
+
# Get tool results between last assistant and next user message
|
|
275
|
+
[.[] | select(.type == "tool_result" or (.type == "user" and (.message.content | type == "array") and (.message.content | any(.type == "tool_result"))))] |
|
|
276
|
+
.[0:10] | # Limit to first 10 tool results
|
|
277
|
+
[.[] |
|
|
278
|
+
if .type == "tool_result" then
|
|
279
|
+
"\n[RESULT: " + (.tool_use_id // "unknown")[0:8] + "]\n" +
|
|
280
|
+
(if (.content | type == "string") then (.content | .[0:2000])
|
|
281
|
+
elif (.content | type == "array") then ([.content[] | select(.type == "text") | .text] | join("\n") | .[0:2000])
|
|
282
|
+
else ""
|
|
283
|
+
end) + "\n"
|
|
284
|
+
elif .type == "user" then
|
|
285
|
+
([.message.content[] | select(.type == "tool_result") |
|
|
286
|
+
"\n[RESULT: " + (.tool_use_id // "unknown")[0:8] + "]\n" +
|
|
287
|
+
(if (.content | type == "string") then (.content | .[0:2000])
|
|
288
|
+
elif (.content | type == "array") then ([.content[] | select(.type == "text") | .text] | join("\n") | .[0:2000])
|
|
289
|
+
else ""
|
|
290
|
+
end) + "\n"
|
|
291
|
+
] | join(""))
|
|
292
|
+
else ""
|
|
293
|
+
end
|
|
294
|
+
] | join("")
|
|
295
|
+
' 2>/dev/null || echo "")
|
|
296
|
+
|
|
297
|
+
# Combine assistant response with tool results
|
|
298
|
+
if [ -n "$TOOL_RESULTS" ]; then
|
|
299
|
+
LAST_ASSISTANT="${LAST_ASSISTANT}${TOOL_RESULTS}"
|
|
300
|
+
fi
|
|
301
|
+
fi
|
|
302
|
+
|
|
303
|
+
# Fallback: get last assistant message if timestamp method fails
|
|
304
|
+
if [ -z "$LAST_ASSISTANT" ]; then
|
|
305
|
+
LAST_ASSISTANT=$(cat "$TRANSCRIPT_PATH" | jq -rs '
|
|
306
|
+
[.[] | select(.type == "assistant")] | last |
|
|
307
|
+
.message.content |
|
|
308
|
+
if type == "string" then .
|
|
309
|
+
elif type == "array" then
|
|
310
|
+
[.[] |
|
|
311
|
+
if .type == "text" then .text
|
|
312
|
+
elif .type == "tool_use" then
|
|
313
|
+
"\n[TOOL: " + .name + "]\n" +
|
|
314
|
+
(if .name == "Bash" then "$ " + (.input.command // "") + "\n"
|
|
315
|
+
elif .name == "Read" then "Reading: " + (.input.file_path // "") + "\n"
|
|
316
|
+
elif .name == "Write" then "Writing: " + (.input.file_path // "") + "\n"
|
|
317
|
+
elif .name == "Edit" then "Editing: " + (.input.file_path // "") + "\n"
|
|
318
|
+
else (.input | tostring | .[0:500]) + "\n"
|
|
319
|
+
end)
|
|
320
|
+
elif .type == "thinking" then "\n[THINKING]\n" + (.thinking // .text // "") + "\n[/THINKING]\n"
|
|
321
|
+
else empty
|
|
322
|
+
end
|
|
323
|
+
] | join("")
|
|
324
|
+
else empty end
|
|
325
|
+
' 2>/dev/null || echo "")
|
|
326
|
+
fi
|
|
327
|
+
|
|
328
|
+
# Extract file changes WITH FULL EDIT CONTENT for perfect context restoration
|
|
329
|
+
# Includes old_string/new_string for edits, content for writes
|
|
330
|
+
FILE_CHANGES=$(cat "$TRANSCRIPT_PATH" | jq -s '
|
|
331
|
+
[.[] | select(.type == "assistant") | .message.content[]? | select(.type == "tool_use") |
|
|
332
|
+
select(.name == "Edit" or .name == "Write" or .name == "Read") |
|
|
333
|
+
{
|
|
334
|
+
tool: .name,
|
|
335
|
+
path: (.input.file_path // .input.path),
|
|
336
|
+
action: (if .name == "Edit" then "edit" elif .name == "Write" then "write" else "read" end),
|
|
337
|
+
# Full edit details for context restoration
|
|
338
|
+
old_string: (if .name == "Edit" then (.input.old_string // null) else null end),
|
|
339
|
+
new_string: (if .name == "Edit" then (.input.new_string // null) else null end),
|
|
340
|
+
# Write content (truncated to 2000 chars to avoid massive payloads)
|
|
341
|
+
content: (if .name == "Write" then (.input.content[:2000] // null) else null end),
|
|
342
|
+
replace_all: (if .name == "Edit" then (.input.replace_all // false) else null end)
|
|
343
|
+
}
|
|
344
|
+
] | map(select(.path != null))
|
|
345
|
+
' 2>/dev/null || echo "[]")
|
|
266
346
|
fi
|
|
267
347
|
|
|
268
348
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
269
|
-
# Capture to
|
|
349
|
+
# Capture to L2 (episodic memory) - SYNCHRONOUS for reliability
|
|
350
|
+
# Background was causing missed captures when Claude Code exits fast
|
|
270
351
|
# ═══════════════════════════════════════════════════════════════════════════
|
|
352
|
+
if [ -z "$LAST_ASSISTANT" ]; then
|
|
353
|
+
echo "[ekkOS] Turn $TURN_NUMBER skipped: LAST_ASSISTANT empty (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
|
|
354
|
+
fi
|
|
355
|
+
|
|
271
356
|
if [ -n "$LAST_USER" ] && [ -n "$LAST_ASSISTANT" ]; then
|
|
272
|
-
(
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
TOOLS_USED=$(echo "$FILE_CHANGES" | node -e "
|
|
276
|
-
const d = JSON.parse(require('fs').readFileSync('/dev/stdin','utf8') || '[]');
|
|
277
|
-
console.log(JSON.stringify([...new Set(d.map(f => f.tool).filter(Boolean))]));
|
|
278
|
-
" 2>/dev/null || echo "[]")
|
|
279
|
-
|
|
280
|
-
FILES_REF=$(echo "$FILE_CHANGES" | node -e "
|
|
281
|
-
const d = JSON.parse(require('fs').readFileSync('/dev/stdin','utf8') || '[]');
|
|
282
|
-
console.log(JSON.stringify([...new Set(d.map(f => f.path).filter(Boolean))]));
|
|
283
|
-
" 2>/dev/null || echo "[]")
|
|
284
|
-
|
|
285
|
-
# Token breakdown from tokenizer script
|
|
286
|
-
TOTAL_TOKENS=0
|
|
287
|
-
INPUT_TOKENS=0
|
|
288
|
-
CACHE_READ_TOKENS=0
|
|
289
|
-
CACHE_CREATION_TOKENS=0
|
|
290
|
-
TOKENIZER_SCRIPT="$SCRIPT_DIR/lib/count-tokens.cjs"
|
|
291
|
-
if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ] && [ -f "$TOKENIZER_SCRIPT" ]; then
|
|
292
|
-
TOKEN_JSON=$(node "$TOKENIZER_SCRIPT" "$TRANSCRIPT_PATH" --json 2>/dev/null || echo '{}')
|
|
293
|
-
TOTAL_TOKENS=$(echo "$TOKEN_JSON" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')||'{}');console.log(d.total_tokens||0)" 2>/dev/null || echo "0")
|
|
294
|
-
INPUT_TOKENS=$(echo "$TOKEN_JSON" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')||'{}');console.log(d.input_tokens||0)" 2>/dev/null || echo "0")
|
|
295
|
-
CACHE_READ_TOKENS=$(echo "$TOKEN_JSON" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')||'{}');console.log(d.cache_read_tokens||0)" 2>/dev/null || echo "0")
|
|
296
|
-
CACHE_CREATION_TOKENS=$(echo "$TOKEN_JSON" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8')||'{}');console.log(d.cache_creation_tokens||0)" 2>/dev/null || echo "0")
|
|
297
|
-
[[ ! "$TOTAL_TOKENS" =~ ^[0-9]+$ ]] && TOTAL_TOKENS=0
|
|
298
|
-
fi
|
|
357
|
+
PAYLOAD_FILE=$(mktemp /tmp/ekkos-capture.XXXXXX.json)
|
|
358
|
+
TIMESTAMP=$(date -u +%Y-%m-%dT%H:%M:%SZ)
|
|
299
359
|
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
360
|
+
jq -n \
|
|
361
|
+
--arg user_query "$LAST_USER" \
|
|
362
|
+
--arg assistant_response "$LAST_ASSISTANT" \
|
|
363
|
+
--arg session_id "$SESSION_ID" \
|
|
364
|
+
--arg user_id "${USER_ID:-system}" \
|
|
365
|
+
--arg model_used "$MODEL_USED" \
|
|
366
|
+
--arg captured_at "$TIMESTAMP" \
|
|
367
|
+
--argjson file_changes "${FILE_CHANGES:-[]}" \
|
|
368
|
+
'{
|
|
369
|
+
user_query: $user_query,
|
|
370
|
+
assistant_response: $assistant_response,
|
|
371
|
+
session_id: $session_id,
|
|
372
|
+
user_id: $user_id,
|
|
373
|
+
file_changes: $file_changes,
|
|
374
|
+
metadata: {
|
|
375
|
+
source: "claude-code",
|
|
376
|
+
model_used: $model_used,
|
|
377
|
+
captured_at: $captured_at,
|
|
378
|
+
file_changes: $file_changes,
|
|
379
|
+
minimal_hook: true
|
|
380
|
+
}
|
|
381
|
+
}' > "$PAYLOAD_FILE" 2>/dev/null
|
|
382
|
+
|
|
383
|
+
if jq empty "$PAYLOAD_FILE" 2>/dev/null; then
|
|
384
|
+
# Retry with backoff for L2 episodic capture
|
|
385
|
+
for RETRY in 1 2 3; do
|
|
386
|
+
CAPTURE_RESULT=$(curl -s -w "\n%{http_code}" -X POST "$MEMORY_API_URL/api/v1/memory/capture" \
|
|
321
387
|
-H "Authorization: Bearer $AUTH_TOKEN" \
|
|
322
388
|
-H "Content-Type: application/json" \
|
|
323
|
-
-d "
|
|
389
|
+
-d "@$PAYLOAD_FILE" \
|
|
324
390
|
--connect-timeout 3 \
|
|
325
|
-
--max-time 5 >/dev/null
|
|
391
|
+
--max-time 5 2>/dev/null || echo -e "\n000")
|
|
392
|
+
|
|
393
|
+
HTTP_CODE=$(echo "$CAPTURE_RESULT" | tail -1)
|
|
394
|
+
|
|
395
|
+
if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "201" ]; then
|
|
396
|
+
break
|
|
397
|
+
fi
|
|
398
|
+
[ $RETRY -lt 3 ] && sleep 0.5
|
|
399
|
+
done
|
|
400
|
+
|
|
401
|
+
if [ "$HTTP_CODE" != "200" ] && [ "$HTTP_CODE" != "201" ]; then
|
|
402
|
+
echo "[ekkOS] L2 capture failed after 3 attempts: HTTP $HTTP_CODE" >&2
|
|
403
|
+
mkdir -p "$HOME/.ekkos/wal" 2>/dev/null
|
|
404
|
+
cp "$PAYLOAD_FILE" "$HOME/.ekkos/wal/l2-$(date +%s)-$$.json" 2>/dev/null
|
|
326
405
|
fi
|
|
406
|
+
fi
|
|
327
407
|
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
408
|
+
rm -f "$PAYLOAD_FILE" 2>/dev/null
|
|
409
|
+
fi
|
|
410
|
+
|
|
411
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
412
|
+
# REDIS WORKING MEMORY: Store verbatim turn in multi-session hot cache
|
|
413
|
+
# 5 sessions × 20 turns = 100 turns total for instant context restoration
|
|
414
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
415
|
+
if [ -n "$LAST_USER" ] && [ -n "$LAST_ASSISTANT" ] && [ -n "$SESSION_NAME" ]; then
|
|
416
|
+
REDIS_PAYLOAD_FILE=$(mktemp /tmp/ekkos-redis.XXXXXX.json)
|
|
417
|
+
|
|
418
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
419
|
+
# SECRET SCRUBBING: Detect and store secrets, replace with references
|
|
420
|
+
# Patterns: API keys, tokens, passwords → stored in L11 Secrets vault
|
|
421
|
+
# Source: GitHub secret scanning patterns + community lists
|
|
422
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
423
|
+
store_secret() {
|
|
424
|
+
local service="$1"
|
|
425
|
+
local secret="$2"
|
|
426
|
+
local type="$3"
|
|
427
|
+
curl -s -X POST "$MEMORY_API_URL/api/v1/secrets" \
|
|
428
|
+
-H "Authorization: Bearer $AUTH_TOKEN" \
|
|
429
|
+
-H "Content-Type: application/json" \
|
|
430
|
+
-d "{\"service\":\"$service\",\"value\":\"$secret\",\"type\":\"$type\"}" \
|
|
431
|
+
--connect-timeout 1 --max-time 2 >/dev/null 2>&1 &
|
|
432
|
+
}
|
|
433
|
+
|
|
434
|
+
scrub_secrets() {
|
|
435
|
+
local text="$1"
|
|
436
|
+
local scrubbed="$text"
|
|
437
|
+
|
|
438
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
439
|
+
# OpenAI (sk-..., sk-proj-...)
|
|
440
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
441
|
+
while [[ "$scrubbed" =~ (sk-proj-[a-zA-Z0-9_-]{20,}) ]]; do
|
|
442
|
+
local secret="${BASH_REMATCH[1]}"
|
|
443
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
444
|
+
store_secret "openai_proj_$hash" "$secret" "api_key"
|
|
445
|
+
scrubbed="${scrubbed//$secret/[SECRET:openai_proj_$hash:api_key]}"
|
|
446
|
+
done
|
|
447
|
+
while [[ "$scrubbed" =~ (sk-[a-zA-Z0-9]{20,}) ]]; do
|
|
448
|
+
local secret="${BASH_REMATCH[1]}"
|
|
449
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
450
|
+
store_secret "openai_$hash" "$secret" "api_key"
|
|
451
|
+
scrubbed="${scrubbed//$secret/[SECRET:openai_$hash:api_key]}"
|
|
452
|
+
done
|
|
453
|
+
|
|
454
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
455
|
+
# Anthropic (sk-ant-...)
|
|
456
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
457
|
+
while [[ "$scrubbed" =~ (sk-ant-[a-zA-Z0-9_-]{20,}) ]]; do
|
|
458
|
+
local secret="${BASH_REMATCH[1]}"
|
|
459
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
460
|
+
store_secret "anthropic_$hash" "$secret" "api_key"
|
|
461
|
+
scrubbed="${scrubbed//$secret/[SECRET:anthropic_$hash:api_key]}"
|
|
462
|
+
done
|
|
346
463
|
|
|
347
|
-
|
|
348
|
-
|
|
464
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
465
|
+
# GitHub (ghp_, gho_, ghu_, ghs_, ghr_)
|
|
466
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
467
|
+
while [[ "$scrubbed" =~ (ghp_[a-zA-Z0-9]{36}) ]]; do
|
|
468
|
+
local secret="${BASH_REMATCH[1]}"
|
|
469
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
470
|
+
store_secret "github_pat_$hash" "$secret" "token"
|
|
471
|
+
scrubbed="${scrubbed//$secret/[SECRET:github_pat_$hash:token]}"
|
|
472
|
+
done
|
|
473
|
+
while [[ "$scrubbed" =~ (gho_[a-zA-Z0-9]{36}) ]]; do
|
|
474
|
+
local secret="${BASH_REMATCH[1]}"
|
|
475
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
476
|
+
store_secret "github_oauth_$hash" "$secret" "token"
|
|
477
|
+
scrubbed="${scrubbed//$secret/[SECRET:github_oauth_$hash:token]}"
|
|
478
|
+
done
|
|
479
|
+
while [[ "$scrubbed" =~ (ghu_[a-zA-Z0-9]{36}) ]]; do
|
|
480
|
+
local secret="${BASH_REMATCH[1]}"
|
|
481
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
482
|
+
store_secret "github_user_$hash" "$secret" "token"
|
|
483
|
+
scrubbed="${scrubbed//$secret/[SECRET:github_user_$hash:token]}"
|
|
484
|
+
done
|
|
485
|
+
while [[ "$scrubbed" =~ (ghs_[a-zA-Z0-9]{36}) ]]; do
|
|
486
|
+
local secret="${BASH_REMATCH[1]}"
|
|
487
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
488
|
+
store_secret "github_app_$hash" "$secret" "token"
|
|
489
|
+
scrubbed="${scrubbed//$secret/[SECRET:github_app_$hash:token]}"
|
|
490
|
+
done
|
|
491
|
+
while [[ "$scrubbed" =~ (ghr_[a-zA-Z0-9]{36}) ]]; do
|
|
492
|
+
local secret="${BASH_REMATCH[1]}"
|
|
493
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
494
|
+
store_secret "github_refresh_$hash" "$secret" "token"
|
|
495
|
+
scrubbed="${scrubbed//$secret/[SECRET:github_refresh_$hash:token]}"
|
|
496
|
+
done
|
|
497
|
+
|
|
498
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
499
|
+
# GitLab (glpat-...)
|
|
500
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
501
|
+
while [[ "$scrubbed" =~ (glpat-[a-zA-Z0-9_-]{20,}) ]]; do
|
|
502
|
+
local secret="${BASH_REMATCH[1]}"
|
|
503
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
504
|
+
store_secret "gitlab_$hash" "$secret" "token"
|
|
505
|
+
scrubbed="${scrubbed//$secret/[SECRET:gitlab_$hash:token]}"
|
|
506
|
+
done
|
|
507
|
+
|
|
508
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
509
|
+
# AWS (AKIA...)
|
|
510
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
511
|
+
while [[ "$scrubbed" =~ (AKIA[A-Z0-9]{16}) ]]; do
|
|
512
|
+
local secret="${BASH_REMATCH[1]}"
|
|
513
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
514
|
+
store_secret "aws_$hash" "$secret" "api_key"
|
|
515
|
+
scrubbed="${scrubbed//$secret/[SECRET:aws_$hash:api_key]}"
|
|
516
|
+
done
|
|
517
|
+
|
|
518
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
519
|
+
# Stripe (sk_live_, sk_test_, pk_live_, pk_test_)
|
|
520
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
521
|
+
while [[ "$scrubbed" =~ (sk_live_[a-zA-Z0-9]{24,}) ]]; do
|
|
522
|
+
local secret="${BASH_REMATCH[1]}"
|
|
523
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
524
|
+
store_secret "stripe_live_$hash" "$secret" "api_key"
|
|
525
|
+
scrubbed="${scrubbed//$secret/[SECRET:stripe_live_$hash:api_key]}"
|
|
526
|
+
done
|
|
527
|
+
while [[ "$scrubbed" =~ (sk_test_[a-zA-Z0-9]{24,}) ]]; do
|
|
528
|
+
local secret="${BASH_REMATCH[1]}"
|
|
529
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
530
|
+
store_secret "stripe_test_$hash" "$secret" "api_key"
|
|
531
|
+
scrubbed="${scrubbed//$secret/[SECRET:stripe_test_$hash:api_key]}"
|
|
532
|
+
done
|
|
533
|
+
while [[ "$scrubbed" =~ (rk_live_[a-zA-Z0-9]{24,}) ]]; do
|
|
534
|
+
local secret="${BASH_REMATCH[1]}"
|
|
535
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
536
|
+
store_secret "stripe_restricted_$hash" "$secret" "api_key"
|
|
537
|
+
scrubbed="${scrubbed//$secret/[SECRET:stripe_restricted_$hash:api_key]}"
|
|
538
|
+
done
|
|
539
|
+
|
|
540
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
541
|
+
# Slack (xoxb-, xoxp-, xoxa-, xoxs-)
|
|
542
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
543
|
+
while [[ "$scrubbed" =~ (xoxb-[0-9a-zA-Z-]{24,}) ]]; do
|
|
544
|
+
local secret="${BASH_REMATCH[1]}"
|
|
545
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
546
|
+
store_secret "slack_bot_$hash" "$secret" "token"
|
|
547
|
+
scrubbed="${scrubbed//$secret/[SECRET:slack_bot_$hash:token]}"
|
|
548
|
+
done
|
|
549
|
+
while [[ "$scrubbed" =~ (xoxp-[0-9a-zA-Z-]{24,}) ]]; do
|
|
550
|
+
local secret="${BASH_REMATCH[1]}"
|
|
551
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
552
|
+
store_secret "slack_user_$hash" "$secret" "token"
|
|
553
|
+
scrubbed="${scrubbed//$secret/[SECRET:slack_user_$hash:token]}"
|
|
554
|
+
done
|
|
555
|
+
|
|
556
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
557
|
+
# Google (AIza...)
|
|
558
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
559
|
+
while [[ "$scrubbed" =~ (AIza[0-9A-Za-z_-]{35}) ]]; do
|
|
560
|
+
local secret="${BASH_REMATCH[1]}"
|
|
561
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
562
|
+
store_secret "google_$hash" "$secret" "api_key"
|
|
563
|
+
scrubbed="${scrubbed//$secret/[SECRET:google_$hash:api_key]}"
|
|
564
|
+
done
|
|
565
|
+
|
|
566
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
567
|
+
# Twilio (SK...)
|
|
568
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
569
|
+
while [[ "$scrubbed" =~ (SK[0-9a-fA-F]{32}) ]]; do
|
|
570
|
+
local secret="${BASH_REMATCH[1]}"
|
|
571
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
572
|
+
store_secret "twilio_$hash" "$secret" "api_key"
|
|
573
|
+
scrubbed="${scrubbed//$secret/[SECRET:twilio_$hash:api_key]}"
|
|
574
|
+
done
|
|
575
|
+
|
|
576
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
577
|
+
# SendGrid (SG....)
|
|
578
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
579
|
+
while [[ "$scrubbed" =~ (SG\.[a-zA-Z0-9_-]{22}\.[a-zA-Z0-9_-]{43}) ]]; do
|
|
580
|
+
local secret="${BASH_REMATCH[1]}"
|
|
581
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
582
|
+
store_secret "sendgrid_$hash" "$secret" "api_key"
|
|
583
|
+
scrubbed="${scrubbed//$secret/[SECRET:sendgrid_$hash:api_key]}"
|
|
584
|
+
done
|
|
585
|
+
|
|
586
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
587
|
+
# Mailgun (key-...)
|
|
588
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
589
|
+
while [[ "$scrubbed" =~ (key-[0-9a-zA-Z]{32}) ]]; do
|
|
590
|
+
local secret="${BASH_REMATCH[1]}"
|
|
591
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
592
|
+
store_secret "mailgun_$hash" "$secret" "api_key"
|
|
593
|
+
scrubbed="${scrubbed//$secret/[SECRET:mailgun_$hash:api_key]}"
|
|
594
|
+
done
|
|
595
|
+
|
|
596
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
597
|
+
# DigitalOcean (dop_v1_...)
|
|
598
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
599
|
+
while [[ "$scrubbed" =~ (dop_v1_[a-z0-9]{64}) ]]; do
|
|
600
|
+
local secret="${BASH_REMATCH[1]}"
|
|
601
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
602
|
+
store_secret "digitalocean_$hash" "$secret" "token"
|
|
603
|
+
scrubbed="${scrubbed//$secret/[SECRET:digitalocean_$hash:token]}"
|
|
604
|
+
done
|
|
605
|
+
|
|
606
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
607
|
+
# Shopify (shpat_...)
|
|
608
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
609
|
+
while [[ "$scrubbed" =~ (shpat_[0-9a-fA-F]{32}) ]]; do
|
|
610
|
+
local secret="${BASH_REMATCH[1]}"
|
|
611
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
612
|
+
store_secret "shopify_$hash" "$secret" "token"
|
|
613
|
+
scrubbed="${scrubbed//$secret/[SECRET:shopify_$hash:token]}"
|
|
614
|
+
done
|
|
615
|
+
|
|
616
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
617
|
+
# npm (npm_...)
|
|
618
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
619
|
+
while [[ "$scrubbed" =~ (npm_[a-zA-Z0-9]{36}) ]]; do
|
|
620
|
+
local secret="${BASH_REMATCH[1]}"
|
|
621
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
622
|
+
store_secret "npm_$hash" "$secret" "token"
|
|
623
|
+
scrubbed="${scrubbed//$secret/[SECRET:npm_$hash:token]}"
|
|
624
|
+
done
|
|
625
|
+
|
|
626
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
627
|
+
# PyPI (pypi-...)
|
|
628
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
629
|
+
while [[ "$scrubbed" =~ (pypi-[A-Za-z0-9_-]{50,}) ]]; do
|
|
630
|
+
local secret="${BASH_REMATCH[1]}"
|
|
631
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
632
|
+
store_secret "pypi_$hash" "$secret" "token"
|
|
633
|
+
scrubbed="${scrubbed//$secret/[SECRET:pypi_$hash:token]}"
|
|
634
|
+
done
|
|
635
|
+
|
|
636
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
637
|
+
# Supabase (sbp_...)
|
|
638
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
639
|
+
while [[ "$scrubbed" =~ (sbp_[a-zA-Z0-9]{40,}) ]]; do
|
|
640
|
+
local secret="${BASH_REMATCH[1]}"
|
|
641
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
642
|
+
store_secret "supabase_$hash" "$secret" "api_key"
|
|
643
|
+
scrubbed="${scrubbed//$secret/[SECRET:supabase_$hash:api_key]}"
|
|
644
|
+
done
|
|
645
|
+
|
|
646
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
647
|
+
# Discord Bot Token
|
|
648
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
649
|
+
while [[ "$scrubbed" =~ ([MN][A-Za-z0-9]{23,}\.[A-Za-z0-9_-]{6}\.[A-Za-z0-9_-]{27}) ]]; do
|
|
650
|
+
local secret="${BASH_REMATCH[1]}"
|
|
651
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
652
|
+
store_secret "discord_$hash" "$secret" "token"
|
|
653
|
+
scrubbed="${scrubbed//$secret/[SECRET:discord_$hash:token]}"
|
|
654
|
+
done
|
|
655
|
+
|
|
656
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
657
|
+
# Vercel (vercel_...)
|
|
658
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
659
|
+
while [[ "$scrubbed" =~ (vercel_[a-zA-Z0-9]{24,}) ]]; do
|
|
660
|
+
local secret="${BASH_REMATCH[1]}"
|
|
661
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
662
|
+
store_secret "vercel_$hash" "$secret" "token"
|
|
663
|
+
scrubbed="${scrubbed//$secret/[SECRET:vercel_$hash:token]}"
|
|
664
|
+
done
|
|
665
|
+
|
|
666
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
667
|
+
# Heroku (heroku_...)
|
|
668
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
669
|
+
while [[ "$scrubbed" =~ (heroku_[a-zA-Z0-9_-]{30,}) ]]; do
|
|
670
|
+
local secret="${BASH_REMATCH[1]}"
|
|
671
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
672
|
+
store_secret "heroku_$hash" "$secret" "api_key"
|
|
673
|
+
scrubbed="${scrubbed//$secret/[SECRET:heroku_$hash:api_key]}"
|
|
674
|
+
done
|
|
675
|
+
|
|
676
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
677
|
+
# Datadog (dd...)
|
|
678
|
+
# ─────────────────────────────────────────────────────────────────────────
|
|
679
|
+
while [[ "$scrubbed" =~ (ddapi_[a-zA-Z0-9]{32,}) ]]; do
|
|
680
|
+
local secret="${BASH_REMATCH[1]}"
|
|
681
|
+
local hash=$(echo -n "$secret" | md5 | cut -c1-8)
|
|
682
|
+
store_secret "datadog_$hash" "$secret" "api_key"
|
|
683
|
+
scrubbed="${scrubbed//$secret/[SECRET:datadog_$hash:api_key]}"
|
|
684
|
+
done
|
|
685
|
+
|
|
686
|
+
echo "$scrubbed"
|
|
687
|
+
}
|
|
688
|
+
|
|
689
|
+
# Scrub user query and assistant response
|
|
690
|
+
SCRUBBED_USER=$(scrub_secrets "$LAST_USER")
|
|
691
|
+
SCRUBBED_ASSISTANT=$(scrub_secrets "$LAST_ASSISTANT")
|
|
692
|
+
|
|
693
|
+
# Extract tools used from assistant response (simple grep for tool names)
|
|
694
|
+
TOOLS_USED=$(echo "$SCRUBBED_ASSISTANT" | grep -oE '\[TOOL: [^\]]+\]' | sed 's/\[TOOL: //g; s/\]//g' | sort -u | jq -R -s -c 'split("\n") | map(select(. != ""))')
|
|
695
|
+
[ -z "$TOOLS_USED" ] && TOOLS_USED="[]"
|
|
696
|
+
|
|
697
|
+
# Extract files referenced from file changes
|
|
698
|
+
FILES_REFERENCED=$(echo "$FILE_CHANGES" | jq -c '[.[].path] | unique // []' 2>/dev/null || echo "[]")
|
|
699
|
+
|
|
700
|
+
# Build edits array from file changes (write and edit actions only)
|
|
701
|
+
EDITS=$(echo "$FILE_CHANGES" | jq -c '[.[] | select(.action == "edit" or .action == "write") | {file_path: .path, action: .action, diff: (if .old_string then ("old: " + (.old_string | .[0:200]) + "\nnew: " + (.new_string | .[0:200])) else (.content | .[0:500]) end)}]' 2>/dev/null || echo "[]")
|
|
702
|
+
|
|
703
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
704
|
+
# ACCURATE TOKEN TRACKING: Extract REAL token counts from Anthropic API response
|
|
705
|
+
# This gives us exact context usage instead of rough estimation
|
|
706
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
707
|
+
TOTAL_CONTEXT_TOKENS=0
|
|
708
|
+
INPUT_TOKENS=0
|
|
709
|
+
OUTPUT_TOKENS=0
|
|
710
|
+
|
|
711
|
+
if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
|
|
712
|
+
# Get the last assistant message with usage data (macOS compatible)
|
|
713
|
+
# tac doesn't exist on macOS, use grep | tail instead
|
|
714
|
+
LAST_USAGE=$(grep '"usage"' "$TRANSCRIPT_PATH" 2>/dev/null | tail -1)
|
|
715
|
+
|
|
716
|
+
if [ -n "$LAST_USAGE" ]; then
|
|
717
|
+
# Extract token counts from Anthropic API usage object
|
|
718
|
+
INPUT_TOKENS=$(echo "$LAST_USAGE" | jq -r '
|
|
719
|
+
(.message.usage.input_tokens // 0) +
|
|
720
|
+
(.message.usage.cache_creation_input_tokens // 0) +
|
|
721
|
+
(.message.usage.cache_read_input_tokens // 0)
|
|
722
|
+
' 2>/dev/null || echo "0")
|
|
723
|
+
|
|
724
|
+
OUTPUT_TOKENS=$(echo "$LAST_USAGE" | jq -r '.message.usage.output_tokens // 0' 2>/dev/null || echo "0")
|
|
725
|
+
|
|
726
|
+
# Total context = input + output
|
|
727
|
+
TOTAL_CONTEXT_TOKENS=$((INPUT_TOKENS + OUTPUT_TOKENS))
|
|
728
|
+
fi
|
|
729
|
+
fi
|
|
730
|
+
|
|
731
|
+
jq -n \
|
|
732
|
+
--arg session_name "$SESSION_NAME" \
|
|
733
|
+
--argjson turn_number "$TURN_NUMBER" \
|
|
734
|
+
--arg user_query "$SCRUBBED_USER" \
|
|
735
|
+
--arg agent_response "$SCRUBBED_ASSISTANT" \
|
|
736
|
+
--arg model "$MODEL_USED" \
|
|
737
|
+
--argjson tools_used "$TOOLS_USED" \
|
|
738
|
+
--argjson files_referenced "$FILES_REFERENCED" \
|
|
739
|
+
--argjson edits "$EDITS" \
|
|
740
|
+
--argjson total_context_tokens "$TOTAL_CONTEXT_TOKENS" \
|
|
741
|
+
--argjson input_tokens "$INPUT_TOKENS" \
|
|
742
|
+
--argjson output_tokens "$OUTPUT_TOKENS" \
|
|
743
|
+
'{
|
|
744
|
+
session_name: $session_name,
|
|
745
|
+
turn_number: $turn_number,
|
|
746
|
+
user_query: $user_query,
|
|
747
|
+
agent_response: $agent_response,
|
|
748
|
+
model: $model,
|
|
749
|
+
tools_used: $tools_used,
|
|
750
|
+
files_referenced: $files_referenced,
|
|
751
|
+
edits: $edits,
|
|
752
|
+
patterns_used: [],
|
|
753
|
+
total_context_tokens: $total_context_tokens,
|
|
754
|
+
input_tokens: $input_tokens,
|
|
755
|
+
output_tokens: $output_tokens
|
|
756
|
+
}' > "$REDIS_PAYLOAD_FILE" 2>/dev/null
|
|
757
|
+
|
|
758
|
+
if jq empty "$REDIS_PAYLOAD_FILE" 2>/dev/null; then
|
|
759
|
+
# Retry with backoff for Redis working memory (critical for /continue)
|
|
760
|
+
MAX_RETRIES=3
|
|
761
|
+
RETRY=0
|
|
762
|
+
REDIS_SUCCESS=false
|
|
763
|
+
|
|
764
|
+
while [ $RETRY -lt $MAX_RETRIES ] && [ "$REDIS_SUCCESS" = "false" ]; do
|
|
765
|
+
REDIS_RESULT=$(curl -s -w "\n%{http_code}" -X POST "$MEMORY_API_URL/api/v1/working/turn" \
|
|
349
766
|
-H "Authorization: Bearer $AUTH_TOKEN" \
|
|
350
767
|
-H "Content-Type: application/json" \
|
|
351
|
-
-d "
|
|
768
|
+
-d "@$REDIS_PAYLOAD_FILE" \
|
|
352
769
|
--connect-timeout 3 \
|
|
353
|
-
--max-time 5 >/dev/null
|
|
770
|
+
--max-time 5 2>/dev/null || echo -e "\n000")
|
|
771
|
+
|
|
772
|
+
REDIS_HTTP_CODE=$(echo "$REDIS_RESULT" | tail -1)
|
|
773
|
+
|
|
774
|
+
if [ "$REDIS_HTTP_CODE" = "200" ] || [ "$REDIS_HTTP_CODE" = "201" ]; then
|
|
775
|
+
REDIS_SUCCESS=true
|
|
776
|
+
else
|
|
777
|
+
RETRY=$((RETRY + 1))
|
|
778
|
+
[ $RETRY -lt $MAX_RETRIES ] && sleep 0.3
|
|
779
|
+
fi
|
|
780
|
+
done
|
|
781
|
+
|
|
782
|
+
# Log final failure with context
|
|
783
|
+
if [ "$REDIS_SUCCESS" = "false" ]; then
|
|
784
|
+
echo "[ekkOS] Redis capture failed after $MAX_RETRIES attempts: HTTP $REDIS_HTTP_CODE (session: $SESSION_NAME, turn: $TURN_NUMBER)" >&2
|
|
785
|
+
# Write-ahead log for recovery
|
|
786
|
+
WAL_DIR="$HOME/.ekkos/wal"
|
|
787
|
+
mkdir -p "$WAL_DIR" 2>/dev/null
|
|
788
|
+
cp "$REDIS_PAYLOAD_FILE" "$WAL_DIR/redis-$(date +%s)-$$.json" 2>/dev/null
|
|
789
|
+
else
|
|
790
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
791
|
+
# 🎯 ACK: Update local cache ACK cursor after successful Redis flush
|
|
792
|
+
# This enables safe pruning of turns that are backed up to Redis
|
|
793
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
794
|
+
if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
|
|
795
|
+
(ekkos-capture ack "$SESSION_ID" "$TURN_NUMBER" >/dev/null 2>&1) &
|
|
796
|
+
fi
|
|
797
|
+
fi
|
|
798
|
+
fi
|
|
799
|
+
|
|
800
|
+
rm -f "$REDIS_PAYLOAD_FILE" 2>/dev/null
|
|
801
|
+
|
|
802
|
+
# ═════════════════════════════════════════════════════════════════════════
|
|
803
|
+
# ⚡ FAST CAPTURE: Structured context for instant /continue (parallel)
|
|
804
|
+
# Lightweight extraction - no LLM, pure parsing for ~1-2k token restoration
|
|
805
|
+
# ═════════════════════════════════════════════════════════════════════════
|
|
806
|
+
|
|
807
|
+
# Extract user intent patterns (no LLM needed)
|
|
808
|
+
USER_DECISION=""
|
|
809
|
+
USER_CORRECTION=""
|
|
810
|
+
USER_PREFERENCE=""
|
|
811
|
+
|
|
812
|
+
# Decision patterns: yes/no/ok/go ahead/use X instead
|
|
813
|
+
USER_DECISION=$(echo "$SCRUBBED_USER" | grep -oiE "^(yes|no|ok|do it|go ahead|approved|confirmed|use .{1,30} instead)" | head -1 || echo "")
|
|
814
|
+
|
|
815
|
+
# Correction patterns
|
|
816
|
+
USER_CORRECTION=$(echo "$SCRUBBED_USER" | grep -oiE "(actually|no,? I meant|not that|wrong|instead)" | head -1 || echo "")
|
|
817
|
+
|
|
818
|
+
# Preference patterns
|
|
819
|
+
USER_PREFERENCE=$(echo "$SCRUBBED_USER" | grep -oiE "(always|never|I prefer|don.t|avoid) .{1,50}" | head -1 || echo "")
|
|
820
|
+
|
|
821
|
+
# Extract errors from assistant response
|
|
822
|
+
ERRORS_FOUND=$(echo "$SCRUBBED_ASSISTANT" | grep -oiE "(error|failed|cannot|exception|not found).{0,80}" | head -3 | jq -R -s -c 'split("\n") | map(select(. != ""))' || echo "[]")
|
|
823
|
+
[ -z "$ERRORS_FOUND" ] && ERRORS_FOUND="[]"
|
|
824
|
+
|
|
825
|
+
# Get git status (fast, local only)
|
|
826
|
+
GIT_CHANGED=$(git diff --name-only 2>/dev/null | head -10 | jq -R -s -c 'split("\n") | map(select(. != ""))' || echo "[]")
|
|
827
|
+
GIT_STAT=$(git diff --stat 2>/dev/null | tail -1 | tr -d '\n' || echo "")
|
|
828
|
+
|
|
829
|
+
# Extract commands from Bash tool calls (first 50 chars each)
|
|
830
|
+
COMMANDS_RUN=$(echo "$SCRUBBED_ASSISTANT" | grep -oE '\$ [^\n]{1,50}' | head -5 | sed 's/^\$ //' | jq -R -s -c 'split("\n") | map(select(. != ""))' || echo "[]")
|
|
831
|
+
[ -z "$COMMANDS_RUN" ] && COMMANDS_RUN="[]"
|
|
832
|
+
|
|
833
|
+
# Build fast-capture payload
|
|
834
|
+
FAST_PAYLOAD=$(jq -n \
|
|
835
|
+
--arg session_name "$SESSION_NAME" \
|
|
836
|
+
--argjson turn_number "$TURN_NUMBER" \
|
|
837
|
+
--arg user_intent "${SCRUBBED_USER:0:200}" \
|
|
838
|
+
--arg user_decision "$USER_DECISION" \
|
|
839
|
+
--arg user_correction "$USER_CORRECTION" \
|
|
840
|
+
--arg user_preference "$USER_PREFERENCE" \
|
|
841
|
+
--argjson tools_used "$TOOLS_USED" \
|
|
842
|
+
--argjson files_modified "$FILES_REFERENCED" \
|
|
843
|
+
--argjson commands_run "$COMMANDS_RUN" \
|
|
844
|
+
--argjson errors "$ERRORS_FOUND" \
|
|
845
|
+
--argjson git_files_changed "$GIT_CHANGED" \
|
|
846
|
+
--arg git_diff_stat "$GIT_STAT" \
|
|
847
|
+
--arg outcome "success" \
|
|
848
|
+
'{
|
|
849
|
+
session_name: $session_name,
|
|
850
|
+
turn_number: $turn_number,
|
|
851
|
+
user_intent: $user_intent,
|
|
852
|
+
user_decision: (if $user_decision == "" then null else $user_decision end),
|
|
853
|
+
user_correction: (if $user_correction == "" then null else $user_correction end),
|
|
854
|
+
user_preference: (if $user_preference == "" then null else $user_preference end),
|
|
855
|
+
tools_used: $tools_used,
|
|
856
|
+
files_modified: $files_modified,
|
|
857
|
+
commands_run: $commands_run,
|
|
858
|
+
errors: $errors,
|
|
859
|
+
git_files_changed: $git_files_changed,
|
|
860
|
+
git_diff_stat: (if $git_diff_stat == "" then null else $git_diff_stat end),
|
|
861
|
+
outcome: $outcome
|
|
862
|
+
}' 2>/dev/null)
|
|
863
|
+
|
|
864
|
+
# Fire fast-capture in background (non-blocking, <20ms)
|
|
865
|
+
if [ -n "$FAST_PAYLOAD" ]; then
|
|
866
|
+
curl -s -X POST "$MEMORY_API_URL/api/v1/working/fast-capture" \
|
|
867
|
+
-H "Authorization: Bearer $AUTH_TOKEN" \
|
|
868
|
+
-H "Content-Type: application/json" \
|
|
869
|
+
-d "$FAST_PAYLOAD" \
|
|
870
|
+
--connect-timeout 1 \
|
|
871
|
+
--max-time 2 >/dev/null 2>&1 &
|
|
872
|
+
fi
|
|
873
|
+
|
|
874
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
875
|
+
# 💾 LOCAL CACHE: Tier 0 - Update turn with assistant response
|
|
876
|
+
# Updates the turn created by user-prompt-submit hook with the response
|
|
877
|
+
# ═══════════════════════════════════════════════════════════════════════════
|
|
878
|
+
if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
|
|
879
|
+
# Escape response for shell (use base64 for safety with complex content)
|
|
880
|
+
RESPONSE_B64=$(echo "$SCRUBBED_ASSISTANT" | base64 2>/dev/null || echo "")
|
|
881
|
+
if [ -n "$RESPONSE_B64" ]; then
|
|
882
|
+
# Decode and pass to capture command (handles newlines and special chars)
|
|
883
|
+
DECODED_RESPONSE=$(echo "$RESPONSE_B64" | base64 -d 2>/dev/null || echo "")
|
|
884
|
+
if [ -n "$DECODED_RESPONSE" ]; then
|
|
885
|
+
(ekkos-capture response "$SESSION_ID" "$TURN_NUMBER" "$DECODED_RESPONSE" "$TOOLS_USED" "$FILES_REFERENCED" \
|
|
886
|
+
>/dev/null 2>&1) &
|
|
887
|
+
fi
|
|
888
|
+
fi
|
|
889
|
+
fi
|
|
890
|
+
fi
|
|
891
|
+
|
|
892
|
+
# ═══════════════════════════════════════════════════════════════════════════
# 💾 FALLBACK LOCAL CACHE UPDATE: Even if L2/Redis capture was skipped
# This ensures local cache gets updated with assistant response for /continue
# ═══════════════════════════════════════════════════════════════════════════
# Runs only when the main capture path was skipped (LAST_USER empty) but an
# assistant response is available, so the local cache still records the turn.
if [ -n "$LAST_ASSISTANT" ] && command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
  # Only run if we didn't already update (check if inside the main block or not)
  # This handles the case where LAST_USER was empty but LAST_ASSISTANT is available
  if [ -z "$LAST_USER" ]; then
    echo "[ekkOS DEBUG] Fallback local cache update: LAST_ASSISTANT available, updating turn $TURN_NUMBER" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
    # FIX: the previous base64-encode-then-decode round trip was a no-op that
    # could only lose the response (e.g. if base64 were unavailable); use the
    # response text directly. The outer guard already ensures it is non-empty.
    # Collect distinct tool names from "[TOOL: name]" markers as a JSON array;
    # falls back to "[]" if jq is unavailable or the pipeline fails.
    TOOLS_USED=$(echo "$LAST_ASSISTANT" | grep -oE '\[TOOL: [^\]]+\]' | sed 's/\[TOOL: //g; s/\]//g' | sort -u | jq -R -s -c 'split("\n") | map(select(. != ""))' 2>/dev/null || echo "[]")
    FILES_REFERENCED="[]"
    # Fire-and-forget: capture runs in a background subshell so the hook never blocks.
    (ekkos-capture response "$SESSION_ID" "$TURN_NUMBER" "$LAST_ASSISTANT" "$TOOLS_USED" "$FILES_REFERENCED" \
      >/dev/null 2>&1) &
    echo "[ekkOS] Turn $TURN_NUMBER: Local cache updated via fallback (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
  fi
fi
|
|
914
|
+
|
|
915
|
+
# ═══════════════════════════════════════════════════════════════════════════
# 🔄 GOLDEN LOOP: DETECT PHASES FROM RESPONSE
# ═══════════════════════════════════════════════════════════════════════════
GOLDEN_LOOP_FILE="$PROJECT_ROOT/.ekkos/golden-loop-current.json"

if [ -n "$LAST_ASSISTANT" ] && [ -f "$GOLDEN_LOOP_FILE" ]; then
  # Detect phases from agent response
  RETRIEVED=0
  APPLIED=0
  FORGED=0

  # FIX: `grep -c` always prints a count (it prints "0" on no match) but exits
  # non-zero when nothing matched, so the previous `|| echo "0"` fallback
  # produced the two-line value "0\n0". That broke every numeric test below
  # ("integer expression expected") and made the `jq --argjson` calls fail,
  # silently disabling the golden-loop update. `|| true` keeps the pipeline
  # errexit-safe; `${VAR:-0}` covers the degenerate case of grep being absent.

  # 🔍 RETRIEVE: Count ekkOS_Search calls (MCP tool invocations)
  RETRIEVED=$(echo "$LAST_ASSISTANT" | grep -c "mcp__ekkos-memory__ekkOS_Search" 2>/dev/null || true)
  RETRIEVED=${RETRIEVED:-0}
  if [ "$RETRIEVED" -eq 0 ]; then
    RETRIEVED=$(echo "$LAST_ASSISTANT" | grep -c "ekkOS_Search" 2>/dev/null || true)
    RETRIEVED=${RETRIEVED:-0}
  fi

  # 💉 INJECT: Count [ekkOS_SELECT] pattern acknowledgments
  APPLIED=$(echo "$LAST_ASSISTANT" | grep -c "\[ekkOS_SELECT\]" 2>/dev/null || true)
  APPLIED=${APPLIED:-0}

  # 📊 MEASURE: Count ekkOS_Forge calls (pattern creation)
  FORGED=$(echo "$LAST_ASSISTANT" | grep -c "mcp__ekkos-memory__ekkOS_Forge" 2>/dev/null || true)
  FORGED=${FORGED:-0}
  if [ "$FORGED" -eq 0 ]; then
    FORGED=$(echo "$LAST_ASSISTANT" | grep -c "ekkOS_Forge" 2>/dev/null || true)
    FORGED=${FORGED:-0}
  fi

  # Determine current phase based on what's happening
  # (priority: forging new patterns > applying > retrieving > idle/complete)
  CURRENT_PHASE="complete"
  if [ "$FORGED" -gt 0 ]; then
    CURRENT_PHASE="measure"
  elif [ "$APPLIED" -gt 0 ]; then
    CURRENT_PHASE="inject"
  elif [ "$RETRIEVED" -gt 0 ]; then
    CURRENT_PHASE="retrieve"
  fi

  # Update Golden Loop file with detected stats (best-effort; never fail the hook)
  jq -n \
    --arg phase "$CURRENT_PHASE" \
    --argjson turn "$TURN_NUMBER" \
    --arg session "$SESSION_NAME" \
    --arg timestamp "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
    --argjson retrieved "$RETRIEVED" \
    --argjson applied "$APPLIED" \
    --argjson forged "$FORGED" \
    '{
      phase: $phase,
      turn: $turn,
      session: $session,
      timestamp: $timestamp,
      stats: {
        retrieved: $retrieved,
        applied: $applied,
        forged: $forged
      }
    }' > "$GOLDEN_LOOP_FILE" 2>/dev/null || true

  # ═══════════════════════════════════════════════════════════════════════
  # 📊 GOLDEN LOOP STEP 5: AUTO-OUTCOME — Close the feedback loop
  # Parses [ekkOS_SELECT] blocks → extracts pattern IDs → calls ekkOS_Outcome
  # Feeds: pattern_applications → pattern_application_links → PROMETHEUS delta
  # ekkOS_Outcome is ENHANCED: auto-creates Track record (no separate Track call needed)
  # ═══════════════════════════════════════════════════════════════════════
  if [ "$APPLIED" -gt 0 ] && [ -n "$EKKOS_API_KEY" ]; then
    # Extract pattern IDs from [ekkOS_SELECT] blocks
    # Format: - id: <pattern_id> (UUID format: 8-4-4-4-12 hex)
    SELECTED_IDS=$(echo "$LAST_ASSISTANT" | grep -oE 'id: [a-f0-9-]{36}' | sed 's/id: //' | sort -u || echo "")

    # Also try compact format: id:<pattern_id>
    if [ -z "$SELECTED_IDS" ]; then
      SELECTED_IDS=$(echo "$LAST_ASSISTANT" | grep -oE 'id:[a-f0-9-]{36}' | sed 's/id://' | sort -u || echo "")
    fi

    if [ -n "$SELECTED_IDS" ]; then
      # Build memory_ids JSON array using jq
      MEMORY_IDS_JSON=$(echo "$SELECTED_IDS" | jq -R -s 'split("\n") | map(select(length > 0))' 2>/dev/null || echo "[]")
      SELECTED_COUNT=$(echo "$SELECTED_IDS" | wc -l | tr -d ' ')

      # Infer outcome: success unless unresolved errors detected
      # The heuristic checks for error indicators WITHOUT resolution markers
      # (same grep -c fix as above: `|| true` + default instead of `|| echo "0"`)
      OUTCOME_SUCCESS=true
      HAS_ERRORS=$(echo "$LAST_ASSISTANT" | grep -ciE '(error|failed|cannot|exception|not found|bug|broken|not working|traceback|panic)' 2>/dev/null || true)
      HAS_ERRORS=${HAS_ERRORS:-0}
      HAS_RESOLUTION=$(echo "$LAST_ASSISTANT" | grep -ciE '(fixed|resolved|working now|succeeded|completed successfully|done|solved|corrected)' 2>/dev/null || true)
      HAS_RESOLUTION=${HAS_RESOLUTION:-0}

      if [ "$HAS_ERRORS" -gt 0 ] && [ "$HAS_RESOLUTION" -eq 0 ]; then
        OUTCOME_SUCCESS=false
      fi

      # Call ekkOS_Outcome via MCP gateway (async, non-blocking)
      # Enhanced endpoint: auto-creates Track record from memory_ids
      # Populates: pattern_applications.applied_at, pattern_applications.outcome_success
      # Increments: patterns.applied_count, patterns.last_applied_at
      # Links: pattern_application_links for cross-layer PROMETHEUS delta
      OUTCOME_BODY=$(jq -n \
        --argjson memory_ids "$MEMORY_IDS_JSON" \
        --argjson success "$OUTCOME_SUCCESS" \
        --arg model_used "$MODEL_USED" \
        '{
          tool: "ekkOS_Outcome",
          arguments: {
            memory_ids: $memory_ids,
            success: $success,
            model_used: $model_used
          }
        }' 2>/dev/null)

      if [ -n "$OUTCOME_BODY" ]; then
        # Background subshell + tight timeouts keep the hook non-blocking
        (curl -s -X POST "$MEMORY_API_URL/api/v1/mcp/call" \
          -H "Authorization: Bearer $EKKOS_API_KEY" \
          -H "Content-Type: application/json" \
          -d "$OUTCOME_BODY" \
          --connect-timeout 2 \
          --max-time 3 >/dev/null 2>&1) &

        echo "[ekkOS] Auto-outcome: ${SELECTED_COUNT} patterns, success=${OUTCOME_SUCCESS} (session: $SESSION_NAME, turn: $TURN_NUMBER)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
      fi
    fi
  fi
fi
|
|
357
1030
|
|
|
358
1031
|
# ═══════════════════════════════════════════════════════════════════════════
|