@ekkos/cli 0.2.9 → 0.2.10
This diff shows the changes between two publicly released versions of the package, as published to the supported public registries, and is provided for informational purposes only.
- package/dist/cache/LocalSessionStore.d.ts +34 -21
- package/dist/cache/LocalSessionStore.js +169 -53
- package/dist/cache/capture.d.ts +19 -11
- package/dist/cache/capture.js +243 -76
- package/dist/cache/types.d.ts +14 -1
- package/dist/commands/doctor.d.ts +10 -0
- package/dist/commands/doctor.js +148 -73
- package/dist/commands/hooks.d.ts +109 -0
- package/dist/commands/hooks.js +668 -0
- package/dist/commands/run.d.ts +1 -0
- package/dist/commands/run.js +69 -21
- package/dist/index.js +42 -1
- package/dist/restore/RestoreOrchestrator.d.ts +17 -3
- package/dist/restore/RestoreOrchestrator.js +64 -22
- package/dist/utils/paths.d.ts +125 -0
- package/dist/utils/paths.js +283 -0
- package/package.json +1 -1
- package/templates/ekkos-manifest.json +223 -0
- package/templates/helpers/json-parse.cjs +101 -0
- package/templates/hooks/assistant-response.ps1 +256 -0
- package/templates/hooks/assistant-response.sh +124 -64
- package/templates/hooks/session-start.ps1 +107 -2
- package/templates/hooks/session-start.sh +201 -166
- package/templates/hooks/stop.ps1 +124 -3
- package/templates/hooks/stop.sh +470 -843
- package/templates/hooks/user-prompt-submit.ps1 +107 -22
- package/templates/hooks/user-prompt-submit.sh +403 -393
- package/templates/project-stubs/session-start.ps1 +63 -0
- package/templates/project-stubs/session-start.sh +55 -0
- package/templates/project-stubs/stop.ps1 +63 -0
- package/templates/project-stubs/stop.sh +55 -0
- package/templates/project-stubs/user-prompt-submit.ps1 +63 -0
- package/templates/project-stubs/user-prompt-submit.sh +55 -0
- package/templates/shared/hooks-enabled.json +22 -0
- package/templates/shared/session-words.json +45 -0
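
The hook templates in this release drop their jq dependency in favor of a bundled Node helper, templates/helpers/json-parse.cjs, which the stop.sh diff below calls as `node "$JSON_PARSE_HELPER" <file> '<dot.path>'` both for scalar lookups (e.g. '.session_id', '.hookApiKey') and for the word lists consumed via `readarray -t`. The helper's own source is not part of this diff, so the sketch below is only a guess at what such a helper could look like under those assumptions; the argument handling and output rules here are illustrative, not the published implementation.

#!/usr/bin/env node
// Hypothetical sketch of a json-parse.cjs-style helper (the bundled file is not shown in this diff).
// Usage: node json-parse.cjs <file.json> <.dot.path>
// Prints a scalar value as-is, array elements one per line (so `readarray -t` can consume them),
// and nothing at all when the file or path cannot be resolved.
const fs = require('fs');

const [file, rawPath] = process.argv.slice(2);
if (!file || !rawPath) {
  console.error('usage: node json-parse.cjs <file> <.dot.path>');
  process.exit(1);
}

let data;
try {
  data = JSON.parse(fs.readFileSync(file, 'utf8'));
} catch {
  process.exit(1); // unreadable or invalid JSON: print nothing, like the hooks' `|| echo ""` fallbacks expect
}

// Walk the dot-separated path, e.g. '.message.usage.input_tokens'
let result = data;
for (const key of rawPath.replace(/^\./, '').split('.').filter(Boolean)) {
  if (result === null || result === undefined) break;
  result = result[key];
}

if (result === undefined || result === null) process.exit(0);
if (Array.isArray(result)) {
  for (const item of result) console.log(typeof item === 'string' ? item : JSON.stringify(item));
} else if (typeof result === 'object') {
  console.log(JSON.stringify(result));
} else {
  console.log(result);
}

Under those assumptions, `node json-parse.cjs ~/.ekkos/config.json '.hookApiKey'` would print a single token, while `'.adjectives'` against session-words.json would print one word per line, which is how the readarray calls in stop.sh consume the helper.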
package/templates/hooks/stop.sh
CHANGED
@@ -1,11 +1,15 @@
 #!/bin/bash
 # ═══════════════════════════════════════════════════════════════════════════
 # ekkOS_ Hook: Stop - FULL CONTEXT CAPTURE
+# MANAGED BY ekkos-connect - DO NOT EDIT DIRECTLY
+# EKKOS_MANAGED=1
 # ═══════════════════════════════════════════════════════════════════════════
 # Captures FULL turn content to L2 (episodic memory):
 # - Full user query
 # - Full assistant response (no truncation)
 # - Complete file changes with edit content (old_string → new_string)
+#
+# Per spec v1.2 Addendum: NO jq dependency, NO hardcoded arrays
 # ═══════════════════════════════════════════════════════════════════════════

 set +e
@@ -14,16 +18,41 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
 PROJECT_ROOT="$(dirname "$(dirname "$SCRIPT_DIR")")"
 STATE_DIR="$PROJECT_ROOT/.claude/state"

+# ═══════════════════════════════════════════════════════════════════════════
+# CONFIG PATHS - Per spec v1.2 Addendum
+# ═══════════════════════════════════════════════════════════════════════════
+EKKOS_CONFIG_DIR="${EKKOS_CONFIG_DIR:-$HOME/.ekkos}"
+SESSION_WORDS_JSON="$EKKOS_CONFIG_DIR/session-words.json"
+SESSION_WORDS_DEFAULT="$EKKOS_CONFIG_DIR/.defaults/session-words.json"
+JSON_PARSE_HELPER="$EKKOS_CONFIG_DIR/.helpers/json-parse.cjs"
+
 INPUT=$(cat)

-… [old lines 19-21 not shown]
+# Parse JSON using Node (no jq)
+parse_json_value() {
+local json="$1"
+local path="$2"
+echo "$json" | node -e "
+const data = JSON.parse(require('fs').readFileSync('/dev/stdin', 'utf8') || '{}');
+const path = '$path'.replace(/^\./,'').split('.').filter(Boolean);
+let result = data;
+for (const p of path) {
+if (result === undefined || result === null) { result = undefined; break; }
+result = result[p];
+}
+if (result !== undefined && result !== null) console.log(result);
+" 2>/dev/null || echo ""
+}
+
+RAW_SESSION_ID=$(parse_json_value "$INPUT" '.session_id')
+[ -z "$RAW_SESSION_ID" ] && RAW_SESSION_ID="unknown"

-… [old line 23 not shown]
+TRANSCRIPT_PATH=$(parse_json_value "$INPUT" '.transcript_path')
+MODEL_USED=$(parse_json_value "$INPUT" '.model')
+[ -z "$MODEL_USED" ] && MODEL_USED="claude-sonnet-4-5"
+
+# DEBUG: Log hook input
 echo "[ekkOS DEBUG] $(date -u +%H:%M:%S) stop.sh: session=$RAW_SESSION_ID, transcript_path=$TRANSCRIPT_PATH" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
-echo "[ekkOS DEBUG] $(date -u +%H:%M:%S) stop.sh: transcript exists=$([ -f "$TRANSCRIPT_PATH" ] && echo 'yes' || echo 'no')" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
-echo "[ekkOS DEBUG] INPUT keys: $(echo "$INPUT" | jq -r 'keys | join(",")')" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null

 # ═══════════════════════════════════════════════════════════════════════════
 # Session ID - Try Claude's input first, fallback to state file
@@ -32,15 +61,15 @@ SESSION_ID="$RAW_SESSION_ID"

 # Fallback: Read from state file if input doesn't have valid session_id
 if [ -z "$SESSION_ID" ] || [ "$SESSION_ID" = "unknown" ] || [ "$SESSION_ID" = "null" ]; then
-… [old lines 35-38 not shown]
+STATE_FILE="$HOME/.claude/state/current-session.json"
+if [ -f "$STATE_FILE" ] && [ -f "$JSON_PARSE_HELPER" ]; then
+SESSION_ID=$(node "$JSON_PARSE_HELPER" "$STATE_FILE" '.session_id' 2>/dev/null || echo "")
+fi
 fi

 # Skip if still no valid session ID
 if [ -z "$SESSION_ID" ] || [ "$SESSION_ID" = "unknown" ] || [ "$SESSION_ID" = "null" ]; then
-… [old line 43 not shown]
+exit 0
 fi

 # ═══════════════════════════════════════════════════════════════════════════
@@ -50,13 +79,16 @@ EKKOS_CONFIG="$HOME/.ekkos/config.json"
 AUTH_TOKEN=""
 USER_ID=""

-if [ -f "$EKKOS_CONFIG" ]; then
-… [old lines 54-55 not shown]
+if [ -f "$EKKOS_CONFIG" ] && [ -f "$JSON_PARSE_HELPER" ]; then
+AUTH_TOKEN=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.hookApiKey' 2>/dev/null || echo "")
+if [ -z "$AUTH_TOKEN" ]; then
+AUTH_TOKEN=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.apiKey' 2>/dev/null || echo "")
+fi
+USER_ID=$(node "$JSON_PARSE_HELPER" "$EKKOS_CONFIG" '.userId' 2>/dev/null || echo "")
 fi

 if [ -z "$AUTH_TOKEN" ] && [ -f "$PROJECT_ROOT/.env.local" ]; then
-… [old line 59 not shown]
+AUTH_TOKEN=$(grep -E "^SUPABASE_SECRET_KEY=" "$PROJECT_ROOT/.env.local" | cut -d'=' -f2- | tr -d '"' | tr -d "'" | tr -d '\r')
 fi

 [ -z "$AUTH_TOKEN" ] && exit 0
@@ -64,83 +96,80 @@ fi
 MEMORY_API_URL="https://mcp.ekkos.dev"

 # ═══════════════════════════════════════════════════════════════════════════
-# WORD-BASED SESSION NAMES
-# Format: adj-noun-verb (e.g., "cosmic-penguin-runs")
-# 100 × 100 × 100 = 1,000,000 combinations (vs 10,000 with 2-word)
-# Matches server-side session-names.ts algorithm
+# WORD-BASED SESSION NAMES - Uses external session-words.json (NO hardcoded arrays)
 # ═══════════════════════════════════════════════════════════════════════════
-ADJECTIVES
-… [old lines 73-101 not shown]
-VERBS=(
-… [old lines 103-112 not shown]
-"pushes" "pulls" "lifts" "throws" "kicks" "punts" "bats" "swings"
-"reads" "writes" "draws" "paints" "sculpts" "carves" "molds" "weaves"
-"cooks" "bakes" "grills" "fries"
-)
-… [old line 117 not shown]
-# Convert UUID to 3-word name deterministically
+declare -a ADJECTIVES
+declare -a NOUNS
+declare -a VERBS
+SESSION_WORDS_LOADED=false
+
+load_session_words() {
+if [ "$SESSION_WORDS_LOADED" = "true" ]; then
+return 0
+fi
+
+local words_file="$SESSION_WORDS_JSON"
+if [ ! -f "$words_file" ]; then
+words_file="$SESSION_WORDS_DEFAULT"
+fi
+
+if [ ! -f "$words_file" ] || [ ! -f "$JSON_PARSE_HELPER" ]; then
+return 1
+fi
+
+if command -v node &>/dev/null; then
+if [ "${BASH_VERSINFO[0]}" -ge 4 ]; then
+readarray -t ADJECTIVES < <(node "$JSON_PARSE_HELPER" "$words_file" '.adjectives' 2>/dev/null)
+readarray -t NOUNS < <(node "$JSON_PARSE_HELPER" "$words_file" '.nouns' 2>/dev/null)
+readarray -t VERBS < <(node "$JSON_PARSE_HELPER" "$words_file" '.verbs' 2>/dev/null)
+else
+local i=0
+while IFS= read -r line; do ADJECTIVES[i]="$line"; ((i++)); done < <(node "$JSON_PARSE_HELPER" "$words_file" '.adjectives' 2>/dev/null)
+i=0
+while IFS= read -r line; do NOUNS[i]="$line"; ((i++)); done < <(node "$JSON_PARSE_HELPER" "$words_file" '.nouns' 2>/dev/null)
+i=0
+while IFS= read -r line; do VERBS[i]="$line"; ((i++)); done < <(node "$JSON_PARSE_HELPER" "$words_file" '.verbs' 2>/dev/null)
+fi
+
+if [ ${#ADJECTIVES[@]} -gt 0 ] && [ ${#NOUNS[@]} -gt 0 ] && [ ${#VERBS[@]} -gt 0 ]; then
+SESSION_WORDS_LOADED=true
+return 0
+fi
+fi
+return 1
+}
+
 uuid_to_words() {
-… [old lines 120-122 not shown]
+local uuid="$1"
+
+load_session_words || {
+echo "unknown-session-starts"
+return
+}

-… [old lines 124-127 not shown]
+local hex="${uuid//-/}"
+hex="${hex:0:12}"
+
+if [[ ! "$hex" =~ ^[0-9a-fA-F]+$ ]]; then
+echo "unknown-session-starts"
+return
+fi

-… [old lines 129-131 not shown]
+local adj_seed=$((16#${hex:0:4}))
+local noun_seed=$((16#${hex:4:4}))
+local verb_seed=$((16#${hex:8:4}))

-… [old lines 133-135 not shown]
+local adj_idx=$((adj_seed % ${#ADJECTIVES[@]}))
+local noun_idx=$((noun_seed % ${#NOUNS[@]}))
+local verb_idx=$((verb_seed % ${#VERBS[@]}))

-… [old line 137 not shown]
+echo "${ADJECTIVES[$adj_idx]}-${NOUNS[$noun_idx]}-${VERBS[$verb_idx]}"
 }

 # Generate session name from UUID
 SESSION_NAME=""
 if [ -n "$SESSION_ID" ] && [ "$SESSION_ID" != "unknown" ] && [ "$SESSION_ID" != "null" ]; then
-… [old line 143 not shown]
+SESSION_NAME=$(uuid_to_words "$SESSION_ID")
 fi

 # ═══════════════════════════════════════════════════════════════════════════
@@ -153,829 +182,427 @@ TURN_NUMBER=1

 # ═══════════════════════════════════════════════════════════════════════════
 # AUTO-CLEAR DETECTION (EARLY): Must run BEFORE any early exits
-# If context >= 92%, write flag for ekkos run wrapper immediately
 # ═══════════════════════════════════════════════════════════════════════════
 if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
-… [old lines 159-179 not shown]
-echo "${TOKEN_PERCENT}:${SESSION_NAME}:${TIMESTAMP_EPOCH}" > "$AUTO_CLEAR_FLAG"
-echo "[ekkOS] Context at ${TOKEN_PERCENT}% - auto-clear flag written (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
-fi
+MAX_TOKENS=200000
+
+if stat -f%z "$TRANSCRIPT_PATH" >/dev/null 2>&1; then
+FILE_SIZE=$(stat -f%z "$TRANSCRIPT_PATH")
+else
+FILE_SIZE=$(stat -c%s "$TRANSCRIPT_PATH" 2>/dev/null || echo "0")
+fi
+ROUGH_TOKENS=$((FILE_SIZE / 4))
+TOKEN_PERCENT=$((ROUGH_TOKENS * 100 / MAX_TOKENS))
+
+if [ "$TOKEN_PERCENT" -gt 50 ]; then
+WORD_COUNT=$(wc -w < "$TRANSCRIPT_PATH" 2>/dev/null | tr -d ' ' || echo "0")
+TOKEN_PERCENT=$((WORD_COUNT * 13 / 10 * 100 / MAX_TOKENS))
+fi
+
+if [ "$TOKEN_PERCENT" -ge 92 ]; then
+AUTO_CLEAR_FLAG="$HOME/.ekkos/auto-clear.flag"
+TIMESTAMP_EPOCH=$(date +%s)
+echo "${TOKEN_PERCENT}:${SESSION_NAME}:${TIMESTAMP_EPOCH}" > "$AUTO_CLEAR_FLAG"
+echo "[ekkOS] Context at ${TOKEN_PERCENT}% - auto-clear flag written (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
+fi
 fi

 # ═══════════════════════════════════════════════════════════════════════════
 # Check for interruption - skip capture if request was interrupted
 # ═══════════════════════════════════════════════════════════════════════════
-IS_INTERRUPTED=$(
-… [old line 189 not shown]
+IS_INTERRUPTED=$(parse_json_value "$INPUT" '.interrupted')
+[ -z "$IS_INTERRUPTED" ] && IS_INTERRUPTED="false"
+
+STOP_REASON=$(parse_json_value "$INPUT" '.stop_reason')

-# Skip capture for interrupted/cancelled requests
 if [ "$IS_INTERRUPTED" = "true" ] || [ "$STOP_REASON" = "user_cancelled" ] || [ "$STOP_REASON" = "interrupted" ]; then
-… [old line 193 not shown]
+exit 0
 fi

 # ═══════════════════════════════════════════════════════════════════════════
-# Extract conversation from transcript
+# Extract conversation from transcript using Node (no jq)
 # ═══════════════════════════════════════════════════════════════════════════
 LAST_USER=""
 LAST_ASSISTANT=""
 FILE_CHANGES="[]"

 if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
-… [old lines 204-227 not shown]
+# Extract using Node - handles complex JSON reliably
+EXTRACTION=$(node -e "
+const fs = require('fs');
+const lines = fs.readFileSync('$TRANSCRIPT_PATH', 'utf8').split('\\n').filter(Boolean);
+const entries = lines.map(l => { try { return JSON.parse(l); } catch { return null; } }).filter(Boolean);
+
+// Get last user message (not starting with <)
+let lastUser = '';
+let lastUserTime = '';
+for (let i = entries.length - 1; i >= 0; i--) {
+const e = entries[i];
+if (e.type === 'user') {
+const content = e.message?.content;
+if (typeof content === 'string' && !content.startsWith('<')) {
+lastUser = content;
+lastUserTime = e.timestamp || '';
+break;
+} else if (Array.isArray(content)) {
+const textPart = content.find(c => c.type === 'text' && !c.text?.startsWith('<'));
+if (textPart) {
+lastUser = textPart.text;
+lastUserTime = e.timestamp || '';
+break;
+}
+}
+}
+}
+
+// Get last assistant message (after the user message)
+let lastAssistant = '';
+for (let i = entries.length - 1; i >= 0; i--) {
+const e = entries[i];
+if (e.type === 'assistant' && (!lastUserTime || e.timestamp >= lastUserTime)) {
+const content = e.message?.content;
+if (typeof content === 'string') {
+lastAssistant = content;
+break;
+} else if (Array.isArray(content)) {
+const parts = content.map(c => {
+if (c.type === 'text') return c.text;
+if (c.type === 'tool_use') return '[TOOL: ' + c.name + ']';
+if (c.type === 'thinking') return '[THINKING]' + (c.thinking || c.text || '') + '[/THINKING]';
+return '';
+}).filter(Boolean);
+lastAssistant = parts.join('\\n');
+break;
+}
+}
+}
+
+// Extract file changes
+const fileChanges = [];
+entries.filter(e => e.type === 'assistant').forEach(e => {
+const content = e.message?.content;
+if (Array.isArray(content)) {
+content.filter(c => c.type === 'tool_use' && ['Edit', 'Write', 'Read'].includes(c.name)).forEach(c => {
+fileChanges.push({
+tool: c.name,
+path: c.input?.file_path || c.input?.path,
+action: c.name.toLowerCase(),
+old_string: c.name === 'Edit' ? (c.input?.old_string || '').substring(0, 200) : null,
+new_string: c.name === 'Edit' ? (c.input?.new_string || '').substring(0, 200) : null,
+content: c.name === 'Write' ? (c.input?.content || '').substring(0, 500) : null
+});
+});
+}
+});
+
+// Output as JSON
+console.log(JSON.stringify({
+user: lastUser,
+assistant: lastAssistant.substring(0, 50000),
+fileChanges: fileChanges.slice(0, 20)
+}));
+" 2>/dev/null || echo '{"user":"","assistant":"","fileChanges":[]}')
+
+LAST_USER=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.user||'')" 2>/dev/null || echo "")
+LAST_ASSISTANT=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.assistant||'')" 2>/dev/null || echo "")
+FILE_CHANGES=$(echo "$EXTRACTION" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(JSON.stringify(d.fileChanges||[]))" 2>/dev/null || echo "[]")
+fi
+
+# Check for interruption markers
+if [[ "$LAST_USER" == *"[Request interrupted"* ]] || [[ "$LAST_USER" == *"interrupted by user"* ]]; then
 echo "[ekkOS] Turn $TURN_NUMBER skipped: interruption marker (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
 exit 0
-fi
-… [old line 231 not shown]
-# Get timestamp of last valid user message (handles both string and array content)
-LAST_USER_TIME=$(cat "$TRANSCRIPT_PATH" | jq -r '
-select(.type == "user")
-| select(
-(.message.content | type == "string" and (startswith("<") | not)) or
-(.message.content | type == "array" and any(.[]; .type == "text" and (.text | startswith("<") | not)))
-)
-| .timestamp
-' 2>/dev/null | tail -1 || echo "")
-… [old line 241 not shown]
-if [ -n "$LAST_USER_TIME" ]; then
-# Get assistant response after user message - FULL CONTENT including tool calls
-# Captures: text blocks, tool_use (with name + input), and extended_thinking
-LAST_ASSISTANT=$(cat "$TRANSCRIPT_PATH" | jq -rs --arg time "$LAST_USER_TIME" '
-[.[] | select(.type == "assistant" and .timestamp > $time)] | last |
-.message.content |
-if type == "string" then .
-elif type == "array" then
-[.[] |
-if .type == "text" then .text
-elif .type == "tool_use" then
-"\n[TOOL: " + .name + "]\n" +
-(if .name == "Bash" then "$ " + (.input.command // "") + "\n"
-elif .name == "Read" then "Reading: " + (.input.file_path // "") + "\n"
-elif .name == "Write" then "Writing: " + (.input.file_path // "") + "\n"
-elif .name == "Edit" then "Editing: " + (.input.file_path // "") + "\n"
-elif .name == "Grep" then "Searching: " + (.input.pattern // "") + "\n"
-elif .name == "Glob" then "Finding: " + (.input.pattern // "") + "\n"
-elif .name == "WebFetch" then "Fetching: " + (.input.url // "") + "\n"
-elif .name == "Task" then "Agent: " + (.input.subagent_type // "") + " - " + (.input.description // "") + "\n"
-else (.input | tostring | .[0:500]) + "\n"
-end)
-elif .type == "thinking" then "\n[THINKING]\n" + (.thinking // .text // "") + "\n[/THINKING]\n"
-else empty
-end
-] | join("")
-else empty end
-' 2>/dev/null || echo "")
-… [old line 270 not shown]
-# Also capture tool_results that follow this assistant message
-TOOL_RESULTS=$(cat "$TRANSCRIPT_PATH" | jq -rs --arg time "$LAST_USER_TIME" '
-[.[] | select(.timestamp > $time)] |
-# Get tool results between last assistant and next user message
-[.[] | select(.type == "tool_result" or (.type == "user" and (.message.content | type == "array") and (.message.content | any(.type == "tool_result"))))] |
-.[0:10] | # Limit to first 10 tool results
-[.[] |
-if .type == "tool_result" then
-"\n[RESULT: " + (.tool_use_id // "unknown")[0:8] + "]\n" +
-(if (.content | type == "string") then (.content | .[0:2000])
-elif (.content | type == "array") then ([.content[] | select(.type == "text") | .text] | join("\n") | .[0:2000])
-else ""
-end) + "\n"
-elif .type == "user" then
-([.message.content[] | select(.type == "tool_result") |
-"\n[RESULT: " + (.tool_use_id // "unknown")[0:8] + "]\n" +
-(if (.content | type == "string") then (.content | .[0:2000])
-elif (.content | type == "array") then ([.content[] | select(.type == "text") | .text] | join("\n") | .[0:2000])
-else ""
-end) + "\n"
-] | join(""))
-else ""
-end
-] | join("")
-' 2>/dev/null || echo "")
-… [old line 296 not shown]
-# Combine assistant response with tool results
-if [ -n "$TOOL_RESULTS" ]; then
-LAST_ASSISTANT="${LAST_ASSISTANT}${TOOL_RESULTS}"
-fi
-fi
-… [old line 302 not shown]
-# Fallback: get last assistant message if timestamp method fails
-if [ -z "$LAST_ASSISTANT" ]; then
-LAST_ASSISTANT=$(cat "$TRANSCRIPT_PATH" | jq -rs '
-[.[] | select(.type == "assistant")] | last |
-.message.content |
-if type == "string" then .
-elif type == "array" then
-[.[] |
-if .type == "text" then .text
-elif .type == "tool_use" then
-"\n[TOOL: " + .name + "]\n" +
-(if .name == "Bash" then "$ " + (.input.command // "") + "\n"
-elif .name == "Read" then "Reading: " + (.input.file_path // "") + "\n"
-elif .name == "Write" then "Writing: " + (.input.file_path // "") + "\n"
-elif .name == "Edit" then "Editing: " + (.input.file_path // "") + "\n"
-else (.input | tostring | .[0:500]) + "\n"
-end)
-elif .type == "thinking" then "\n[THINKING]\n" + (.thinking // .text // "") + "\n[/THINKING]\n"
-else empty
-end
-] | join("")
-else empty end
-' 2>/dev/null || echo "")
-fi
-… [old line 327 not shown]
-# Extract file changes WITH FULL EDIT CONTENT for perfect context restoration
-# Includes old_string/new_string for edits, content for writes
-FILE_CHANGES=$(cat "$TRANSCRIPT_PATH" | jq -s '
-[.[] | select(.type == "assistant") | .message.content[]? | select(.type == "tool_use") |
-select(.name == "Edit" or .name == "Write" or .name == "Read") |
-{
-tool: .name,
-path: (.input.file_path // .input.path),
-action: (if .name == "Edit" then "edit" elif .name == "Write" then "write" else "read" end),
-# Full edit details for context restoration
-old_string: (if .name == "Edit" then (.input.old_string // null) else null end),
-new_string: (if .name == "Edit" then (.input.new_string // null) else null end),
-# Write content (truncated to 2000 chars to avoid massive payloads)
-content: (if .name == "Write" then (.input.content[:2000] // null) else null end),
-replace_all: (if .name == "Edit" then (.input.replace_all // false) else null end)
-}
-] | map(select(.path != null))
-' 2>/dev/null || echo "[]")
 fi

 # ═══════════════════════════════════════════════════════════════════════════
 # Capture to L2 (episodic memory) - SYNCHRONOUS for reliability
-# Background was causing missed captures when Claude Code exits fast
 # ═══════════════════════════════════════════════════════════════════════════
 if [ -z "$LAST_ASSISTANT" ]; then
-… [old line 353 not shown]
+echo "[ekkOS] Turn $TURN_NUMBER skipped: LAST_ASSISTANT empty (session: $SESSION_NAME)" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
 fi

 if [ -n "$LAST_USER" ] && [ -n "$LAST_ASSISTANT" ]; then
-… [old lines 357-367 not shown]
-'
-… [old lines 369-372 not shown]
-file_changes: $file_changes,
-metadata: {
-source: "claude-code",
-model_used: $model_used,
-captured_at: $captured_at,
-file_changes: $file_changes,
+PAYLOAD_FILE=$(mktemp /tmp/ekkos-capture.XXXXXX.json)
+TIMESTAMP=$(date -u +%Y-%m-%dT%H:%M:%SZ)
+
+# Build payload using Node (no jq)
+node -e "
+const fs = require('fs');
+const payload = {
+user_query: process.argv[1],
+assistant_response: process.argv[2],
+session_id: process.argv[3],
+user_id: process.argv[4] || 'system',
+file_changes: JSON.parse(process.argv[5] || '[]'),
+metadata: {
+source: 'claude-code',
+model_used: process.argv[6],
+captured_at: process.argv[7],
 minimal_hook: true
-… [old lines 380-404 not shown]
+}
+};
+fs.writeFileSync('$PAYLOAD_FILE', JSON.stringify(payload));
+" "$LAST_USER" "$LAST_ASSISTANT" "$SESSION_ID" "${USER_ID:-system}" "$FILE_CHANGES" "$MODEL_USED" "$TIMESTAMP" 2>/dev/null
+
+# Validate and send
+if node -e "JSON.parse(require('fs').readFileSync('$PAYLOAD_FILE','utf8'))" 2>/dev/null; then
+for RETRY in 1 2 3; do
+CAPTURE_RESULT=$(curl -s -w "\n%{http_code}" -X POST "$MEMORY_API_URL/api/v1/memory/capture" \
+-H "Authorization: Bearer $AUTH_TOKEN" \
+-H "Content-Type: application/json" \
+-d "@$PAYLOAD_FILE" \
+--connect-timeout 3 \
+--max-time 5 2>/dev/null || echo -e "\n000")
+
+HTTP_CODE=$(echo "$CAPTURE_RESULT" | tail -1)
+
+if [ "$HTTP_CODE" = "200" ] || [ "$HTTP_CODE" = "201" ]; then
+break
+fi
+[ $RETRY -lt 3 ] && sleep 0.5
+done
+
+if [ "$HTTP_CODE" != "200" ] && [ "$HTTP_CODE" != "201" ]; then
+echo "[ekkOS] L2 capture failed after 3 attempts: HTTP $HTTP_CODE" >&2
+mkdir -p "$HOME/.ekkos/wal" 2>/dev/null
+cp "$PAYLOAD_FILE" "$HOME/.ekkos/wal/l2-$(date +%s)-$$.json" 2>/dev/null
+fi
 fi
-fi

-… [old line 408 not shown]
+rm -f "$PAYLOAD_FILE" 2>/dev/null
 fi

 # ═══════════════════════════════════════════════════════════════════════════
 # REDIS WORKING MEMORY: Store verbatim turn in multi-session hot cache
-# 5 sessions × 20 turns = 100 turns total for instant context restoration
 # ═══════════════════════════════════════════════════════════════════════════
 if [ -n "$LAST_USER" ] && [ -n "$LAST_ASSISTANT" ] && [ -n "$SESSION_NAME" ]; then
-… [old lines 416-448 not shown]
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "openai_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:openai_$hash:api_key]}"
-done
-… [old line 453 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Anthropic (sk-ant-...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (sk-ant-[a-zA-Z0-9_-]{20,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "anthropic_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:anthropic_$hash:api_key]}"
-done
-… [old line 463 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# GitHub (ghp_, gho_, ghu_, ghs_, ghr_)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (ghp_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "github_pat_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:github_pat_$hash:token]}"
-done
-while [[ "$scrubbed" =~ (gho_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "github_oauth_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:github_oauth_$hash:token]}"
-done
-while [[ "$scrubbed" =~ (ghu_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "github_user_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:github_user_$hash:token]}"
-done
-while [[ "$scrubbed" =~ (ghs_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "github_app_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:github_app_$hash:token]}"
-done
-while [[ "$scrubbed" =~ (ghr_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "github_refresh_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:github_refresh_$hash:token]}"
-done
-… [old line 497 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# GitLab (glpat-...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (glpat-[a-zA-Z0-9_-]{20,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "gitlab_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:gitlab_$hash:token]}"
-done
-… [old line 507 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# AWS (AKIA...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (AKIA[A-Z0-9]{16}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "aws_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:aws_$hash:api_key]}"
-done
-… [old line 517 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Stripe (sk_live_, sk_test_, pk_live_, pk_test_)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (sk_live_[a-zA-Z0-9]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "stripe_live_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:stripe_live_$hash:api_key]}"
-done
-while [[ "$scrubbed" =~ (sk_test_[a-zA-Z0-9]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "stripe_test_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:stripe_test_$hash:api_key]}"
-done
-while [[ "$scrubbed" =~ (rk_live_[a-zA-Z0-9]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "stripe_restricted_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:stripe_restricted_$hash:api_key]}"
-done
-… [old line 539 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Slack (xoxb-, xoxp-, xoxa-, xoxs-)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (xoxb-[0-9a-zA-Z-]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "slack_bot_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:slack_bot_$hash:token]}"
-done
-while [[ "$scrubbed" =~ (xoxp-[0-9a-zA-Z-]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "slack_user_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:slack_user_$hash:token]}"
-done
-… [old line 555 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Google (AIza...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (AIza[0-9A-Za-z_-]{35}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "google_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:google_$hash:api_key]}"
-done
-… [old line 565 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Twilio (SK...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (SK[0-9a-fA-F]{32}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "twilio_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:twilio_$hash:api_key]}"
-done
-… [old line 575 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# SendGrid (SG....)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (SG\.[a-zA-Z0-9_-]{22}\.[a-zA-Z0-9_-]{43}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "sendgrid_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:sendgrid_$hash:api_key]}"
-done
-… [old line 585 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Mailgun (key-...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (key-[0-9a-zA-Z]{32}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "mailgun_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:mailgun_$hash:api_key]}"
-done
-… [old line 595 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# DigitalOcean (dop_v1_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (dop_v1_[a-z0-9]{64}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "digitalocean_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:digitalocean_$hash:token]}"
-done
-… [old line 605 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Shopify (shpat_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (shpat_[0-9a-fA-F]{32}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "shopify_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:shopify_$hash:token]}"
-done
-… [old line 615 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# npm (npm_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (npm_[a-zA-Z0-9]{36}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "npm_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:npm_$hash:token]}"
-done
-… [old line 625 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# PyPI (pypi-...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (pypi-[A-Za-z0-9_-]{50,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "pypi_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:pypi_$hash:token]}"
-done
-… [old line 635 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Supabase (sbp_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (sbp_[a-zA-Z0-9]{40,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "supabase_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:supabase_$hash:api_key]}"
-done
-… [old line 645 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Discord Bot Token
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ ([MN][A-Za-z0-9]{23,}\.[A-Za-z0-9_-]{6}\.[A-Za-z0-9_-]{27}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "discord_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:discord_$hash:token]}"
-done
-… [old line 655 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Vercel (vercel_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (vercel_[a-zA-Z0-9]{24,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "vercel_$hash" "$secret" "token"
-scrubbed="${scrubbed//$secret/[SECRET:vercel_$hash:token]}"
-done
-… [old line 665 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Heroku (heroku_...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (heroku_[a-zA-Z0-9_-]{30,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "heroku_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:heroku_$hash:api_key]}"
-done
-… [old line 675 not shown]
-# ─────────────────────────────────────────────────────────────────────────
-# Datadog (dd...)
-# ─────────────────────────────────────────────────────────────────────────
-while [[ "$scrubbed" =~ (ddapi_[a-zA-Z0-9]{32,}) ]]; do
-local secret="${BASH_REMATCH[1]}"
-local hash=$(echo -n "$secret" | md5 | cut -c1-8)
-store_secret "datadog_$hash" "$secret" "api_key"
-scrubbed="${scrubbed//$secret/[SECRET:datadog_$hash:api_key]}"
-done
-… [old line 685 not shown]
-echo "$scrubbed"
-}
-… [old line 688 not shown]
-# Scrub user query and assistant response
-SCRUBBED_USER=$(scrub_secrets "$LAST_USER")
-SCRUBBED_ASSISTANT=$(scrub_secrets "$LAST_ASSISTANT")
-… [old line 692 not shown]
-# Extract tools used from assistant response (simple grep for tool names)
-TOOLS_USED=$(echo "$SCRUBBED_ASSISTANT" | grep -oE '\[TOOL: [^\]]+\]' | sed 's/\[TOOL: //g; s/\]//g' | sort -u | jq -R -s -c 'split("\n") | map(select(. != ""))')
-[ -z "$TOOLS_USED" ] && TOOLS_USED="[]"
-… [old line 696 not shown]
-# Extract files referenced from file changes
-FILES_REFERENCED=$(echo "$FILE_CHANGES" | jq -c '[.[].path] | unique // []' 2>/dev/null || echo "[]")
-… [old line 699 not shown]
-# Build edits array from file changes (write and edit actions only)
-EDITS=$(echo "$FILE_CHANGES" | jq -c '[.[] | select(.action == "edit" or .action == "write") | {file_path: .path, action: .action, diff: (if .old_string then ("old: " + (.old_string | .[0:200]) + "\nnew: " + (.new_string | .[0:200])) else (.content | .[0:500]) end)}]' 2>/dev/null || echo "[]")
-… [old line 702 not shown]
-# ═══════════════════════════════════════════════════════════════════════════
-# ACCURATE TOKEN TRACKING: Extract REAL token counts from Anthropic API response
-# This gives us exact context usage instead of rough estimation
-# ═══════════════════════════════════════════════════════════════════════════
-TOTAL_CONTEXT_TOKENS=0
-INPUT_TOKENS=0
-OUTPUT_TOKENS=0
-… [old line 710 not shown]
-if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
-# Get the last assistant message with usage data (macOS compatible)
-# tac doesn't exist on macOS, use grep | tail instead
-LAST_USAGE=$(grep '"usage"' "$TRANSCRIPT_PATH" 2>/dev/null | tail -1)
-… [old line 715 not shown]
-if [ -n "$LAST_USAGE" ]; then
-# Extract token counts from Anthropic API usage object
-INPUT_TOKENS=$(echo "$LAST_USAGE" | jq -r '
-(.message.usage.input_tokens // 0) +
-(.message.usage.cache_creation_input_tokens // 0) +
-(.message.usage.cache_read_input_tokens // 0)
-' 2>/dev/null || echo "0")
-… [old line 723 not shown]
-OUTPUT_TOKENS=$(echo "$LAST_USAGE" | jq -r '.message.usage.output_tokens // 0' 2>/dev/null || echo "0")
-… [old line 725 not shown]
-# Total context = input + output
-TOTAL_CONTEXT_TOKENS=$((INPUT_TOKENS + OUTPUT_TOKENS))
+REDIS_PAYLOAD_FILE=$(mktemp /tmp/ekkos-redis.XXXXXX.json)
+
+# Extract tools used
+TOOLS_USED=$(echo "$LAST_ASSISTANT" | grep -oE '\[TOOL: [^\]]+\]' | sed 's/\[TOOL: //g; s/\]//g' | sort -u | node -e "
+const lines = require('fs').readFileSync('/dev/stdin','utf8').split('\\n').filter(Boolean);
+console.log(JSON.stringify(lines));
+" 2>/dev/null || echo "[]")
+
+# Extract files referenced
+FILES_REFERENCED=$(echo "$FILE_CHANGES" | node -e "
+const d = JSON.parse(require('fs').readFileSync('/dev/stdin','utf8') || '[]');
+console.log(JSON.stringify([...new Set(d.map(f => f.path).filter(Boolean))]));
+" 2>/dev/null || echo "[]")
+
+# Get token counts from transcript
+TOTAL_CONTEXT_TOKENS=0
+INPUT_TOKENS=0
+OUTPUT_TOKENS=0
+
+if [ -n "$TRANSCRIPT_PATH" ] && [ -f "$TRANSCRIPT_PATH" ]; then
+TOKEN_DATA=$(grep '"usage"' "$TRANSCRIPT_PATH" 2>/dev/null | tail -1 | node -e "
+const line = require('fs').readFileSync('/dev/stdin','utf8');
+try {
+const d = JSON.parse(line);
+const u = d.message?.usage || {};
+const input = (u.input_tokens || 0) + (u.cache_creation_input_tokens || 0) + (u.cache_read_input_tokens || 0);
+const output = u.output_tokens || 0;
+console.log(input + ':' + output);
+} catch { console.log('0:0'); }
+" 2>/dev/null || echo "0:0")
+INPUT_TOKENS=$(echo "$TOKEN_DATA" | cut -d: -f1)
+OUTPUT_TOKENS=$(echo "$TOKEN_DATA" | cut -d: -f2)
+TOTAL_CONTEXT_TOKENS=$((INPUT_TOKENS + OUTPUT_TOKENS))
 fi
-… [old lines 729-785 not shown]
-WAL_DIR="$HOME/.ekkos/wal"
-mkdir -p "$WAL_DIR" 2>/dev/null
-cp "$REDIS_PAYLOAD_FILE" "$WAL_DIR/redis-$(date +%s)-$$.json" 2>/dev/null
-else
-# ═══════════════════════════════════════════════════════════════════════════
-# 🎯 ACK: Update local cache ACK cursor after successful Redis flush
-# This enables safe pruning of turns that are backed up to Redis
-# ═══════════════════════════════════════════════════════════════════════════
-if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
-(ekkos-capture ack "$SESSION_ID" "$TURN_NUMBER" >/dev/null 2>&1) &
-fi
+
+# Build Redis payload
+node -e "
+const fs = require('fs');
+const payload = {
+session_name: process.argv[1],
+turn_number: parseInt(process.argv[2]),
+user_query: process.argv[3],
+agent_response: process.argv[4].substring(0, 50000),
+model: process.argv[5],
+tools_used: JSON.parse(process.argv[6] || '[]'),
+files_referenced: JSON.parse(process.argv[7] || '[]'),
+edits: [],
+patterns_used: [],
+total_context_tokens: parseInt(process.argv[8]) || 0,
+input_tokens: parseInt(process.argv[9]) || 0,
+output_tokens: parseInt(process.argv[10]) || 0
+};
+fs.writeFileSync('$REDIS_PAYLOAD_FILE', JSON.stringify(payload));
+" "$SESSION_NAME" "$TURN_NUMBER" "$LAST_USER" "$LAST_ASSISTANT" "$MODEL_USED" "$TOOLS_USED" "$FILES_REFERENCED" "$TOTAL_CONTEXT_TOKENS" "$INPUT_TOKENS" "$OUTPUT_TOKENS" 2>/dev/null
+
+if node -e "JSON.parse(require('fs').readFileSync('$REDIS_PAYLOAD_FILE','utf8'))" 2>/dev/null; then
+MAX_RETRIES=3
+RETRY=0
+REDIS_SUCCESS=false
+
+while [ $RETRY -lt $MAX_RETRIES ] && [ "$REDIS_SUCCESS" = "false" ]; do
+REDIS_RESULT=$(curl -s -w "\n%{http_code}" -X POST "$MEMORY_API_URL/api/v1/working/turn" \
+-H "Authorization: Bearer $AUTH_TOKEN" \
+-H "Content-Type: application/json" \
+-d "@$REDIS_PAYLOAD_FILE" \
+--connect-timeout 3 \
+--max-time 5 2>/dev/null || echo -e "\n000")
+
+REDIS_HTTP_CODE=$(echo "$REDIS_RESULT" | tail -1)
+
+if [ "$REDIS_HTTP_CODE" = "200" ] || [ "$REDIS_HTTP_CODE" = "201" ]; then
+REDIS_SUCCESS=true
+else
+RETRY=$((RETRY + 1))
+[ $RETRY -lt $MAX_RETRIES ] && sleep 0.3
+fi
+done
+
+if [ "$REDIS_SUCCESS" = "false" ]; then
+echo "[ekkOS] Redis capture failed after $MAX_RETRIES attempts: HTTP $REDIS_HTTP_CODE (session: $SESSION_NAME, turn: $TURN_NUMBER)" >&2
+WAL_DIR="$HOME/.ekkos/wal"
+mkdir -p "$WAL_DIR" 2>/dev/null
+cp "$REDIS_PAYLOAD_FILE" "$WAL_DIR/redis-$(date +%s)-$$.json" 2>/dev/null
+else
+# ACK: Update local cache cursor
+# Per ekkOS Onboarding Spec v1.2 ADDENDUM: Pass instanceId for namespacing
+if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
+INSTANCE_ID="${EKKOS_INSTANCE_ID:-default}"
+(ekkos-capture ack "$SESSION_ID" "$TURN_NUMBER" --instance="$INSTANCE_ID" >/dev/null 2>&1) &
+fi
+fi
 fi
-… [old lines 798-867 not shown]
--H "Content-Type: application/json" \
--d "$FAST_PAYLOAD" \
---connect-timeout 1 \
---max-time 2 >/dev/null 2>&1 &
-fi
-… [old line 873 not shown]
-# ═══════════════════════════════════════════════════════════════════════════
-# 💾 LOCAL CACHE: Tier 0 - Update turn with assistant response
-# Updates the turn created by user-prompt-submit hook with the response
-# ═══════════════════════════════════════════════════════════════════════════
-if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
-# Escape response for shell (use base64 for safety with complex content)
-RESPONSE_B64=$(echo "$SCRUBBED_ASSISTANT" | base64 2>/dev/null || echo "")
-if [ -n "$RESPONSE_B64" ]; then
-# Decode and pass to capture command (handles newlines and special chars)
-DECODED_RESPONSE=$(echo "$RESPONSE_B64" | base64 -d 2>/dev/null || echo "")
-if [ -n "$DECODED_RESPONSE" ]; then
-(ekkos-capture response "$SESSION_ID" "$TURN_NUMBER" "$DECODED_RESPONSE" "$TOOLS_USED" "$FILES_REFERENCED" \
->/dev/null 2>&1) &
-fi
+
+rm -f "$REDIS_PAYLOAD_FILE" 2>/dev/null
+
+# ═══════════════════════════════════════════════════════════════════════════
+# FAST CAPTURE: Structured context for instant /continue
+# ═══════════════════════════════════════════════════════════════════════════
+USER_DECISION=$(echo "$LAST_USER" | grep -oiE "^(yes|no|ok|do it|go ahead|approved|confirmed|use .{1,30} instead)" | head -1 || echo "")
+USER_CORRECTION=$(echo "$LAST_USER" | grep -oiE "(actually|no,? I meant|not that|wrong|instead)" | head -1 || echo "")
+USER_PREFERENCE=$(echo "$LAST_USER" | grep -oiE "(always|never|I prefer|don.t|avoid) .{1,50}" | head -1 || echo "")
+
+ERRORS_FOUND=$(echo "$LAST_ASSISTANT" | grep -oiE "(error|failed|cannot|exception|not found).{0,80}" | head -3 | node -e "
+const lines = require('fs').readFileSync('/dev/stdin','utf8').split('\\n').filter(Boolean);
+console.log(JSON.stringify(lines));
+" 2>/dev/null || echo "[]")
+
+GIT_CHANGED=$(git diff --name-only 2>/dev/null | head -10 | node -e "
+const lines = require('fs').readFileSync('/dev/stdin','utf8').split('\\n').filter(Boolean);
+console.log(JSON.stringify(lines));
+" 2>/dev/null || echo "[]")
+
+GIT_STAT=$(git diff --stat 2>/dev/null | tail -1 | tr -d '\n' || echo "")
+
+COMMANDS_RUN=$(echo "$LAST_ASSISTANT" | grep -oE '\$ [^\n]{1,50}' | head -5 | sed 's/^\$ //' | node -e "
+const lines = require('fs').readFileSync('/dev/stdin','utf8').split('\\n').filter(Boolean);
+console.log(JSON.stringify(lines));
+" 2>/dev/null || echo "[]")
+
+# Build and send fast-capture
+FAST_PAYLOAD=$(node -e "
+console.log(JSON.stringify({
+session_name: process.argv[1],
+turn_number: parseInt(process.argv[2]),
+user_intent: process.argv[3].substring(0, 200),
+user_decision: process.argv[4] || null,
+user_correction: process.argv[5] || null,
+user_preference: process.argv[6] || null,
+tools_used: JSON.parse(process.argv[7] || '[]'),
+files_modified: JSON.parse(process.argv[8] || '[]'),
+commands_run: JSON.parse(process.argv[9] || '[]'),
+errors: JSON.parse(process.argv[10] || '[]'),
+git_files_changed: JSON.parse(process.argv[11] || '[]'),
+git_diff_stat: process.argv[12] || null,
+outcome: 'success'
+}));
+" "$SESSION_NAME" "$TURN_NUMBER" "$LAST_USER" "$USER_DECISION" "$USER_CORRECTION" "$USER_PREFERENCE" "$TOOLS_USED" "$FILES_REFERENCED" "$COMMANDS_RUN" "$ERRORS_FOUND" "$GIT_CHANGED" "$GIT_STAT" 2>/dev/null)
+
+if [ -n "$FAST_PAYLOAD" ]; then
+curl -s -X POST "$MEMORY_API_URL/api/v1/working/fast-capture" \
+-H "Authorization: Bearer $AUTH_TOKEN" \
+-H "Content-Type: application/json" \
+-d "$FAST_PAYLOAD" \
+--connect-timeout 1 \
+--max-time 2 >/dev/null 2>&1 &
+fi
+
+# ═══════════════════════════════════════════════════════════════════════════
+# LOCAL CACHE: Tier 0 - Update turn with assistant response
+# Per ekkOS Onboarding Spec v1.2 ADDENDUM: Pass instanceId for namespacing
+# ═══════════════════════════════════════════════════════════════════════════
+if command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
+RESPONSE_B64=$(echo "$LAST_ASSISTANT" | base64 2>/dev/null || echo "")
+if [ -n "$RESPONSE_B64" ]; then
+DECODED_RESPONSE=$(echo "$RESPONSE_B64" | base64 -d 2>/dev/null || echo "")
+if [ -n "$DECODED_RESPONSE" ]; then
+# NEW format: ekkos-capture response <instance_id> <session_id> <turn_id> <response> [tools] [files]
+INSTANCE_ID="${EKKOS_INSTANCE_ID:-default}"
+(ekkos-capture response "$INSTANCE_ID" "$SESSION_ID" "$TURN_NUMBER" "$DECODED_RESPONSE" "$TOOLS_USED" "$FILES_REFERENCED" \
+>/dev/null 2>&1) &
+fi
+fi
 fi
-fi
+-fi
 fi

 # ═══════════════════════════════════════════════════════════════════════════
-#
-#
+# FALLBACK LOCAL CACHE UPDATE
+# Per ekkOS Onboarding Spec v1.2 ADDENDUM: Pass instanceId for namespacing
 # ═══════════════════════════════════════════════════════════════════════════
 if [ -n "$LAST_ASSISTANT" ] && command -v ekkos-capture &>/dev/null && [ -n "$SESSION_ID" ]; then
-… [old lines 897-910 not shown]
+if [ -z "$LAST_USER" ]; then
+echo "[ekkOS DEBUG] Fallback local cache update: LAST_ASSISTANT available, updating turn $TURN_NUMBER" >> "$HOME/.ekkos/capture-debug.log" 2>/dev/null
+RESPONSE_B64=$(echo "$LAST_ASSISTANT" | base64 2>/dev/null || echo "")
+if [ -n "$RESPONSE_B64" ]; then
+DECODED_RESPONSE=$(echo "$RESPONSE_B64" | base64 -d 2>/dev/null || echo "")
+if [ -n "$DECODED_RESPONSE" ]; then
+TOOLS_USED=$(echo "$LAST_ASSISTANT" | grep -oE '\[TOOL: [^\]]+\]' | sed 's/\[TOOL: //g; s/\]//g' | sort -u | node -e "
+const lines = require('fs').readFileSync('/dev/stdin','utf8').split('\\n').filter(Boolean);
+console.log(JSON.stringify(lines));
+" 2>/dev/null || echo "[]")
+# NEW format: ekkos-capture response <instance_id> <session_id> <turn_id> <response> [tools] [files]
+INSTANCE_ID="${EKKOS_INSTANCE_ID:-default}"
+(ekkos-capture response "$INSTANCE_ID" "$SESSION_ID" "$TURN_NUMBER" "$DECODED_RESPONSE" "$TOOLS_USED" "[]" \
+>/dev/null 2>&1) &
+fi
+fi
 fi
-fi
+-fi
 fi

 # ═══════════════════════════════════════════════════════════════════════════
-#
+# GOLDEN LOOP: DETECT PHASES FROM RESPONSE
 # ═══════════════════════════════════════════════════════════════════════════
 GOLDEN_LOOP_FILE="$PROJECT_ROOT/.ekkos/golden-loop-current.json"

 if [ -n "$LAST_ASSISTANT" ] && [ -f "$GOLDEN_LOOP_FILE" ]; then
-… [old lines 921-940 not shown]
-elif [ "$APPLIED" -gt 0 ]; then
-CURRENT_PHASE="inject"
-elif [ "$RETRIEVED" -gt 0 ]; then
-CURRENT_PHASE="retrieve"
-fi
-… [old line 946 not shown]
-# Update Golden Loop file with detected stats
-jq -n \
---arg phase "$CURRENT_PHASE" \
---argjson turn "$TURN_NUMBER" \
---arg session "$SESSION_NAME" \
---arg timestamp "$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
---argjson retrieved "$RETRIEVED" \
---argjson applied "$APPLIED" \
---argjson forged "$FORGED" \
-'{
-phase: $phase,
-turn: $turn,
-session: $session,
-timestamp: $timestamp,
-stats: {
-retrieved: $retrieved,
-applied: $applied,
-forged: $forged
-}
-}' > "$GOLDEN_LOOP_FILE" 2>/dev/null || true
+RETRIEVED=$(echo "$LAST_ASSISTANT" | grep -c "ekkOS_Search" 2>/dev/null || echo "0")
+APPLIED=$(echo "$LAST_ASSISTANT" | grep -c "\[ekkOS_SELECT\]" 2>/dev/null || echo "0")
+FORGED=$(echo "$LAST_ASSISTANT" | grep -c "ekkOS_Forge" 2>/dev/null || echo "0")
+
+CURRENT_PHASE="complete"
+[ "$FORGED" -gt 0 ] && CURRENT_PHASE="measure"
+[ "$APPLIED" -gt 0 ] && CURRENT_PHASE="inject"
+[ "$RETRIEVED" -gt 0 ] && CURRENT_PHASE="retrieve"
+
+node -e "
+const fs = require('fs');
+const data = {
+phase: '$CURRENT_PHASE',
+turn: $TURN_NUMBER,
+session: '$SESSION_NAME',
+timestamp: new Date().toISOString(),
+stats: { retrieved: $RETRIEVED, applied: $APPLIED, forged: $FORGED }
+};
+fs.writeFileSync('$GOLDEN_LOOP_FILE', JSON.stringify(data, null, 2));
+" 2>/dev/null || true
 fi

 # ═══════════════════════════════════════════════════════════════════════════
-# Update local .ekkos/current-focus.md
+# Update local .ekkos/current-focus.md
 # ═══════════════════════════════════════════════════════════════════════════
 EKKOS_LOCAL_DIR="$PROJECT_ROOT/.ekkos"
 if [ -d "$EKKOS_LOCAL_DIR" ] && [ -n "$LAST_USER" ]; then
-… [old lines 974-976 not shown]
+FOCUS_FILE="$EKKOS_LOCAL_DIR/current-focus.md"
+TASK_SUMMARY="${LAST_USER:0:100}"
+[ ${#LAST_USER} -gt 100 ] && TASK_SUMMARY="${TASK_SUMMARY}..."

-… [old line 978 not shown]
+cat > "$FOCUS_FILE" << EOF
 ---
 last_updated: $(date -u +%Y-%m-%dT%H:%M:%SZ)
 session_id: ${SESSION_ID}