aether-colony 3.1.17 → 5.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/{runtime → .aether}/CONTEXT.md +1 -1
- package/{runtime → .aether}/aether-utils.sh +1772 -98
- package/.aether/docs/QUEEN-SYSTEM.md +211 -0
- package/.aether/docs/QUEEN.md +84 -0
- package/.aether/docs/README.md +68 -0
- package/.aether/docs/caste-system.md +48 -0
- package/{runtime → .aether/docs/disciplines}/DISCIPLINES.md +8 -8
- package/.aether/docs/error-codes.md +268 -0
- package/{runtime → .aether}/docs/known-issues.md +42 -26
- package/.aether/docs/queen-commands.md +97 -0
- package/.aether/exchange/colony-registry.xml +11 -0
- package/{runtime → .aether}/exchange/pheromone-xml.sh +2 -1
- package/.aether/exchange/pheromones.xml +87 -0
- package/.aether/exchange/queen-wisdom.xml +14 -0
- package/{runtime → .aether}/exchange/registry-xml.sh +7 -3
- package/{runtime → .aether}/exchange/wisdom-xml.sh +11 -4
- package/.aether/rules/aether-colony.md +134 -0
- package/.aether/schemas/example-prompt-builder.xml +234 -0
- package/.aether/templates/colony-state-reset.jq.template +22 -0
- package/.aether/templates/colony-state.template.json +35 -0
- package/.aether/templates/constraints.template.json +9 -0
- package/.aether/templates/crowned-anthill.template.md +36 -0
- package/.aether/templates/handoff-build-error.template.md +30 -0
- package/.aether/templates/handoff-build-success.template.md +39 -0
- package/.aether/templates/handoff.template.md +40 -0
- package/{runtime → .aether}/utils/atomic-write.sh +5 -5
- package/{runtime → .aether}/utils/chamber-compare.sh +23 -10
- package/{runtime → .aether}/utils/chamber-utils.sh +32 -20
- package/{runtime → .aether}/utils/error-handler.sh +13 -1
- package/{runtime → .aether}/utils/file-lock.sh +49 -13
- package/.aether/utils/semantic-cli.sh +413 -0
- package/{runtime → .aether}/utils/xml-compose.sh +7 -1
- package/.aether/utils/xml-convert.sh +273 -0
- package/.aether/utils/xml-query.sh +201 -0
- package/.aether/utils/xml-utils.sh +110 -0
- package/{runtime → .aether}/workers.md +14 -17
- package/.claude/agents/ant/aether-ambassador.md +264 -0
- package/.claude/agents/ant/aether-archaeologist.md +322 -0
- package/.claude/agents/ant/aether-auditor.md +266 -0
- package/.claude/agents/ant/aether-builder.md +187 -0
- package/.claude/agents/ant/aether-chaos.md +268 -0
- package/.claude/agents/ant/aether-chronicler.md +304 -0
- package/.claude/agents/ant/aether-gatekeeper.md +325 -0
- package/.claude/agents/ant/aether-includer.md +373 -0
- package/.claude/agents/ant/aether-keeper.md +271 -0
- package/.claude/agents/ant/aether-measurer.md +317 -0
- package/.claude/agents/ant/aether-probe.md +210 -0
- package/.claude/agents/ant/aether-queen.md +325 -0
- package/.claude/agents/ant/aether-route-setter.md +173 -0
- package/.claude/agents/ant/aether-sage.md +353 -0
- package/.claude/agents/ant/aether-scout.md +142 -0
- package/.claude/agents/ant/aether-surveyor-disciplines.md +416 -0
- package/.claude/agents/ant/aether-surveyor-nest.md +354 -0
- package/.claude/agents/ant/aether-surveyor-pathogens.md +288 -0
- package/.claude/agents/ant/aether-surveyor-provisions.md +359 -0
- package/.claude/agents/ant/aether-tracker.md +265 -0
- package/.claude/agents/ant/aether-watcher.md +244 -0
- package/.claude/agents/ant/aether-weaver.md +247 -0
- package/.claude/commands/ant/archaeology.md +16 -7
- package/.claude/commands/ant/build.md +415 -284
- package/.claude/commands/ant/chaos.md +19 -10
- package/.claude/commands/ant/colonize.md +58 -24
- package/.claude/commands/ant/continue.md +155 -145
- package/.claude/commands/ant/council.md +15 -5
- package/.claude/commands/ant/dream.md +16 -7
- package/.claude/commands/ant/entomb.md +274 -157
- package/.claude/commands/ant/feedback.md +33 -29
- package/.claude/commands/ant/flag.md +18 -10
- package/.claude/commands/ant/flags.md +14 -6
- package/.claude/commands/ant/focus.md +29 -21
- package/.claude/commands/ant/help.md +11 -1
- package/.claude/commands/ant/history.md +10 -0
- package/.claude/commands/ant/init.md +91 -65
- package/.claude/commands/ant/interpret.md +15 -4
- package/.claude/commands/ant/lay-eggs.md +55 -7
- package/.claude/commands/ant/maturity.md +11 -1
- package/.claude/commands/ant/migrate-state.md +14 -2
- package/.claude/commands/ant/oracle.md +23 -15
- package/.claude/commands/ant/organize.md +29 -20
- package/.claude/commands/ant/pause-colony.md +17 -7
- package/.claude/commands/ant/phase.md +17 -8
- package/.claude/commands/ant/plan.md +20 -9
- package/.claude/commands/ant/redirect.md +29 -32
- package/.claude/commands/ant/resume-colony.md +19 -9
- package/.claude/commands/ant/resume.md +272 -96
- package/.claude/commands/ant/seal.md +201 -191
- package/.claude/commands/ant/status.md +71 -32
- package/.claude/commands/ant/swarm.md +26 -44
- package/.claude/commands/ant/tunnels.md +279 -105
- package/.claude/commands/ant/update.md +81 -20
- package/.claude/commands/ant/verify-castes.md +14 -4
- package/.claude/commands/ant/watch.md +13 -12
- package/.opencode/agents/aether-ambassador.md +63 -20
- package/.opencode/agents/aether-archaeologist.md +29 -12
- package/.opencode/agents/aether-auditor.md +51 -18
- package/.opencode/agents/aether-builder.md +69 -19
- package/.opencode/agents/aether-chaos.md +29 -12
- package/.opencode/agents/aether-chronicler.md +60 -18
- package/.opencode/agents/aether-gatekeeper.md +27 -18
- package/.opencode/agents/aether-includer.md +27 -18
- package/.opencode/agents/aether-keeper.md +89 -18
- package/.opencode/agents/aether-measurer.md +27 -18
- package/.opencode/agents/aether-probe.md +60 -18
- package/.opencode/agents/aether-queen.md +172 -24
- package/.opencode/agents/aether-route-setter.md +57 -12
- package/.opencode/agents/aether-sage.md +26 -18
- package/.opencode/agents/aether-scout.md +27 -19
- package/.opencode/agents/aether-surveyor-disciplines.md +53 -1
- package/.opencode/agents/aether-surveyor-nest.md +53 -1
- package/.opencode/agents/aether-surveyor-pathogens.md +51 -1
- package/.opencode/agents/aether-surveyor-provisions.md +53 -1
- package/.opencode/agents/aether-tracker.md +64 -18
- package/.opencode/agents/aether-watcher.md +66 -19
- package/.opencode/agents/aether-weaver.md +61 -18
- package/.opencode/commands/ant/build.md +406 -192
- package/.opencode/commands/ant/continue.md +66 -76
- package/.opencode/commands/ant/entomb.md +106 -45
- package/.opencode/commands/ant/init.md +46 -48
- package/.opencode/commands/ant/organize.md +5 -5
- package/.opencode/commands/ant/resume.md +334 -0
- package/.opencode/commands/ant/seal.md +33 -24
- package/.opencode/commands/ant/status.md +11 -0
- package/.opencode/commands/ant/tunnels.md +149 -0
- package/.opencode/commands/ant/update.md +59 -16
- package/CHANGELOG.md +79 -0
- package/README.md +135 -353
- package/bin/cli.js +243 -122
- package/bin/generate-commands.sh +2 -2
- package/bin/lib/init.js +13 -3
- package/bin/lib/update-transaction.js +119 -117
- package/bin/sync-to-runtime.sh +5 -137
- package/bin/validate-package.sh +84 -0
- package/package.json +9 -6
- package/.opencode/agents/aether-architect.md +0 -66
- package/.opencode/agents/aether-guardian.md +0 -107
- package/.opencode/agents/workers.md +0 -1034
- package/runtime/QUEEN_ANT_ARCHITECTURE.md +0 -402
- package/runtime/data/signatures.json +0 -41
- package/runtime/docs/AETHER-2.0-IMPLEMENTATION-PLAN.md +0 -1343
- package/runtime/docs/AETHER-PHEROMONE-SYSTEM-MASTER-SPEC.md +0 -2642
- package/runtime/docs/PHEROMONE-INJECTION.md +0 -240
- package/runtime/docs/PHEROMONE-INTEGRATION.md +0 -192
- package/runtime/docs/PHEROMONE-SYSTEM-DESIGN.md +0 -426
- package/runtime/docs/README.md +0 -94
- package/runtime/docs/VISUAL-OUTPUT-SPEC.md +0 -219
- package/runtime/docs/biological-reference.md +0 -272
- package/runtime/docs/codebase-review.md +0 -399
- package/runtime/docs/command-sync.md +0 -164
- package/runtime/docs/constraints.md +0 -116
- package/runtime/docs/implementation-learnings.md +0 -89
- package/runtime/docs/namespace.md +0 -148
- package/runtime/docs/pathogen-schema-example.json +0 -36
- package/runtime/docs/pathogen-schema.md +0 -111
- package/runtime/docs/planning-discipline.md +0 -159
- package/runtime/docs/progressive-disclosure.md +0 -184
- package/runtime/lib/queen-utils.sh +0 -729
- package/runtime/planning.md +0 -159
- package/runtime/recover.sh +0 -136
- package/runtime/utils/xml-utils.sh +0 -2196
- package/runtime/workers-new-castes.md +0 -516
- /package/{runtime → .aether/docs/disciplines}/coding-standards.md +0 -0
- /package/{runtime → .aether/docs/disciplines}/debugging.md +0 -0
- /package/{runtime → .aether/docs/disciplines}/learning.md +0 -0
- /package/{runtime → .aether/docs/disciplines}/tdd.md +0 -0
- /package/{runtime → .aether/docs/disciplines}/verification-loop.md +0 -0
- /package/{runtime → .aether/docs/disciplines}/verification.md +0 -0
- /package/{runtime → .aether}/docs/pheromones.md +0 -0
- /package/{runtime → .aether}/model-profiles.yaml +0 -0
- /package/{runtime → .aether}/schemas/aether-types.xsd +0 -0
- /package/{runtime → .aether}/schemas/colony-registry.xsd +0 -0
- /package/{runtime → .aether}/schemas/pheromone.xsd +0 -0
- /package/{runtime → .aether}/schemas/prompt.xsd +0 -0
- /package/{runtime → .aether}/schemas/queen-wisdom.xsd +0 -0
- /package/{runtime → .aether}/schemas/worker-priming.xsd +0 -0
- /package/{runtime → .aether}/templates/QUEEN.md.template +0 -0
- /package/{runtime → .aether}/utils/colorize-log.sh +0 -0
- /package/{runtime → .aether}/utils/queen-to-md.xsl +0 -0
- /package/{runtime → .aether}/utils/spawn-tree.sh +0 -0
- /package/{runtime → .aether}/utils/spawn-with-model.sh +0 -0
- /package/{runtime → .aether}/utils/state-loader.sh +0 -0
- /package/{runtime → .aether}/utils/swarm-display.sh +0 -0
- /package/{runtime → .aether}/utils/watch-spawn-tree.sh +0 -0
- /package/{runtime → .aether}/utils/xml-core.sh +0 -0
|
@@ -16,7 +16,7 @@ trap 'if type error_handler &>/dev/null; then error_handler ${LINENO} "$BASH_COM
|
|
|
16
16
|
|
|
17
17
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
|
18
18
|
AETHER_ROOT="$(cd "$SCRIPT_DIR/.." && pwd 2>/dev/null || echo "$SCRIPT_DIR")"
|
|
19
|
-
DATA_DIR="$AETHER_ROOT/.aether/data"
|
|
19
|
+
DATA_DIR="${DATA_DIR:-$AETHER_ROOT/.aether/data}"
|
|
20
20
|
|
|
21
21
|
# Initialize lock state before sourcing (file-lock.sh trap needs these)
|
|
22
22
|
LOCK_ACQUIRED=${LOCK_ACQUIRED:-false}
|
|
@@ -28,6 +28,7 @@ CURRENT_LOCK=${CURRENT_LOCK:-""}
|
|
|
28
28
|
[[ -f "$SCRIPT_DIR/utils/error-handler.sh" ]] && source "$SCRIPT_DIR/utils/error-handler.sh"
|
|
29
29
|
[[ -f "$SCRIPT_DIR/utils/chamber-utils.sh" ]] && source "$SCRIPT_DIR/utils/chamber-utils.sh"
|
|
30
30
|
[[ -f "$SCRIPT_DIR/utils/xml-utils.sh" ]] && source "$SCRIPT_DIR/utils/xml-utils.sh"
|
|
31
|
+
[[ -f "$SCRIPT_DIR/utils/semantic-cli.sh" ]] && source "$SCRIPT_DIR/utils/semantic-cli.sh"
|
|
31
32
|
|
|
32
33
|
# Fallback error constants if error-handler.sh wasn't sourced
|
|
33
34
|
# This prevents "unbound variable" errors in older installations
|
|
@@ -37,26 +38,13 @@ CURRENT_LOCK=${CURRENT_LOCK:-""}
|
|
|
37
38
|
: "${E_FILE_NOT_FOUND:=E_FILE_NOT_FOUND}"
|
|
38
39
|
: "${E_JSON_INVALID:=E_JSON_INVALID}"
|
|
39
40
|
: "${E_LOCK_FAILED:=E_LOCK_FAILED}"
|
|
41
|
+
: "${E_LOCK_STALE:=E_LOCK_STALE}"
|
|
40
42
|
: "${E_GIT_ERROR:=E_GIT_ERROR}"
|
|
41
43
|
: "${E_VALIDATION_FAILED:=E_VALIDATION_FAILED}"
|
|
42
44
|
: "${E_FEATURE_UNAVAILABLE:=E_FEATURE_UNAVAILABLE}"
|
|
43
45
|
: "${E_BASH_ERROR:=E_BASH_ERROR}"
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
# These checks run silently - failures are logged but don't block operation
|
|
47
|
-
if type feature_disable &>/dev/null; then
|
|
48
|
-
# Check if DATA_DIR is writable for activity logging
|
|
49
|
-
[[ -w "$DATA_DIR" ]] 2>/dev/null || feature_disable "activity_log" "DATA_DIR not writable"
|
|
50
|
-
|
|
51
|
-
# Check if git is available for git integration
|
|
52
|
-
command -v git &>/dev/null || feature_disable "git_integration" "git not installed"
|
|
53
|
-
|
|
54
|
-
# Check if jq is available for JSON processing
|
|
55
|
-
command -v jq &>/dev/null || feature_disable "json_processing" "jq not installed"
|
|
56
|
-
|
|
57
|
-
# Check if lock utilities are available
|
|
58
|
-
[[ -f "$SCRIPT_DIR/utils/file-lock.sh" ]] || feature_disable "file_locking" "lock utilities not available"
|
|
59
|
-
fi
|
|
46
|
+
: "${E_DEPENDENCY_MISSING:=E_DEPENDENCY_MISSING}"
|
|
47
|
+
: "${E_RESOURCE_NOT_FOUND:=E_RESOURCE_NOT_FOUND}"
|
|
60
48
|
|
|
61
49
|
# Fallback atomic_write if not sourced (uses temp file + mv for true atomicity)
|
|
62
50
|
if ! type atomic_write &>/dev/null; then
|
|
@@ -77,14 +65,62 @@ json_ok() { printf '{"ok":true,"result":%s}\n' "$1"; }
|
|
|
77
65
|
# Error: JSON to stderr, exit 1
|
|
78
66
|
# Use enhanced json_err from error-handler.sh if available, otherwise fallback
|
|
79
67
|
if ! type json_err &>/dev/null; then
|
|
80
|
-
# Fallback:
|
|
68
|
+
# Fallback: error-handler.sh failed to load. Emits minimal but parseable JSON.
|
|
69
|
+
# Diagnostic note tells the user their installation may be incomplete.
|
|
81
70
|
json_err() {
|
|
82
|
-
local
|
|
83
|
-
|
|
71
|
+
local code="${1:-E_UNKNOWN}"
|
|
72
|
+
local message="${2:-An unknown error occurred}"
|
|
73
|
+
printf '[aether] Warning: error-handler.sh not loaded — using minimal fallback\n' >&2
|
|
74
|
+
printf '{"ok":false,"error":{"code":"%s","message":"%s"}}\n' "$code" "$message" >&2
|
|
84
75
|
exit 1
|
|
85
76
|
}
|
|
86
77
|
fi
|
|
87
78
|
|
|
79
|
+
# Feature detection for graceful degradation
|
|
80
|
+
# ARCH-09: runs AFTER all fallback definitions (atomic_write, json_ok, json_err)
|
|
81
|
+
# so feature_disable is never called before those functions exist.
|
|
82
|
+
# These checks run silently - failures are logged but don't block operation
|
|
83
|
+
if type feature_disable &>/dev/null; then
|
|
84
|
+
# Check if DATA_DIR is writable for activity logging
|
|
85
|
+
[[ -w "$DATA_DIR" ]] 2>/dev/null || feature_disable "activity_log" "DATA_DIR not writable"
|
|
86
|
+
|
|
87
|
+
# Check if git is available for git integration
|
|
88
|
+
command -v git &>/dev/null || feature_disable "git_integration" "git not installed"
|
|
89
|
+
|
|
90
|
+
# Check if jq is available for JSON processing
|
|
91
|
+
command -v jq &>/dev/null || feature_disable "json_processing" "jq not installed"
|
|
92
|
+
|
|
93
|
+
# Check if lock utilities are available
|
|
94
|
+
[[ -f "$SCRIPT_DIR/utils/file-lock.sh" ]] || feature_disable "file_locking" "lock utilities not available"
|
|
95
|
+
fi
|
|
96
|
+
|
|
97
|
+
# Composed exit cleanup — replaces individual traps from file-lock.sh and atomic-write.sh
|
|
98
|
+
# ARCH-10: bash traps are single-valued per signal — last trap set wins.
|
|
99
|
+
# This function ensures both lock and temp cleanup run on every exit path.
|
|
100
|
+
# Must be set AFTER file-lock.sh is sourced so it overrides the individual
|
|
101
|
+
# 'trap cleanup_locks EXIT TERM INT HUP' set by file-lock.sh.
|
|
102
|
+
_aether_exit_cleanup() {
|
|
103
|
+
cleanup_locks 2>/dev/null || true
|
|
104
|
+
cleanup_temp_files 2>/dev/null || true
|
|
105
|
+
}
|
|
106
|
+
trap '_aether_exit_cleanup' EXIT TERM INT HUP
|
|
107
|
+
|
|
108
|
+
# Startup cleanup — remove temp files from dead sessions (PID-based orphan detection)
|
|
109
|
+
# ARCH-10: runs once at startup, silent (matches lock cleanup behavior)
|
|
110
|
+
_cleanup_orphaned_temp_files() {
|
|
111
|
+
local temp_dir="${TEMP_DIR:-$AETHER_ROOT/.aether/temp}"
|
|
112
|
+
[[ -d "$temp_dir" ]] || return 0
|
|
113
|
+
while IFS= read -r -d '' tmp_file; do
|
|
114
|
+
local file_pid
|
|
115
|
+
file_pid=$(basename "$tmp_file" | awk -F'.' '{print $(NF-2)}')
|
|
116
|
+
if [[ "$file_pid" =~ ^[0-9]+$ ]] && ! kill -0 "$file_pid" 2>/dev/null; then
|
|
117
|
+
rm -f "$tmp_file" 2>/dev/null || true
|
|
118
|
+
fi
|
|
119
|
+
done < <(find "$temp_dir" -maxdepth 1 -name "*.tmp" -print0 2>/dev/null)
|
|
120
|
+
}
|
|
121
|
+
# Run orphan cleanup on startup (silent — matches cleanup_locks behavior)
|
|
122
|
+
type cleanup_temp_files &>/dev/null && _cleanup_orphaned_temp_files
|
|
123
|
+
|
|
88
124
|
# --- Caste emoji helper ---
|
|
89
125
|
get_caste_emoji() {
|
|
90
126
|
case "$1" in
|
|
@@ -110,10 +146,88 @@ get_caste_emoji() {
|
|
|
110
146
|
*Probe*|*probe*|*Test*|*Excavat*|*Uncover*|*Edge*|*Case*|*Mutant*) echo "🧪🐜" ;;
|
|
111
147
|
*Tracker*|*tracker*|*Debug*|*Trace*|*Follow*|*Bug*|*Hunt*|*Root*) echo "🐛🐜" ;;
|
|
112
148
|
*Weaver*|*weaver*|*Refactor*|*Restruct*|*Transform*|*Clean*|*Pattern*|*Weave*) echo "🔄🐜" ;;
|
|
149
|
+
*Dreamer*|*dreamer*|*Dream*|*Muse*|*Imagine*|*Wonder*|*Ponder*|*Reverie*) echo "💭🐜" ;;
|
|
113
150
|
*) echo "🐜" ;;
|
|
114
151
|
esac
|
|
115
152
|
}
|
|
116
153
|
|
|
154
|
+
# --- Progress bar helper ---
|
|
155
|
+
# Usage: generate-progress-bar <current> <total> [width]
|
|
156
|
+
# Returns: "[████████░░░░░░░░] 8/20" format string
|
|
157
|
+
generate-progress-bar() {
|
|
158
|
+
local current="${1:-0}"
|
|
159
|
+
local total="${2:-1}"
|
|
160
|
+
local width="${3:-20}"
|
|
161
|
+
|
|
162
|
+
# Prevent division by zero
|
|
163
|
+
[[ "$total" -lt 1 ]] && total=1
|
|
164
|
+
[[ "$current" -lt 0 ]] && current=0
|
|
165
|
+
[[ "$current" -gt "$total" ]] && current="$total"
|
|
166
|
+
|
|
167
|
+
# Calculate filled/empty segments
|
|
168
|
+
local filled=$(( (current * width) / total ))
|
|
169
|
+
local empty=$(( width - filled ))
|
|
170
|
+
|
|
171
|
+
# Build bar with Unicode block characters
|
|
172
|
+
local bar=""
|
|
173
|
+
for ((i=0; i<filled; i++)); do bar+="█"; done
|
|
174
|
+
for ((i=0; i<empty; i++)); do bar+="░"; done
|
|
175
|
+
|
|
176
|
+
echo "[$bar] $current/$total"
|
|
177
|
+
}
|
|
178
|
+
|
|
179
|
+
# --- Standard banner helper ---
|
|
180
|
+
# Usage: print-standard-banner <title>
|
|
181
|
+
# Outputs a standardized banner with heavy horizontal lines (U+2501)
|
|
182
|
+
print-standard-banner() {
|
|
183
|
+
local title="$1"
|
|
184
|
+
|
|
185
|
+
# Convert title to spaced uppercase
|
|
186
|
+
local spaced_title
|
|
187
|
+
spaced_title=$(echo "$title" | tr '[:lower:]' '[:upper:]' | sed 's/./& /g' | sed 's/ $//')
|
|
188
|
+
|
|
189
|
+
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
190
|
+
echo " $spaced_title"
|
|
191
|
+
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
# --- Next Up block helper ---
|
|
195
|
+
# Usage: print-next-up <state> [current_phase] [total_phases]
|
|
196
|
+
# Outputs a Next Up block with state-based suggestions
|
|
197
|
+
print-next-up() {
|
|
198
|
+
local state="${1:-IDLE}"
|
|
199
|
+
local current_phase="${2:-0}"
|
|
200
|
+
local total_phases="${3:-0}"
|
|
201
|
+
local next_phase=$((current_phase + 1))
|
|
202
|
+
|
|
203
|
+
echo "──────────────────────────────────────────────────"
|
|
204
|
+
echo "🐜 Next Up"
|
|
205
|
+
echo "──────────────────────────────────────────────────"
|
|
206
|
+
|
|
207
|
+
case "$state" in
|
|
208
|
+
IDLE)
|
|
209
|
+
echo " /ant:init 🌱 Start a new colony"
|
|
210
|
+
echo " /ant:status 📊 Check current state"
|
|
211
|
+
;;
|
|
212
|
+
READY)
|
|
213
|
+
echo " /ant:build $next_phase 🔨 Build phase $next_phase"
|
|
214
|
+
echo " /ant:phase $next_phase 📋 Review phase details"
|
|
215
|
+
echo " /ant:focus 🎯 Guide colony attention"
|
|
216
|
+
;;
|
|
217
|
+
EXECUTING)
|
|
218
|
+
echo " /ant:continue ➡️ Continue current build"
|
|
219
|
+
echo " /ant:status 📊 Check build progress"
|
|
220
|
+
;;
|
|
221
|
+
PLANNING)
|
|
222
|
+
echo " /ant:plan 📝 Create execution plan"
|
|
223
|
+
echo " /ant:status 📊 Check current state"
|
|
224
|
+
;;
|
|
225
|
+
*)
|
|
226
|
+
echo " /ant:status 📊 Check colony state"
|
|
227
|
+
;;
|
|
228
|
+
esac
|
|
229
|
+
}
|
|
230
|
+
|
|
117
231
|
# ============================================
|
|
118
232
|
# CONTEXT UPDATE HELPER FUNCTION
|
|
119
233
|
# (Defined outside case block to fix SC2168: local outside function)
|
|
@@ -125,6 +239,19 @@ _cmd_context_update() {
|
|
|
125
239
|
local ctx_ts
|
|
126
240
|
ctx_ts=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
127
241
|
|
|
242
|
+
# Check for empty action first - show usage message
|
|
243
|
+
if [[ -z "$ctx_action" ]]; then
|
|
244
|
+
json_err "$E_VALIDATION_FAILED" "No action specified. Suggestion: Use one of: init, update-phase, activity, constraint, decision, safe-to-clear, build-start, worker-spawn, worker-complete, build-progress, build-complete"
|
|
245
|
+
fi
|
|
246
|
+
|
|
247
|
+
# Acquire lock for context-update operations (LOCK-04: prevent concurrent corruption)
|
|
248
|
+
local _ctx_lock_held=false
|
|
249
|
+
if type acquire_lock &>/dev/null && type feature_enabled &>/dev/null && feature_enabled "file_locking"; then
|
|
250
|
+
acquire_lock "$ctx_file" || json_err "$E_LOCK_FAILED" "Failed to acquire CONTEXT.md lock for context-update"
|
|
251
|
+
_ctx_lock_held=true
|
|
252
|
+
trap 'release_lock 2>/dev/null || true' EXIT
|
|
253
|
+
fi
|
|
254
|
+
|
|
128
255
|
ensure_context_dir() {
|
|
129
256
|
local dir
|
|
130
257
|
dir=$(dirname "$ctx_file")
|
|
@@ -186,7 +313,7 @@ Colony initialization in progress...
|
|
|
186
313
|
|
|
187
314
|
| Constraint | Source | Date Set |
|
|
188
315
|
|------------|--------|----------|
|
|
189
|
-
| In the Aether repo, \`.aether/\` IS the source of truth —
|
|
316
|
+
| In the Aether repo, \`.aether/\` IS the source of truth — published directly via npm (private dirs excluded by .npmignore) | CLAUDE.md | Permanent |
|
|
190
317
|
| Never push without explicit user approval | CLAUDE.md Safety | Permanent |
|
|
191
318
|
|
|
192
319
|
---
|
|
@@ -268,7 +395,7 @@ EOF
|
|
|
268
395
|
local safe_clear="${4:-NO}"
|
|
269
396
|
local safe_reason="${5:-Phase in progress}"
|
|
270
397
|
|
|
271
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
398
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
272
399
|
|
|
273
400
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
274
401
|
sed -i.bak "s/| \*\*Current Phase\*\* | .*/| **Current Phase** | $new_phase |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
@@ -283,7 +410,7 @@ EOF
|
|
|
283
410
|
local result="${3:-}"
|
|
284
411
|
local files_changed="${4:-—}"
|
|
285
412
|
|
|
286
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
413
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
287
414
|
|
|
288
415
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
289
416
|
|
|
@@ -309,7 +436,7 @@ EOF
|
|
|
309
436
|
local safe="${2:-NO}"
|
|
310
437
|
local reason="${3:-Unknown state}"
|
|
311
438
|
|
|
312
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
439
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
313
440
|
|
|
314
441
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
315
442
|
sed -i.bak "s/| \*\*Safe to Clear?\*\* | .*/| **Safe to Clear?** | $safe — $reason |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
@@ -322,7 +449,7 @@ EOF
|
|
|
322
449
|
local c_message="${3:-}"
|
|
323
450
|
local c_source="${4:-User}"
|
|
324
451
|
|
|
325
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
452
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
326
453
|
|
|
327
454
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
328
455
|
|
|
@@ -344,7 +471,7 @@ EOF
|
|
|
344
471
|
local rationale="${3:-}"
|
|
345
472
|
local made_by="${4:-Colony}"
|
|
346
473
|
|
|
347
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
474
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
348
475
|
|
|
349
476
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
350
477
|
|
|
@@ -376,7 +503,7 @@ EOF
|
|
|
376
503
|
local worker_count="${3:-0}"
|
|
377
504
|
local tasks_count="${4:-0}"
|
|
378
505
|
|
|
379
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
506
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
380
507
|
|
|
381
508
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
382
509
|
sed -i.bak "s/## 📍 What's In Progress/## 📍 What's In Progress\n\n**Phase $phase_id Build IN PROGRESS**\n- Workers: $worker_count | Tasks: $tasks_count\n- Started: $ctx_ts/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
@@ -390,7 +517,7 @@ EOF
|
|
|
390
517
|
local caste="${3:-}"
|
|
391
518
|
local task="${4:-}"
|
|
392
519
|
|
|
393
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
520
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
394
521
|
|
|
395
522
|
awk -v ant="$ant_name" -v caste="$caste" -v task="$task" -v ts="$ctx_ts" '
|
|
396
523
|
/^## 📍 What'\''s In Progress/ { in_progress=1 }
|
|
@@ -410,7 +537,7 @@ EOF
|
|
|
410
537
|
local ant_name="${2:-}"
|
|
411
538
|
local status="${3:-completed}"
|
|
412
539
|
|
|
413
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
540
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
414
541
|
|
|
415
542
|
sed -i.bak "s/- .*$ant_name .*$/- $ant_name: $status (updated $ctx_ts)/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
416
543
|
|
|
@@ -422,7 +549,7 @@ EOF
|
|
|
422
549
|
local total="${3:-1}"
|
|
423
550
|
local percentage=$(( completed * 100 / total ))
|
|
424
551
|
|
|
425
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
552
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
426
553
|
|
|
427
554
|
sed -i.bak "s/Build IN PROGRESS/Build IN PROGRESS ($percentage% complete)/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
428
555
|
|
|
@@ -433,7 +560,7 @@ EOF
|
|
|
433
560
|
local status="${2:-completed}"
|
|
434
561
|
local result="${3:-success}"
|
|
435
562
|
|
|
436
|
-
[[ -f "$ctx_file" ]] || { json_err "CONTEXT.md
|
|
563
|
+
[[ -f "$ctx_file" ]] || { json_err "$E_FILE_NOT_FOUND" "Couldn't find CONTEXT.md. Try: run context-update init first."; }
|
|
437
564
|
|
|
438
565
|
sed -i.bak "s/| \*\*Last Updated\*\* | .*/| **Last Updated** | $ctx_ts |/" "$ctx_file" && rm -f "$ctx_file.bak"
|
|
439
566
|
|
|
@@ -456,9 +583,20 @@ EOF
|
|
|
456
583
|
;;
|
|
457
584
|
|
|
458
585
|
*)
|
|
459
|
-
json_err "$E_VALIDATION_FAILED" "Unknown context action: $ctx_action"
|
|
586
|
+
json_err "$E_VALIDATION_FAILED" "Unknown context action: '$ctx_action'. Suggestion: Use one of: init, update-phase, activity, constraint, decision, safe-to-clear, build-start, worker-spawn, worker-complete, build-progress, build-complete"
|
|
460
587
|
;;
|
|
461
588
|
esac
|
|
589
|
+
|
|
590
|
+
# Release lock on success (LOCK-04)
|
|
591
|
+
# NOTE: Do NOT clear the EXIT trap here. This function RETURNS (it does not
|
|
592
|
+
# call exit), so clearing the trap would remove the safety net without benefit.
|
|
593
|
+
# The EXIT trap remains active as a true safety net for unexpected exit calls
|
|
594
|
+
# elsewhere in the process. The _ctx_lock_held variable is the primary gate
|
|
595
|
+
# for this function's own cleanup.
|
|
596
|
+
if [[ "$_ctx_lock_held" == "true" ]]; then
|
|
597
|
+
release_lock 2>/dev/null || true
|
|
598
|
+
_ctx_lock_held=false
|
|
599
|
+
fi
|
|
462
600
|
}
|
|
463
601
|
|
|
464
602
|
# --- Subcommand dispatch ---
|
|
@@ -467,17 +605,140 @@ shift 2>/dev/null || true
|
|
|
467
605
|
|
|
468
606
|
case "$cmd" in
|
|
469
607
|
help)
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
608
|
+
# Build help JSON with sections for discoverability.
|
|
609
|
+
# The flat 'commands' array is kept for backward compatibility
|
|
610
|
+
# (callers use: jq '.commands[]')
|
|
611
|
+
cat <<'HELP_EOF'
|
|
612
|
+
{
|
|
613
|
+
"ok": true,
|
|
614
|
+
"commands": ["help","version","validate-state","load-state","unload-state","error-add","error-pattern-check","error-summary","activity-log","activity-log-init","activity-log-read","learning-promote","learning-inject","generate-ant-name","spawn-log","spawn-complete","spawn-can-spawn","spawn-get-depth","spawn-tree-load","spawn-tree-active","spawn-tree-depth","update-progress","check-antipattern","error-flag-pattern","signature-scan","signature-match","flag-add","flag-check-blockers","flag-resolve","flag-acknowledge","flag-list","flag-auto-resolve","autofix-checkpoint","autofix-rollback","spawn-can-spawn-swarm","swarm-findings-init","swarm-findings-add","swarm-findings-read","swarm-solution-set","swarm-cleanup","swarm-activity-log","swarm-display-init","swarm-display-update","swarm-display-get","swarm-display-text","swarm-timing-start","swarm-timing-get","swarm-timing-eta","view-state-init","view-state-get","view-state-set","view-state-toggle","view-state-expand","view-state-collapse","grave-add","grave-check","generate-commit-message","version-check","registry-add","bootstrap-system","model-profile","model-get","model-list","chamber-create","chamber-verify","chamber-list","milestone-detect","queen-init","queen-read","queen-promote","survey-load","survey-verify","pheromone-export","pheromone-write","pheromone-count","pheromone-read","instinct-read","pheromone-prime","pheromone-expire","eternal-init","pheromone-export-xml","pheromone-import-xml","pheromone-validate-xml","wisdom-export-xml","wisdom-import-xml","registry-export-xml","registry-import-xml","force-unlock"],
|
|
615
|
+
"sections": {
|
|
616
|
+
"Core": [
|
|
617
|
+
{"name": "help", "description": "List all available commands with sections"},
|
|
618
|
+
{"name": "version", "description": "Show installed version"}
|
|
619
|
+
],
|
|
620
|
+
"Colony State": [
|
|
621
|
+
{"name": "validate-state", "description": "Validate COLONY_STATE.json or constraints.json"},
|
|
622
|
+
{"name": "load-state", "description": "Load and lock COLONY_STATE.json"},
|
|
623
|
+
{"name": "unload-state", "description": "Release COLONY_STATE.json lock"}
|
|
624
|
+
],
|
|
625
|
+
"Queen Commands": [
|
|
626
|
+
{"name": "queen-init", "description": "Initialize a new colony QUEEN.md from template"},
|
|
627
|
+
{"name": "queen-read", "description": "Read QUEEN.md wisdom as JSON for worker priming"},
|
|
628
|
+
{"name": "queen-promote", "description": "Promote a validated learning to QUEEN.md wisdom"}
|
|
629
|
+
],
|
|
630
|
+
"Model Routing": [
|
|
631
|
+
{"name": "model-profile", "description": "Manage caste-to-model assignments"},
|
|
632
|
+
{"name": "model-get", "description": "Get model assignment for a caste"},
|
|
633
|
+
{"name": "model-list", "description": "List all model assignments"}
|
|
634
|
+
],
|
|
635
|
+
"Spawn Management": [
|
|
636
|
+
{"name": "spawn-log", "description": "Log a spawn event to spawn-tree.txt"},
|
|
637
|
+
{"name": "spawn-complete", "description": "Record spawn completion in spawn-tree.txt"},
|
|
638
|
+
{"name": "spawn-can-spawn", "description": "Check if spawn budget allows another worker"},
|
|
639
|
+
{"name": "spawn-get-depth", "description": "Get spawn depth for an ant name"},
|
|
640
|
+
{"name": "spawn-tree-load", "description": "Load spawn-tree.txt as JSON"},
|
|
641
|
+
{"name": "spawn-tree-active", "description": "List currently active spawns"},
|
|
642
|
+
{"name": "spawn-tree-depth", "description": "Get depth for a named ant"}
|
|
643
|
+
],
|
|
644
|
+
"Flag Management": [
|
|
645
|
+
{"name": "flag-add", "description": "Add a flag to flags.json"},
|
|
646
|
+
{"name": "flag-check-blockers", "description": "Check for flags blocking a task"},
|
|
647
|
+
{"name": "flag-resolve", "description": "Mark a flag as resolved"},
|
|
648
|
+
{"name": "flag-acknowledge", "description": "Acknowledge a flag without resolving"},
|
|
649
|
+
{"name": "flag-list", "description": "List all flags"},
|
|
650
|
+
{"name": "flag-auto-resolve", "description": "Auto-resolve flags matching criteria"}
|
|
651
|
+
],
|
|
652
|
+
"Chamber Management": [
|
|
653
|
+
{"name": "chamber-create", "description": "Entomb a colony into a named chamber"},
|
|
654
|
+
{"name": "chamber-verify", "description": "Verify chamber integrity"},
|
|
655
|
+
{"name": "chamber-list", "description": "List all available chambers"}
|
|
656
|
+
],
|
|
657
|
+
"Swarm Operations": [
|
|
658
|
+
{"name": "swarm-findings-init", "description": "Initialize swarm findings file"},
|
|
659
|
+
{"name": "swarm-findings-add", "description": "Add a finding to swarm results"},
|
|
660
|
+
{"name": "swarm-findings-read", "description": "Read all swarm findings"},
|
|
661
|
+
{"name": "swarm-solution-set", "description": "Set the chosen swarm solution"},
|
|
662
|
+
{"name": "swarm-cleanup", "description": "Clean up swarm state files"},
|
|
663
|
+
{"name": "swarm-display-init", "description": "Initialize swarm progress display"},
|
|
664
|
+
{"name": "swarm-display-update", "description": "Update swarm display for an ant"},
|
|
665
|
+
{"name": "swarm-timing-start", "description": "Start timing for a swarm operation"},
|
|
666
|
+
{"name": "swarm-timing-get", "description": "Get elapsed time for a swarm"},
|
|
667
|
+
{"name": "swarm-timing-eta", "description": "Estimate remaining time for a swarm"}
|
|
668
|
+
],
|
|
669
|
+
"Pheromone System": [
|
|
670
|
+
{"name": "pheromone-write", "description": "Write a pheromone signal"},
|
|
671
|
+
{"name": "pheromone-read", "description": "Read pheromone signals"},
|
|
672
|
+
{"name": "pheromone-count", "description": "Count active pheromone signals"},
|
|
673
|
+
{"name": "pheromone-prime", "description": "Prime the pheromone system"},
|
|
674
|
+
{"name": "pheromone-expire", "description": "Expire old pheromone signals"},
|
|
675
|
+
{"name": "pheromone-export", "description": "Export pheromone data to JSON"},
|
|
676
|
+
{"name": "pheromone-export-xml", "description": "Export pheromone data to XML"},
|
|
677
|
+
{"name": "pheromone-import-xml", "description": "Import pheromone data from XML"},
|
|
678
|
+
{"name": "pheromone-validate-xml", "description": "Validate pheromone XML against schema"}
|
|
679
|
+
],
|
|
680
|
+
"Utilities": [
|
|
681
|
+
{"name": "generate-ant-name", "description": "Generate a unique ant name with caste prefix"},
|
|
682
|
+
{"name": "activity-log", "description": "Append an entry to the activity log"},
|
|
683
|
+
{"name": "activity-log-init", "description": "Initialize the activity log file"},
|
|
684
|
+
{"name": "activity-log-read", "description": "Read recent activity log entries"},
|
|
685
|
+
{"name": "generate-commit-message", "description": "Generate a commit message from git diff"},
|
|
686
|
+
{"name": "version-check", "description": "Check if Aether version meets requirement"},
|
|
687
|
+
{"name": "registry-add", "description": "Register a repo with Aether"},
|
|
688
|
+
{"name": "bootstrap-system", "description": "Bootstrap minimal system files if missing"},
|
|
689
|
+
{"name": "force-unlock", "description": "Emergency unlock — remove stale lock files"}
|
|
690
|
+
]
|
|
691
|
+
},
|
|
692
|
+
"description": "Aether Colony Utility Layer — deterministic ops for the ant colony"
|
|
693
|
+
}
|
|
694
|
+
HELP_EOF
|
|
473
695
|
;;
|
|
474
696
|
version)
|
|
475
697
|
json_ok '"1.0.0"'
|
|
476
698
|
;;
|
|
477
699
|
validate-state)
|
|
700
|
+
# Schema migration helper: auto-upgrades pre-3.0 state files to v3.0
|
|
701
|
+
# Additive only (never removes fields) — idempotent and safe for concurrent access
|
|
702
|
+
_migrate_colony_state() {
|
|
703
|
+
local state_file="$1"
|
|
704
|
+
[[ -f "$state_file" ]] || return 0
|
|
705
|
+
|
|
706
|
+
# First: verify file is parseable JSON at all
|
|
707
|
+
if ! jq -e . "$state_file" >/dev/null 2>&1; then
|
|
708
|
+
# Corrupt state file — backup and error
|
|
709
|
+
if type create_backup &>/dev/null; then
|
|
710
|
+
create_backup "$state_file" 2>/dev/null || true
|
|
711
|
+
fi
|
|
712
|
+
json_err "$E_JSON_INVALID" \
|
|
713
|
+
"COLONY_STATE.json is corrupted (invalid JSON). A backup was saved in .aether/data/backups/. Try: run /ant:init to reset colony state."
|
|
714
|
+
fi
|
|
715
|
+
|
|
716
|
+
local current_version
|
|
717
|
+
current_version=$(jq -r '.version // "1.0"' "$state_file" 2>/dev/null)
|
|
718
|
+
|
|
719
|
+
if [[ "$current_version" != "3.0" ]]; then
|
|
720
|
+
# Add missing v3.0 fields (additive only — idempotent and safe for concurrent access)
|
|
721
|
+
local updated
|
|
722
|
+
updated=$(jq '
|
|
723
|
+
.version = "3.0" |
|
|
724
|
+
if .signals == null then .signals = [] else . end |
|
|
725
|
+
if .graveyards == null then .graveyards = [] else . end |
|
|
726
|
+
if .events == null then .events = [] else . end
|
|
727
|
+
' "$state_file" 2>/dev/null)
|
|
728
|
+
|
|
729
|
+
if [[ -n "$updated" ]]; then
|
|
730
|
+
atomic_write "$state_file" "$updated"
|
|
731
|
+
# Notify user of migration (auto-migrate + notify pattern)
|
|
732
|
+
printf '{"ok":true,"warning":"W_MIGRATED","message":"Migrated colony state from v%s to v3.0"}\n' "$current_version" >&2
|
|
733
|
+
fi
|
|
734
|
+
fi
|
|
735
|
+
}
|
|
736
|
+
|
|
478
737
|
case "${1:-}" in
|
|
479
738
|
colony)
|
|
480
739
|
[[ -f "$DATA_DIR/COLONY_STATE.json" ]] || json_err "$E_FILE_NOT_FOUND" "COLONY_STATE.json not found" '{"file":"COLONY_STATE.json"}'
|
|
740
|
+
# Run schema migration before field validation (ensures v3.0 fields always present)
|
|
741
|
+
_migrate_colony_state "$DATA_DIR/COLONY_STATE.json"
|
|
481
742
|
json_ok "$(jq '
|
|
482
743
|
def chk(f;t): if has(f) then (if (.[f]|type) as $a | t | any(. == $a) then "pass" else "fail: \(f) is \(.[f]|type), expected \(t|join("|"))" end) else "fail: missing \(f)" end;
|
|
483
744
|
def opt(f;t): if has(f) then (if (.[f]|type) as $a | t | any(. == $a) then "pass" else "fail: \(f) is \(.[f]|type), expected \(t|join("|"))" end) else "pass" end;
|
|
@@ -492,7 +753,7 @@ EOF
|
|
|
492
753
|
opt("session_id";["string","null"]),
|
|
493
754
|
opt("initialized_at";["string","null"]),
|
|
494
755
|
opt("build_started_at";["string","null"])
|
|
495
|
-
]} | . + {pass: ([.checks[] | select(. == "pass")] | length) == (.checks | length)}
|
|
756
|
+
]} | . + {pass: (([.checks[] | select(. == "pass")] | length) == (.checks | length))}
|
|
496
757
|
' "$DATA_DIR/COLONY_STATE.json")"
|
|
497
758
|
;;
|
|
498
759
|
constraints)
|
|
@@ -502,7 +763,7 @@ EOF
|
|
|
502
763
|
{file:"constraints.json", checks:[
|
|
503
764
|
arr("focus"),
|
|
504
765
|
arr("constraints")
|
|
505
|
-
]} | . + {pass: ([.checks[] | select(. == "pass")] | length) == (.checks | length)}
|
|
766
|
+
]} | . + {pass: (([.checks[] | select(. == "pass")] | length) == (.checks | length))}
|
|
506
767
|
' "$DATA_DIR/constraints.json")"
|
|
507
768
|
;;
|
|
508
769
|
all)
|
|
@@ -727,6 +988,18 @@ EOF
|
|
|
727
988
|
echo "[$ts] $status_icon $emoji $ant_name: $status${summary:+ - $summary}" >> "$DATA_DIR/activity.log"
|
|
728
989
|
# Update spawn tree
|
|
729
990
|
echo "$ts_full|$ant_name|$status|$summary" >> "$DATA_DIR/spawn-tree.txt"
|
|
991
|
+
# Log failed spawns to COLONY_STATE.json events array for audit trail (ARCH-04)
|
|
992
|
+
if [[ "$status" == "failed" ]] || [[ "$status" == "error" ]]; then
|
|
993
|
+
spawn_complete_state_file="$DATA_DIR/COLONY_STATE.json"
|
|
994
|
+
if [[ -f "$spawn_complete_state_file" ]]; then
|
|
995
|
+
spawn_complete_updated=$(jq --arg ts "$ts_full" --arg name "$ant_name" --arg st "$status" --arg sum "${summary:-unknown}" \
|
|
996
|
+
'.events += [{"type":"spawn_failed","ant":$name,"status":$st,"summary":$sum,"timestamp":$ts}]' \
|
|
997
|
+
"$spawn_complete_state_file" 2>/dev/null)
|
|
998
|
+
if [[ -n "$spawn_complete_updated" ]]; then
|
|
999
|
+
atomic_write "$spawn_complete_state_file" "$spawn_complete_updated"
|
|
1000
|
+
fi
|
|
1001
|
+
fi
|
|
1002
|
+
fi
|
|
730
1003
|
# Return emoji-formatted result for display
|
|
731
1004
|
json_ok "\"$status_icon $emoji $ant_name: ${summary:-$status}\""
|
|
732
1005
|
;;
|
|
@@ -1172,7 +1445,6 @@ EOF
|
|
|
1172
1445
|
ts=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
1173
1446
|
|
|
1174
1447
|
# Acquire lock for atomic flag update (degrade gracefully if locking unavailable)
|
|
1175
|
-
lock_acquired=false
|
|
1176
1448
|
if type feature_enabled &>/dev/null && ! feature_enabled "file_locking"; then
|
|
1177
1449
|
json_warn "W_DEGRADED" "File locking disabled - proceeding without lock: $(type _feature_reason &>/dev/null && _feature_reason file_locking || echo 'unknown')"
|
|
1178
1450
|
else
|
|
@@ -1184,9 +1456,8 @@ EOF
|
|
|
1184
1456
|
exit 1
|
|
1185
1457
|
fi
|
|
1186
1458
|
}
|
|
1187
|
-
lock_acquired=true
|
|
1188
1459
|
# Ensure lock is always released on exit (BUG-002 fix)
|
|
1189
|
-
trap 'release_lock
|
|
1460
|
+
trap 'release_lock 2>/dev/null || true' EXIT
|
|
1190
1461
|
fi
|
|
1191
1462
|
|
|
1192
1463
|
# Map type to severity
|
|
@@ -1224,8 +1495,8 @@ EOF
|
|
|
1224
1495
|
' "$flags_file") || { json_err "$E_JSON_INVALID" "Failed to add flag"; }
|
|
1225
1496
|
|
|
1226
1497
|
atomic_write "$flags_file" "$updated"
|
|
1227
|
-
# Lock released by trap on exit (BUG-002 fix)
|
|
1228
1498
|
trap - EXIT
|
|
1499
|
+
release_lock 2>/dev/null || true
|
|
1229
1500
|
json_ok "{\"id\":\"$id\",\"type\":\"$type\",\"severity\":\"$severity\"}"
|
|
1230
1501
|
;;
|
|
1231
1502
|
flag-check-blockers)
|
|
@@ -1274,6 +1545,7 @@ EOF
|
|
|
1274
1545
|
json_warn "W_DEGRADED" "File locking disabled - proceeding without lock"
|
|
1275
1546
|
else
|
|
1276
1547
|
acquire_lock "$flags_file" || json_err "$E_LOCK_FAILED" "Failed to acquire lock on flags.json"
|
|
1548
|
+
trap 'release_lock 2>/dev/null || true' EXIT
|
|
1277
1549
|
fi
|
|
1278
1550
|
|
|
1279
1551
|
updated=$(jq --arg id "$flag_id" --arg res "$resolution" --arg ts "$ts" '
|
|
@@ -1282,14 +1554,12 @@ EOF
|
|
|
1282
1554
|
.resolution = $res
|
|
1283
1555
|
else . end]
|
|
1284
1556
|
' "$flags_file") || {
|
|
1285
|
-
if type feature_enabled &>/dev/null && feature_enabled "file_locking"; then
|
|
1286
|
-
release_lock "$flags_file"
|
|
1287
|
-
fi
|
|
1288
1557
|
json_err "$E_JSON_INVALID" "Failed to resolve flag"
|
|
1289
1558
|
}
|
|
1290
1559
|
|
|
1291
1560
|
atomic_write "$flags_file" "$updated"
|
|
1292
|
-
|
|
1561
|
+
trap - EXIT
|
|
1562
|
+
release_lock 2>/dev/null || true
|
|
1293
1563
|
json_ok "{\"resolved\":\"$flag_id\"}"
|
|
1294
1564
|
;;
|
|
1295
1565
|
flag-acknowledge)
|
|
@@ -1308,6 +1578,7 @@ EOF
|
|
|
1308
1578
|
json_warn "W_DEGRADED" "File locking disabled - proceeding without lock"
|
|
1309
1579
|
else
|
|
1310
1580
|
acquire_lock "$flags_file" || json_err "$E_LOCK_FAILED" "Failed to acquire lock on flags.json"
|
|
1581
|
+
trap 'release_lock 2>/dev/null || true' EXIT
|
|
1311
1582
|
fi
|
|
1312
1583
|
|
|
1313
1584
|
updated=$(jq --arg id "$flag_id" --arg ts "$ts" '
|
|
@@ -1315,14 +1586,12 @@ EOF
|
|
|
1315
1586
|
.acknowledged_at = $ts
|
|
1316
1587
|
else . end]
|
|
1317
1588
|
' "$flags_file") || {
|
|
1318
|
-
if type feature_enabled &>/dev/null && feature_enabled "file_locking"; then
|
|
1319
|
-
release_lock "$flags_file"
|
|
1320
|
-
fi
|
|
1321
1589
|
json_err "$E_JSON_INVALID" "Failed to acknowledge flag"
|
|
1322
1590
|
}
|
|
1323
1591
|
|
|
1324
1592
|
atomic_write "$flags_file" "$updated"
|
|
1325
|
-
|
|
1593
|
+
trap - EXIT
|
|
1594
|
+
release_lock 2>/dev/null || true
|
|
1326
1595
|
json_ok "{\"acknowledged\":\"$flag_id\"}"
|
|
1327
1596
|
;;
|
|
1328
1597
|
flag-list)
|
|
@@ -1376,14 +1645,12 @@ EOF
|
|
|
1376
1645
|
ts=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
|
|
1377
1646
|
|
|
1378
1647
|
# Acquire lock for atomic flag update (degrade gracefully if locking unavailable)
|
|
1379
|
-
lock_acquired=false
|
|
1380
1648
|
if type feature_enabled &>/dev/null && ! feature_enabled "file_locking"; then
|
|
1381
1649
|
json_warn "W_DEGRADED" "File locking disabled - proceeding without lock"
|
|
1382
1650
|
else
|
|
1383
1651
|
acquire_lock "$flags_file" || json_err "$E_LOCK_FAILED" "Failed to acquire lock on flags.json"
|
|
1384
|
-
lock_acquired=true
|
|
1385
1652
|
# Ensure lock is always released on exit (BUG-005/BUG-011 fix)
|
|
1386
|
-
trap 'release_lock
|
|
1653
|
+
trap 'release_lock 2>/dev/null || true' EXIT
|
|
1387
1654
|
fi
|
|
1388
1655
|
|
|
1389
1656
|
# Count how many will be resolved
|
|
@@ -1404,10 +1671,8 @@ EOF
|
|
|
1404
1671
|
}
|
|
1405
1672
|
|
|
1406
1673
|
atomic_write "$flags_file" "$updated"
|
|
1407
|
-
|
|
1408
|
-
|
|
1409
|
-
trap - EXIT
|
|
1410
|
-
fi
|
|
1674
|
+
trap - EXIT
|
|
1675
|
+
release_lock 2>/dev/null || true
|
|
1411
1676
|
json_ok "{\"resolved\":$count,\"trigger\":\"$trigger\"}"
|
|
1412
1677
|
;;
|
|
1413
1678
|
generate-ant-name)
|
|
@@ -1456,7 +1721,7 @@ EOF
|
|
|
1456
1721
|
if git rev-parse --git-dir >/dev/null 2>&1; then
|
|
1457
1722
|
# Check if there are changes to Aether-managed files only
|
|
1458
1723
|
# Target directories that Aether is allowed to modify
|
|
1459
|
-
target_dirs=".aether .claude/commands/ant .claude/commands/st .opencode
|
|
1724
|
+
target_dirs=".aether .claude/commands/ant .claude/commands/st .opencode bin"
|
|
1460
1725
|
has_changes=false
|
|
1461
1726
|
|
|
1462
1727
|
for dir in $target_dirs; do
|
|
@@ -1535,7 +1800,7 @@ EOF
|
|
|
1535
1800
|
|
|
1536
1801
|
current=0
|
|
1537
1802
|
if [[ -f "$DATA_DIR/spawn-tree.txt" ]]; then
|
|
1538
|
-
current=$(grep -c "|swarm:$swarm_id$" "$DATA_DIR/spawn-tree.txt" 2>/dev/null ||
|
|
1803
|
+
current=$(grep -c "|swarm:$swarm_id$" "$DATA_DIR/spawn-tree.txt" 2>/dev/null) || current=0
|
|
1539
1804
|
fi
|
|
1540
1805
|
|
|
1541
1806
|
if [[ $current -lt $swarm_cap ]]; then
|
|
@@ -1870,6 +2135,33 @@ Files: ${files_changed} files changed"
|
|
|
1870
2135
|
fi
|
|
1871
2136
|
;;
|
|
1872
2137
|
|
|
2138
|
+
version-check-cached)
|
|
2139
|
+
# Cached version of version-check — skips if checked within TTL (3600s = 1 hour)
|
|
2140
|
+
# Usage: version-check-cached
|
|
2141
|
+
cache_file="$AETHER_ROOT/.aether/data/.version-check-cache"
|
|
2142
|
+
now=$(date +%s)
|
|
2143
|
+
|
|
2144
|
+
if [[ -f "$cache_file" ]]; then
|
|
2145
|
+
cached_at=$(cat "$cache_file" 2>/dev/null || echo "0")
|
|
2146
|
+
age=$((now - cached_at))
|
|
2147
|
+
if [[ $age -lt 3600 ]]; then
|
|
2148
|
+
# Within TTL — skip silently
|
|
2149
|
+
json_ok '""'
|
|
2150
|
+
exit 0
|
|
2151
|
+
fi
|
|
2152
|
+
fi
|
|
2153
|
+
|
|
2154
|
+
# Cache miss or stale — run actual check
|
|
2155
|
+
mkdir -p "$(dirname "$cache_file")" 2>/dev/null || true
|
|
2156
|
+
result=$("$0" version-check 2>/dev/null) || true
|
|
2157
|
+
echo "$now" > "$cache_file" 2>/dev/null || true
|
|
2158
|
+
if [[ -n "$result" ]]; then
|
|
2159
|
+
echo "$result"
|
|
2160
|
+
else
|
|
2161
|
+
json_ok '""'
|
|
2162
|
+
fi
|
|
2163
|
+
;;
|
|
2164
|
+
|
|
1873
2165
|
registry-add)
|
|
1874
2166
|
# Add or update a repo entry in ~/.aether/registry.json
|
|
1875
2167
|
# Usage: registry-add <repo_path> <version>
|
|
@@ -1924,15 +2216,14 @@ Files: ${files_changed} files changed"
|
|
|
1924
2216
|
# Allowlist of system files to copy (relative to system/)
|
|
1925
2217
|
allowlist=(
|
|
1926
2218
|
"aether-utils.sh"
|
|
1927
|
-
"coding-standards.md"
|
|
1928
|
-
"debugging.md"
|
|
1929
|
-
"DISCIPLINES.md"
|
|
1930
|
-
"learning.md"
|
|
1931
|
-
"
|
|
1932
|
-
"
|
|
1933
|
-
"
|
|
1934
|
-
"
|
|
1935
|
-
"verification.md"
|
|
2219
|
+
"docs/disciplines/coding-standards.md"
|
|
2220
|
+
"docs/disciplines/debugging.md"
|
|
2221
|
+
"docs/disciplines/DISCIPLINES.md"
|
|
2222
|
+
"docs/disciplines/learning.md"
|
|
2223
|
+
"docs/disciplines/tdd.md"
|
|
2224
|
+
"docs/disciplines/verification-loop.md"
|
|
2225
|
+
"docs/disciplines/verification.md"
|
|
2226
|
+
"docs/QUEEN_ANT_ARCHITECTURE.md"
|
|
1936
2227
|
"workers.md"
|
|
1937
2228
|
"docs/constraints.md"
|
|
1938
2229
|
"docs/pathogen-schema-example.json"
|
|
@@ -2143,15 +2434,29 @@ NODESCRIPT
|
|
|
2143
2434
|
model-get)
|
|
2144
2435
|
# Shortcut: model-get <caste>
|
|
2145
2436
|
caste="${1:-}"
|
|
2146
|
-
[[ -z "$caste" ]] && json_err "$E_VALIDATION_FAILED" "Usage: model-get <caste
|
|
2147
|
-
|
|
2148
|
-
# Delegate to model-profile get
|
|
2149
|
-
|
|
2437
|
+
[[ -z "$caste" ]] && json_err "$E_VALIDATION_FAILED" "Usage: model-get <caste>. Try: provide a caste name (e.g., builder, scout, surveyor)."
|
|
2438
|
+
|
|
2439
|
+
# Delegate to model-profile get via subprocess (not exec) so errors can be captured
|
|
2440
|
+
set +e
|
|
2441
|
+
result=$(bash "$0" model-profile get "$caste" 2>&1)
|
|
2442
|
+
exit_code=$?
|
|
2443
|
+
set -e
|
|
2444
|
+
if [[ $exit_code -ne 0 ]]; then
|
|
2445
|
+
json_err "$E_BASH_ERROR" "Couldn't get model assignment for caste '$caste'. Try: check that .aether/model-profiles.yaml exists and is valid YAML."
|
|
2446
|
+
fi
|
|
2447
|
+
echo "$result"
|
|
2150
2448
|
;;
|
|
2151
2449
|
|
|
2152
2450
|
model-list)
|
|
2153
|
-
# Shortcut: list all models
|
|
2154
|
-
|
|
2451
|
+
# Shortcut: list all models via subprocess (not exec) so errors can be captured
|
|
2452
|
+
set +e
|
|
2453
|
+
result=$(bash "$0" model-profile list 2>&1)
|
|
2454
|
+
exit_code=$?
|
|
2455
|
+
set -e
|
|
2456
|
+
if [[ $exit_code -ne 0 ]]; then
|
|
2457
|
+
json_err "$E_BASH_ERROR" "Couldn't list model assignments. Try: run 'aether verify-models' to check model configuration."
|
|
2458
|
+
fi
|
|
2459
|
+
echo "$result"
|
|
2155
2460
|
;;
|
|
2156
2461
|
|
|
2157
2462
|
# ============================================
|
|
@@ -2697,6 +3002,106 @@ ANTLOGO
|
|
|
2697
3002
|
json_ok "{\"displayed\":true,\"ants\":$total_active}"
|
|
2698
3003
|
;;
|
|
2699
3004
|
|
|
3005
|
+
swarm-display-text)
|
|
3006
|
+
# Plain-text swarm display for Claude conversation (no ANSI codes)
|
|
3007
|
+
# Usage: swarm-display-text [swarm_id]
|
|
3008
|
+
swarm_id="${1:-default-swarm}"
|
|
3009
|
+
display_file="$DATA_DIR/swarm-display.json"
|
|
3010
|
+
|
|
3011
|
+
# Check for display file
|
|
3012
|
+
if [[ ! -f "$display_file" ]]; then
|
|
3013
|
+
echo "🐜 Colony idle"
|
|
3014
|
+
json_ok '{"displayed":false,"reason":"no_data"}'
|
|
3015
|
+
exit 0
|
|
3016
|
+
fi
|
|
3017
|
+
|
|
3018
|
+
# Check for jq
|
|
3019
|
+
if ! command -v jq >/dev/null 2>&1; then
|
|
3020
|
+
echo "🐜 Swarm active (details unavailable)"
|
|
3021
|
+
json_ok '{"displayed":true,"warning":"jq_missing"}'
|
|
3022
|
+
exit 0
|
|
3023
|
+
fi
|
|
3024
|
+
|
|
3025
|
+
# Read swarm data — handle both flat total_active and nested .summary.total_active
|
|
3026
|
+
total_active=$(jq -r '(.total_active // .summary.total_active // 0)' "$display_file" 2>/dev/null || echo "0")
|
|
3027
|
+
|
|
3028
|
+
if [[ "$total_active" -eq 0 ]]; then
|
|
3029
|
+
echo "🐜 Colony idle"
|
|
3030
|
+
json_ok '{"displayed":true,"ants":0}'
|
|
3031
|
+
exit 0
|
|
3032
|
+
fi
|
|
3033
|
+
|
|
3034
|
+
# Compact header
|
|
3035
|
+
echo "🐜 COLONY ACTIVITY"
|
|
3036
|
+
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
3037
|
+
|
|
3038
|
+
# Caste emoji lookup
|
|
3039
|
+
get_emoji() {
|
|
3040
|
+
case "$1" in
|
|
3041
|
+
builder) echo "🔨🐜" ;;
|
|
3042
|
+
watcher) echo "👁️🐜" ;;
|
|
3043
|
+
scout) echo "🔍🐜" ;;
|
|
3044
|
+
chaos) echo "🎲🐜" ;;
|
|
3045
|
+
prime) echo "👑🐜" ;;
|
|
3046
|
+
oracle) echo "🔮🐜" ;;
|
|
3047
|
+
route_setter) echo "🧭🐜" ;;
|
|
3048
|
+
archaeologist) echo "🏺🐜" ;;
|
|
3049
|
+
surveyor) echo "📊🐜" ;;
|
|
3050
|
+
*) echo "🐜" ;;
|
|
3051
|
+
esac
|
|
3052
|
+
}
|
|
3053
|
+
|
|
3054
|
+
# Format tool counts (only non-zero)
|
|
3055
|
+
format_tools_text() {
|
|
3056
|
+
local r="${1:-0}" g="${2:-0}" e="${3:-0}" b="${4:-0}"
|
|
3057
|
+
local result=""
|
|
3058
|
+
[[ "$r" -gt 0 ]] && result="${result}📖${r} "
|
|
3059
|
+
[[ "$g" -gt 0 ]] && result="${result}🔍${g} "
|
|
3060
|
+
[[ "$e" -gt 0 ]] && result="${result}✏️${e} "
|
|
3061
|
+
[[ "$b" -gt 0 ]] && result="${result}⚡${b}"
|
|
3062
|
+
echo "$result"
|
|
3063
|
+
}
|
|
3064
|
+
|
|
3065
|
+
# Progress bar using block characters (no ANSI)
|
|
3066
|
+
render_bar_text() {
|
|
3067
|
+
local pct="${1:-0}" w="${2:-10}"
|
|
3068
|
+
[[ "$pct" -lt 0 ]] && pct=0
|
|
3069
|
+
[[ "$pct" -gt 100 ]] && pct=100
|
|
3070
|
+
local filled=$((pct * w / 100))
|
|
3071
|
+
local empty=$((w - filled))
|
|
3072
|
+
local bar=""
|
|
3073
|
+
for ((i=0; i<filled; i++)); do bar+="█"; done
|
|
3074
|
+
for ((i=0; i<empty; i++)); do bar+="░"; done
|
|
3075
|
+
echo "[$bar] ${pct}%"
|
|
3076
|
+
}
|
|
3077
|
+
|
|
3078
|
+
# Render each ant (max 5)
|
|
3079
|
+
jq -r '.active_ants[0:5][] | "\(.name)|\(.caste)|\(.task // "")|\(.tools.read // 0)|\(.tools.grep // 0)|\(.tools.edit // 0)|\(.tools.bash // 0)|\(.progress // 0)"' "$display_file" 2>/dev/null | while IFS='|' read -r name caste task r g e b progress; do
|
|
3080
|
+
emoji=$(get_emoji "$caste")
|
|
3081
|
+
tools=$(format_tools_text "$r" "$g" "$e" "$b")
|
|
3082
|
+
bar=$(render_bar_text "${progress:-0}" 10)
|
|
3083
|
+
|
|
3084
|
+
# Truncate task to 25 chars
|
|
3085
|
+
[[ ${#task} -gt 25 ]] && task="${task:0:22}..."
|
|
3086
|
+
|
|
3087
|
+
echo "${emoji} ${name} ${bar} ${task}"
|
|
3088
|
+
[[ -n "$tools" ]] && echo " ${tools}"
|
|
3089
|
+
echo ""
|
|
3090
|
+
done
|
|
3091
|
+
|
|
3092
|
+
# Overflow indicator
|
|
3093
|
+
if [[ "$total_active" -gt 5 ]]; then
|
|
3094
|
+
echo " +$((total_active - 5)) more ants..."
|
|
3095
|
+
echo ""
|
|
3096
|
+
fi
|
|
3097
|
+
|
|
3098
|
+
# Footer
|
|
3099
|
+
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
|
3100
|
+
echo "${total_active} ants active"
|
|
3101
|
+
|
|
3102
|
+
json_ok "{\"displayed\":true,\"ants\":$total_active}"
|
|
3103
|
+
;;
|
|
3104
|
+
|
|
2700
3105
|
swarm-timing-start)
|
|
2701
3106
|
# Record start time for an ant
|
|
2702
3107
|
# Usage: swarm-timing-start <ant_name>
|
|
@@ -2970,12 +3375,12 @@ ANTLOGO
|
|
|
2970
3375
|
queen_file="$AETHER_ROOT/.aether/docs/QUEEN.md"
|
|
2971
3376
|
|
|
2972
3377
|
# Check multiple locations for template
|
|
2973
|
-
# Order:
|
|
3378
|
+
# Order: hub (system/) -> dev (.aether/) -> repo local -> legacy
|
|
2974
3379
|
template_file=""
|
|
2975
3380
|
for path in \
|
|
2976
|
-
"$
|
|
2977
|
-
"$
|
|
2978
|
-
"$
|
|
3381
|
+
"$HOME/.aether/system/templates/QUEEN.md.template" \
|
|
3382
|
+
"$AETHER_ROOT/.aether/templates/QUEEN.md.template" \
|
|
3383
|
+
"$HOME/.aether/templates/QUEEN.md.template"; do
|
|
2979
3384
|
if [[ -f "$path" ]]; then
|
|
2980
3385
|
template_file="$path"
|
|
2981
3386
|
break
|
|
@@ -2993,7 +3398,9 @@ ANTLOGO
|
|
|
2993
3398
|
|
|
2994
3399
|
# Check if template was found
|
|
2995
3400
|
if [[ -z "$template_file" ]]; then
|
|
2996
|
-
json_err "$E_FILE_NOT_FOUND"
|
|
3401
|
+
json_err "$E_FILE_NOT_FOUND" \
|
|
3402
|
+
"Template not found. Run: npm install -g aether && aether install to restore it." \
|
|
3403
|
+
'{"templates_checked":["~/.aether/system/templates/QUEEN.md.template",".aether/templates/QUEEN.md.template","~/.aether/templates/QUEEN.md.template"]}'
|
|
2997
3404
|
exit 1
|
|
2998
3405
|
fi
|
|
2999
3406
|
|
|
@@ -3028,6 +3435,12 @@ ANTLOGO
|
|
|
3028
3435
|
metadata='{"version":"unknown","last_evolved":null,"colonies_contributed":[],"promotion_thresholds":{},"stats":{}}'
|
|
3029
3436
|
fi
|
|
3030
3437
|
|
|
3438
|
+
# Gate 1: Validate metadata is parseable JSON BEFORE using as --argjson
|
|
3439
|
+
if ! echo "$metadata" | jq -e . >/dev/null 2>&1; then
|
|
3440
|
+
json_err "$E_JSON_INVALID" \
|
|
3441
|
+
"QUEEN.md has a malformed METADATA block — the JSON between <!-- METADATA and --> is invalid. Try: fix the JSON in .aether/docs/QUEEN.md or run queen-init to reset."
|
|
3442
|
+
fi
|
|
3443
|
+
|
|
3031
3444
|
# Extract sections content for worker priming
|
|
3032
3445
|
# Use awk to parse markdown sections - remove header line and trailing section header
|
|
3033
3446
|
philosophies=$(awk '/^## 📜 Philosophies$/,/^## /' "$queen_file" | tail -n +2 | sed '$d' | sed '/^$/d' | jq -Rs '.')
|
|
@@ -3062,6 +3475,41 @@ ANTLOGO
|
|
|
3062
3475
|
}
|
|
3063
3476
|
}')
|
|
3064
3477
|
|
|
3478
|
+
# Gate 2: Validate assembled result before returning
|
|
3479
|
+
if [[ -z "$result" ]] || ! echo "$result" | jq -e . >/dev/null 2>&1; then
|
|
3480
|
+
json_err "$E_JSON_INVALID" \
|
|
3481
|
+
"Couldn't assemble queen-read output. QUEEN.md may have formatting issues. Try: run queen-init to reset."
|
|
3482
|
+
fi
|
|
3483
|
+
json_ok "$result"
|
|
3484
|
+
;;
|
|
3485
|
+
|
|
3486
|
+
pheromone-read)
|
|
3487
|
+
# Read active pheromones (FOCUS/REDIRECT) from constraints.json
|
|
3488
|
+
# Used to inject active signals into worker prompts
|
|
3489
|
+
constraints_file="$AETHER_ROOT/.aether/data/constraints.json"
|
|
3490
|
+
|
|
3491
|
+
# Initialize defaults (no local - script-level)
|
|
3492
|
+
priorities='[]'
|
|
3493
|
+
avoid='[]'
|
|
3494
|
+
|
|
3495
|
+
# Check if constraints file exists
|
|
3496
|
+
if [[ -f "$constraints_file" ]]; then
|
|
3497
|
+
# Read focus array as priorities
|
|
3498
|
+
priorities=$(jq -c '.focus // []' "$constraints_file" 2>/dev/null || echo '[]')
|
|
3499
|
+
|
|
3500
|
+
# Read constraints array, extract content and source
|
|
3501
|
+
avoid=$(jq -c '[.constraints[]? | {content: .content, source: .source}] // []' "$constraints_file" 2>/dev/null || echo '[]')
|
|
3502
|
+
fi
|
|
3503
|
+
|
|
3504
|
+
# Build JSON output
|
|
3505
|
+
result=$(jq -n \
|
|
3506
|
+
--argjson priorities "$priorities" \
|
|
3507
|
+
--argjson avoid "$avoid" \
|
|
3508
|
+
'{
|
|
3509
|
+
priorities: $priorities,
|
|
3510
|
+
avoid: $avoid
|
|
3511
|
+
}')
|
|
3512
|
+
|
|
3065
3513
|
json_ok "$result"
|
|
3066
3514
|
;;
|
|
3067
3515
|
|
|
@@ -3339,8 +3787,6 @@ ${entry}" "$queen_file" > "$tmp_file"
|
|
|
3339
3787
|
is_system=true
|
|
3340
3788
|
elif [[ "$file" == .opencode/agents/*.md ]] || [[ "$file" == .opencode/agents/**/*.md ]]; then
|
|
3341
3789
|
is_system=true
|
|
3342
|
-
elif [[ "$file" == runtime/* ]]; then
|
|
3343
|
-
is_system=true
|
|
3344
3790
|
elif [[ "$file" == bin/* ]]; then
|
|
3345
3791
|
is_system=true
|
|
3346
3792
|
fi
|
|
@@ -3670,8 +4116,1055 @@ ${entry}" "$queen_file" > "$tmp_file"
|
|
|
3670
4116
|
if type pheromone-export &>/dev/null; then
|
|
3671
4117
|
pheromone-export "$input_json" "$output_xml" "$schema_file"
|
|
3672
4118
|
else
|
|
3673
|
-
json_err "$E_DEPENDENCY_MISSING" "xml-utils.sh not available
|
|
4119
|
+
json_err "$E_DEPENDENCY_MISSING" "xml-utils.sh not available. Try: run aether update to restore utility scripts."
|
|
4120
|
+
fi
|
|
4121
|
+
;;
|
|
4122
|
+
|
|
4123
|
+
pheromone-write)
  # Write a pheromone signal to pheromones.json (and mirror FOCUS/REDIRECT
  # into constraints.json for backward compatibility).
  #
  # Usage: pheromone-write <type> <content> [--strength N] [--ttl TTL] [--source SOURCE] [--reason REASON]
  #   type:       FOCUS, REDIRECT, or FEEDBACK (case-insensitive)
  #   content:    signal text (required, max 500 chars)
  #   --strength: 0.0-1.0 (defaults: REDIRECT=0.9, FOCUS=0.8, FEEDBACK=0.7)
  #   --ttl:      phase_end (default), or <N>m / <N>h / <N>d
  #   --source:   user (default), worker:builder, system
  #   --reason:   human-readable explanation
  pw_type="${1:-}"
  pw_content="${2:-}"

  # Validate type
  if [[ -z "$pw_type" ]]; then
    json_err "$E_VALIDATION_FAILED" "pheromone-write requires <type> argument (FOCUS, REDIRECT, or FEEDBACK)"
  fi

  pw_type=$(echo "$pw_type" | tr '[:lower:]' '[:upper:]')
  case "$pw_type" in
    FOCUS|REDIRECT|FEEDBACK) ;;
    *) json_err "$E_VALIDATION_FAILED" "Invalid pheromone type: $pw_type. Must be FOCUS, REDIRECT, or FEEDBACK" ;;
  esac

  if [[ -z "$pw_content" ]]; then
    json_err "$E_VALIDATION_FAILED" "pheromone-write requires <content> argument"
  fi

  # FIX: the 500-char limit was documented above but never enforced.
  if (( ${#pw_content} > 500 )); then
    json_err "$E_VALIDATION_FAILED" "Pheromone content too long (${#pw_content} chars, max 500)"
  fi

  # Parse optional flags from remaining args (after type and content)
  pw_strength=""
  pw_ttl="phase_end"
  pw_source="user"
  pw_reason=""

  shift 2 # shift past type and content
  while [[ $# -gt 0 ]]; do
    case "$1" in
      # FIX: `|| break` guards against a flag with a missing value; bash's
      # `shift 2` shifts nothing (and fails) when only one arg remains,
      # which previously made this loop spin forever.
      --strength) pw_strength="${2:-}"; shift 2 2>/dev/null || break ;;
      --ttl) pw_ttl="${2:-}"; shift 2 2>/dev/null || break ;;
      --source) pw_source="${2:-}"; shift 2 2>/dev/null || break ;;
      --reason) pw_reason="${2:-}"; shift 2 2>/dev/null || break ;;
      *) shift ;;
    esac
  done

  # FIX: validate strength before it reaches `jq --argjson` below, where a
  # non-numeric value would fail silently behind 2>/dev/null.
  if [[ -n "$pw_strength" ]] && ! [[ "$pw_strength" =~ ^(0(\.[0-9]+)?|1(\.0+)?)$ ]]; then
    json_err "$E_VALIDATION_FAILED" "Invalid --strength: $pw_strength. Must be a number between 0.0 and 1.0"
  fi

  # Apply default strength by type
  if [[ -z "$pw_strength" ]]; then
    case "$pw_type" in
      REDIRECT) pw_strength="0.9" ;;
      FOCUS) pw_strength="0.8" ;;
      FEEDBACK) pw_strength="0.7" ;;
    esac
  fi

  # Apply default reason by type
  if [[ -z "$pw_reason" ]]; then
    pw_type_lower_r=$(echo "$pw_type" | tr '[:upper:]' '[:lower:]')
    pw_reason="User emitted via /ant:${pw_type_lower_r}"
  fi

  # Set priority by type
  case "$pw_type" in
    REDIRECT) pw_priority="high" ;;
    FOCUS) pw_priority="normal" ;;
    FEEDBACK) pw_priority="low" ;;
  esac

  # Generate ID and timestamps (second resolution, zero-padded to ms)
  pw_epoch=$(date +%s)
  pw_epoch_ms="${pw_epoch}000"
  pw_type_lower=$(echo "$pw_type" | tr '[:upper:]' '[:lower:]')
  pw_id="sig_${pw_type_lower}_${pw_epoch_ms}"
  pw_created=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

  # Compute expires_at from TTL
  if [[ "$pw_ttl" == "phase_end" ]]; then
    pw_expires="phase_end"
  else
    pw_ttl_secs=0
    if [[ "$pw_ttl" =~ ^([0-9]+)m$ ]]; then
      pw_ttl_secs=$(( ${BASH_REMATCH[1]} * 60 ))
    elif [[ "$pw_ttl" =~ ^([0-9]+)h$ ]]; then
      pw_ttl_secs=$(( ${BASH_REMATCH[1]} * 3600 ))
    elif [[ "$pw_ttl" =~ ^([0-9]+)d$ ]]; then
      pw_ttl_secs=$(( ${BASH_REMATCH[1]} * 86400 ))
    fi
    if [[ $pw_ttl_secs -gt 0 ]]; then
      pw_expires_epoch=$(( pw_epoch + pw_ttl_secs ))
      # BSD `date -r` first, then GNU `date -d`; fall back to phase_end.
      pw_expires=$(date -u -r "$pw_expires_epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
        date -u -d "@$pw_expires_epoch" +"%Y-%m-%dT%H:%M:%SZ" 2>/dev/null || \
        echo "phase_end")
    else
      pw_expires="phase_end"
    fi
  fi

  pw_file="$DATA_DIR/pheromones.json"

  # Initialize pheromones.json if missing
  if [[ ! -f "$pw_file" ]]; then
    pw_colony_id="aether-dev"
    if [[ -f "$DATA_DIR/COLONY_STATE.json" ]]; then
      pw_colony_id=$(jq -r '.session_id // "aether-dev"' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo "aether-dev")
    fi
    printf '{\n "version": "1.0.0",\n "colony_id": "%s",\n "generated_at": "%s",\n "signals": []\n}\n' \
      "$pw_colony_id" "$pw_created" > "$pw_file"
  fi

  # Build signal object and append to pheromones.json
  pw_signal=$(jq -n \
    --arg id "$pw_id" \
    --arg type "$pw_type" \
    --arg priority "$pw_priority" \
    --arg source "$pw_source" \
    --arg created_at "$pw_created" \
    --arg expires_at "$pw_expires" \
    --argjson active true \
    --argjson strength "$pw_strength" \
    --arg reason "$pw_reason" \
    --arg content "$pw_content" \
    '{id: $id, type: $type, priority: $priority, source: $source, created_at: $created_at, expires_at: $expires_at, active: $active, strength: ($strength | tonumber), reason: $reason, content: {text: $content}}')

  pw_updated=$(jq --argjson sig "$pw_signal" '.signals += [$sig]' "$pw_file" 2>/dev/null)
  if [[ -z "$pw_updated" ]]; then
    json_err "${E_JSON_INVALID:-E_JSON_INVALID}" "Failed to update pheromones.json — jq parse error"
  fi
  printf '%s\n' "$pw_updated" > "$pw_file"

  # Backward compatibility: also write to constraints.json
  pw_cfile="$DATA_DIR/constraints.json"
  if [[ "$pw_type" == "FOCUS" ]]; then
    if [[ ! -f "$pw_cfile" ]]; then
      echo '{"version":"1.0","focus":[],"constraints":[]}' > "$pw_cfile"
    fi
    # Keep only the 5 most recent focus entries.
    pw_cfile_updated=$(jq --arg txt "$pw_content" '
      .focus += [$txt] |
      if (.focus | length) > 5 then .focus = .focus[-5:] else . end
    ' "$pw_cfile" 2>/dev/null)
    [[ -n "$pw_cfile_updated" ]] && echo "$pw_cfile_updated" > "$pw_cfile"
  elif [[ "$pw_type" == "REDIRECT" ]]; then
    if [[ ! -f "$pw_cfile" ]]; then
      echo '{"version":"1.0","focus":[],"constraints":[]}' > "$pw_cfile"
    fi
    pw_constraint=$(jq -n \
      --arg id "c_${pw_epoch}" \
      --arg content "$pw_content" \
      --arg source "user:redirect" \
      --arg created_at "$pw_created" \
      '{id: $id, type: "AVOID", content: $content, source: $source, created_at: $created_at}')
    # Keep only the 10 most recent constraints.
    pw_cfile_updated=$(jq --argjson c "$pw_constraint" '
      .constraints += [$c] |
      if (.constraints | length) > 10 then .constraints = .constraints[-10:] else . end
    ' "$pw_cfile" 2>/dev/null)
    [[ -n "$pw_cfile_updated" ]] && echo "$pw_cfile_updated" > "$pw_cfile"
  fi

  # Get active signal count
  pw_active_count=$(jq '[.signals[] | select(.active == true)] | length' "$pw_file" 2>/dev/null || echo "0")

  json_ok "{\"signal_id\":\"$pw_id\",\"type\":\"$pw_type\",\"active_count\":$pw_active_count}"
  ;;
|
|
4284
|
+
|
|
4285
|
+
pheromone-count)
  # Count active pheromone signals, broken down by type.
  # Usage: pheromone-count
  # Output: {"focus":N,"redirect":N,"feedback":N,"total":N}
  pc_file="$DATA_DIR/pheromones.json"
  pc_zero='{"focus":0,"redirect":0,"feedback":0,"total":0}'

  if [[ ! -f "$pc_file" ]]; then
    # No pheromone store yet: report zeros rather than erroring.
    json_ok "$pc_zero"
  else
    pc_result=$(jq -c '{
      focus: ([.signals[] | select(.active == true and .type == "FOCUS")] | length),
      redirect: ([.signals[] | select(.active == true and .type == "REDIRECT")] | length),
      feedback: ([.signals[] | select(.active == true and .type == "FEEDBACK")] | length),
      total: ([.signals[] | select(.active == true)] | length)
    }' "$pc_file" 2>/dev/null)
    if [[ -n "$pc_result" ]]; then
      json_ok "$pc_result"
    else
      # Unparseable file degrades to zeros instead of failing the caller.
      json_ok "$pc_zero"
    fi
  fi
  ;;
|
|
4308
|
+
|
|
4309
|
+
pheromone-read)
  # NOTE(review): a `pheromone-read)` arm also appears earlier in this case
  # statement; bash dispatches to the first matching pattern, so confirm
  # which of the two arms is actually intended to be reachable.
  #
  # Read pheromones with decay applied at read time.
  # Usage: pheromone-read [type]
  #   type: focus | redirect | feedback | all (default: all)
  # Output: {version, colony_id, signals: [... + effective_strength ...]}
  pher_type="${1:-all}"
  pher_file="$DATA_DIR/pheromones.json"

  if [[ ! -f "$pher_file" ]]; then
    json_err "$E_FILE_NOT_FOUND" "Pheromones file not found. Run /ant:colonize first to initialize the colony."
  fi

  # Decay model (applied while reading, never persisted here):
  #   decay windows: FOCUS=30d, REDIRECT=60d, FEEDBACK/PATTERN=90d
  #   effective_strength = strength * (1 - elapsed_days / decay_days)
  #   a signal is reported inactive once effective_strength < 0.1, or once a
  #   concrete (non-"phase_end") expires_at timestamp has passed.
  pher_now=$(date +%s)
  pher_type_upper=$(echo "$pher_type" | tr '[:lower:]' '[:upper:]')

  pher_result=$(jq -c \
    --argjson now "$pher_now" \
    --arg type_filter "$pher_type_upper" \
    '
    # Rough ISO-8601 to epoch: accumulate years*365d + month*30d + days + time
    def to_epoch(ts):
      if ts == null or ts == "" or ts == "phase_end" then null
      else
        (ts | split("T")) as $parts |
        ($parts[0] | split("-")) as $d |
        ($parts[1] | rtrimstr("Z") | split(":")) as $t |
        (($d[0] | tonumber) - 1970) * 365 * 86400 +
        (($d[1] | tonumber) - 1) * 30 * 86400 +
        (($d[2] | tonumber) - 1) * 86400 +
        ($t[0] | tonumber) * 3600 +
        ($t[1] | tonumber) * 60 +
        ($t[2] | rtrimstr("Z") | tonumber)
      end;

    def decay_days(t):
      if t == "FOCUS" then 30
      elif t == "REDIRECT" then 60
      else 90
      end;

    .signals | map(
      (to_epoch(.created_at)) as $created_epoch |
      (if $created_epoch != null then ($now - $created_epoch) / 86400 else 0 end) as $elapsed_days |
      (decay_days(.type)) as $dd |
      ((.strength // 0.8) * (1 - ($elapsed_days / $dd))) as $eff_raw |
      (if $eff_raw < 0 then 0 else $eff_raw end) as $eff |
      (to_epoch(.expires_at)) as $exp_epoch |
      ($exp_epoch != null and $exp_epoch <= $now) as $expired |
      ($eff < 0.1 or $expired) as $deactivate |
      . + {
        effective_strength: (($eff * 100 | round) / 100),
        active: (if $deactivate then false else (.active // true) end)
      }
    ) |
    map(select(.active == true)) |
    if $type_filter != "ALL" then
      map(select(.type == $type_filter))
    else
      .
    end
    ' "$pher_file" 2>/dev/null)

  if [[ -n "$pher_result" && "$pher_result" != "null" ]]; then
    pher_version=$(jq -r '.version // "1.0.0"' "$pher_file" 2>/dev/null || echo "1.0.0")
    pher_colony=$(jq -r '.colony_id // "unknown"' "$pher_file" 2>/dev/null || echo "unknown")
    json_ok "{\"version\":\"$pher_version\",\"colony_id\":\"$pher_colony\",\"signals\":$pher_result}"
  else
    # jq failure or null payload: degrade to an empty signal list.
    json_ok '{"version":"1.0.0","signals":[]}'
  fi
  ;;
|
|
4388
|
+
|
|
4389
|
+
instinct-read)
  # Read learned instincts from COLONY_STATE.json memory.
  # Usage: instinct-read [--min-confidence N] [--max N] [--domain DOMAIN]
  # Returns: JSON {instincts, total, filtered}, filtered by confidence,
  # status != "disproven" and optional domain; sorted by confidence desc
  # and capped at --max entries.
  ir_min_confidence="0.5"
  ir_max="5"
  ir_domain=""

  # FIX: parse flags with a plain shift loop. The previous implementation
  # walked positional params via `eval`, which is fragile and an injection
  # hazard if an argument ever contains shell metacharacters.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --min-confidence) shift; ir_min_confidence="${1:-$ir_min_confidence}" ;;
      --max)            shift; ir_max="${1:-$ir_max}" ;;
      --domain)         shift; ir_domain="${1:-}" ;;
    esac
    # Consume the current word (flag value or unrecognized arg).
    if [[ $# -gt 0 ]]; then shift; fi
  done

  # FIX: validate numeric flags up front; they are passed to jq via
  # --argjson, which previously failed silently behind 2>/dev/null.
  if ! [[ "$ir_min_confidence" =~ ^[0-9]+(\.[0-9]+)?$ ]]; then
    json_err "$E_VALIDATION_FAILED" "Invalid --min-confidence: $ir_min_confidence. Must be a number."
  fi
  if ! [[ "$ir_max" =~ ^[0-9]+$ ]]; then
    json_err "$E_VALIDATION_FAILED" "Invalid --max: $ir_max. Must be an integer."
  fi

  ir_state_file="$DATA_DIR/COLONY_STATE.json"

  if [[ ! -f "$ir_state_file" ]]; then
    json_err "$E_FILE_NOT_FOUND" "COLONY_STATE.json not found. Run /ant:init first."
  fi

  # Short-circuit to an empty result when memory.instincts is absent.
  # (jq -r avoids comparing against a quoted JSON string literal.)
  ir_has_instincts=$(jq -r 'if .memory.instincts then "yes" else "no" end' "$ir_state_file" 2>/dev/null || echo "no")
  if [[ "$ir_has_instincts" != "yes" ]]; then
    json_ok '{"instincts":[],"total":0,"filtered":0}'
  fi

  ir_result=$(jq -c \
    --argjson min_conf "$ir_min_confidence" \
    --argjson max_count "$ir_max" \
    --arg domain_filter "$ir_domain" \
    '
    (.memory.instincts // []) as $all |
    ($all | length) as $total |
    $all
    | map(select(
        (.confidence // 0) >= $min_conf
        and (.status // "hypothesis") != "disproven"
        and (if $domain_filter != "" then (.domain // "") == $domain_filter else true end)
      ))
    | sort_by(-.confidence)
    | .[:$max_count]
    | {
        instincts: .,
        total: $total,
        filtered: (. | length)
      }
    ' "$ir_state_file" 2>/dev/null)

  if [[ -z "$ir_result" || "$ir_result" == "null" ]]; then
    json_ok '{"instincts":[],"total":0,"filtered":0}'
  else
    json_ok "$ir_result"
  fi
  ;;
|
|
4459
|
+
|
|
4460
|
+
pheromone-prime)
  # Combine active pheromone signals and learned instincts into a prompt-ready block.
  # Usage: pheromone-prime
  # Returns: JSON with signal_count, instinct_count, prompt_section, log_line.
  # prompt_section is empty when there is nothing to inject.

  pp_pher_file="$DATA_DIR/pheromones.json"
  pp_state_file="$DATA_DIR/COLONY_STATE.json"
  pp_now=$(date +%s)

  # Read active signals, applying the same read-time decay model as
  # pheromone-read (FOCUS=30d, REDIRECT=60d, else 90d; deactivate below
  # effective_strength 0.1 or past a concrete expires_at timestamp).
  pp_signals="[]"
  if [[ -f "$pp_pher_file" ]]; then
    pp_signals=$(jq -c \
      --argjson now "$pp_now" \
      '
      def to_epoch(ts):
        if ts == null or ts == "" or ts == "phase_end" then null
        else
          (ts | split("T")) as $parts |
          ($parts[0] | split("-")) as $d |
          ($parts[1] | rtrimstr("Z") | split(":")) as $t |
          (($d[0] | tonumber) - 1970) * 365 * 86400 +
          (($d[1] | tonumber) - 1) * 30 * 86400 +
          (($d[2] | tonumber) - 1) * 86400 +
          ($t[0] | tonumber) * 3600 +
          ($t[1] | tonumber) * 60 +
          ($t[2] | rtrimstr("Z") | tonumber)
        end;

      def decay_days(t):
        if t == "FOCUS" then 30
        elif t == "REDIRECT" then 60
        else 90
        end;

      .signals | map(
        (to_epoch(.created_at)) as $created_epoch |
        (if $created_epoch != null then ($now - $created_epoch) / 86400 else 0 end) as $elapsed_days |
        (decay_days(.type)) as $dd |
        ((.strength // 0.8) * (1 - ($elapsed_days / $dd))) as $eff_raw |
        (if $eff_raw < 0 then 0 else $eff_raw end) as $eff |
        (to_epoch(.expires_at)) as $exp_epoch |
        ($exp_epoch != null and $exp_epoch <= $now) as $expired |
        ($eff < 0.1 or $expired) as $deactivate |
        . + {
          effective_strength: (($eff * 100 | round) / 100),
          active: (if $deactivate then false else (.active // true) end)
        }
      ) |
      map(select(.active == true))
      ' "$pp_pher_file" 2>/dev/null || echo "[]")
  fi

  # jq failure (or null payload) degrades to an empty list.
  if [[ -z "$pp_signals" || "$pp_signals" == "null" ]]; then
    pp_signals="[]"
  fi

  # Read instincts (confidence >= 0.5, not disproven, max 5, highest first).
  pp_instincts="[]"
  if [[ -f "$pp_state_file" ]]; then
    pp_instincts=$(jq -c \
      '
      (.memory.instincts // [])
      | map(select(
          (.confidence // 0) >= 0.5
          and (.status // "hypothesis") != "disproven"
        ))
      | sort_by(-.confidence)
      | .[:5]
      ' "$pp_state_file" 2>/dev/null || echo "[]")
  fi

  if [[ -z "$pp_instincts" || "$pp_instincts" == "null" ]]; then
    pp_instincts="[]"
  fi

  pp_signal_count=$(echo "$pp_signals" | jq 'length' 2>/dev/null || echo "0")
  pp_instinct_count=$(echo "$pp_instincts" | jq 'length' 2>/dev/null || echo "0")

  # Build the human-readable prompt section. Each per-type block renders as
  # "[<strength rounded to 1 decimal>] <text>" lines; .content may be either
  # an object with .text or a bare string (older signals).
  if [[ "$pp_signal_count" -eq 0 && "$pp_instinct_count" -eq 0 ]]; then
    pp_section=""
    pp_log_line="Primed: 0 signals, 0 instincts"
  else
    pp_section="--- ACTIVE SIGNALS (Colony Guidance) ---"$'\n'

    # FOCUS signals
    pp_focus=$(echo "$pp_signals" | jq -r 'map(select(.type == "FOCUS")) | .[] | "[" + ((.effective_strength * 10 | round) / 10 | tostring) + "] " + (.content.text // (if (.content | type) == "string" then .content else "" end))' 2>/dev/null || echo "")
    if [[ -n "$pp_focus" ]]; then
      pp_section+=$'\n'"FOCUS (Pay attention to):"$'\n'"$pp_focus"$'\n'
    fi

    # REDIRECT signals
    pp_redirect=$(echo "$pp_signals" | jq -r 'map(select(.type == "REDIRECT")) | .[] | "[" + ((.effective_strength * 10 | round) / 10 | tostring) + "] " + (.content.text // (if (.content | type) == "string" then .content else "" end))' 2>/dev/null || echo "")
    if [[ -n "$pp_redirect" ]]; then
      pp_section+=$'\n'"REDIRECT (HARD CONSTRAINTS - MUST follow):"$'\n'"$pp_redirect"$'\n'
    fi

    # FEEDBACK signals
    pp_feedback=$(echo "$pp_signals" | jq -r 'map(select(.type == "FEEDBACK")) | .[] | "[" + ((.effective_strength * 10 | round) / 10 | tostring) + "] " + (.content.text // (if (.content | type) == "string" then .content else "" end))' 2>/dev/null || echo "")
    if [[ -n "$pp_feedback" ]]; then
      pp_section+=$'\n'"FEEDBACK (Flexible guidance):"$'\n'"$pp_feedback"$'\n'
    fi

    # Instincts section: "[<confidence>] When <trigger> -> <action> (<domain>)"
    if [[ "$pp_instinct_count" -gt 0 ]]; then
      pp_section+=$'\n'"--- INSTINCTS (Learned Behaviors) ---"$'\n'
      pp_section+="Weight by confidence - higher = stronger guidance:"$'\n'
      pp_instinct_lines=$(echo "$pp_instincts" | jq -r '.[] | "[" + ((.confidence * 10 | round) / 10 | tostring) + "] When " + .trigger + " -> " + .action + " (" + (.domain // "general") + ")"' 2>/dev/null || echo "")
      if [[ -n "$pp_instinct_lines" ]]; then
        pp_section+=$'\n'"$pp_instinct_lines"$'\n'
      fi
    fi

    pp_section+=$'\n'"--- END COLONY CONTEXT ---"

    pp_log_line="Primed: ${pp_signal_count} signals, ${pp_instinct_count} instincts"
  fi

  # Escape section for JSON embedding (printf, not echo, so no extra newline
  # is appended before jq -Rs string-encodes the whole thing).
  pp_section_json=$(printf '%s' "$pp_section" | jq -Rs '.' 2>/dev/null || echo '""')
  pp_log_json=$(printf '%s' "$pp_log_line" | jq -Rs '.' 2>/dev/null || echo '"Primed: 0 signals, 0 instincts"')

  json_ok "{\"signal_count\":$pp_signal_count,\"instinct_count\":$pp_instinct_count,\"prompt_section\":$pp_section_json,\"log_line\":$pp_log_json}"
  ;;
|
|
4585
|
+
|
|
4586
|
+
pheromone-expire)
  # Archive expired pheromone signals to the midden.
  # Usage: pheromone-expire [--phase-end-only]
  #
  # Two modes:
  #   --phase-end-only  Only expire signals where expires_at == "phase_end"
  #   (no flag)         Expire signals whose ISO-8601 expires_at timestamp is
  #                     <= now (pause-aware: time spent paused extends TTLs)
  #
  # NOTE(review): earlier docs claimed this arm also expires decay-weakened
  # signals (effective_strength < 0.1), but the jq selection below only
  # checks timestamps; decay-based deactivation happens at read time in
  # pheromone-read / pheromone-prime — confirm which is intended.

  phe_phase_end_only="false"
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --phase-end-only) phe_phase_end_only="true"; shift ;;
      *) shift ;;
    esac
  done

  phe_pheromones_file="$DATA_DIR/pheromones.json"
  phe_midden_dir="$DATA_DIR/midden"
  phe_midden_file="$phe_midden_dir/midden.json"

  # Handle missing pheromones.json gracefully (nothing to expire).
  # NOTE(review): `exit 0` terminates the whole script here — presumably
  # intentional since this runs as a CLI subcommand; confirm json_ok does
  # not already exit.
  if [[ ! -f "$phe_pheromones_file" ]]; then
    json_ok '{"expired_count":0,"remaining_active":0,"midden_total":0}'
    exit 0
  fi

  # Ensure midden directory and file exist before appending to them.
  mkdir -p "$phe_midden_dir"
  if [[ ! -f "$phe_midden_file" ]]; then
    printf '%s\n' '{"version":"1.0.0","archived_at_count":0,"signals":[]}' > "$phe_midden_file"
  fi

  phe_now_epoch=$(date +%s)
  phe_archived_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")

  # Compute pause_duration from COLONY_STATE.json (pause-aware TTL): time the
  # colony spent paused is added to each signal's expiry before comparison.
  phe_pause_duration=0
  if [[ -f "$DATA_DIR/COLONY_STATE.json" ]]; then
    phe_paused_at=$(jq -r '.paused_at // empty' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || true)
    phe_resumed_at=$(jq -r '.resumed_at // empty' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || true)
    if [[ -n "$phe_paused_at" && -n "$phe_resumed_at" ]]; then
      # BSD `date -j -f` first, then GNU `date -d`; 0 on failure.
      phe_paused_epoch=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$phe_paused_at" +%s 2>/dev/null || date -d "$phe_paused_at" +%s 2>/dev/null || echo 0)
      phe_resumed_epoch=$(date -j -f "%Y-%m-%dT%H:%M:%SZ" "$phe_resumed_at" +%s 2>/dev/null || date -d "$phe_resumed_at" +%s 2>/dev/null || echo 0)
      if [[ "$phe_resumed_epoch" -gt "$phe_paused_epoch" ]]; then
        phe_pause_duration=$(( phe_resumed_epoch - phe_paused_epoch ))
      fi
    fi
  fi

  # Identify expired signal IDs with jq; the bash side then flips .active
  # and archives the matching objects.
  if [[ "$phe_phase_end_only" == "true" ]]; then
    # Only expire signals where expires_at == "phase_end"
    phe_expired_ids=$(jq -r '.signals[] | select(.active == true and .expires_at == "phase_end") | .id' "$phe_pheromones_file" 2>/dev/null || true)
  else
    # Expire signals whose concrete expires_at timestamp has passed (pause-aware).
    phe_expired_ids=$(jq -r --argjson now "$phe_now_epoch" --argjson pause_secs "$phe_pause_duration" '
      .signals[] |
      select(.active == true) |
      select(
        (.expires_at != "phase_end" and .expires_at != null and .expires_at != "") and
        (
          # ISO-8601 timestamp expiry (pause-aware: add pause_duration to expires_at before comparing)
          (
            .expires_at |
            # Convert ISO-8601 to approximate epoch via string parsing
            (
              (split("T")[0] | split("-")) as $d |
              (split("T")[1] | split(":")) as $t |
              ($d[0] | tonumber) as $y |
              ($d[1] | tonumber) as $mo |
              ($d[2] | tonumber) as $day |
              ($t[0] | tonumber) as $h |
              ($t[1] | tonumber) as $m |
              (($t[2] // "0") | gsub("[^0-9]";"") | if . == "" then 0 else tonumber end) as $s |
              # Rough epoch: years*365.25*86400 + months*30.44*86400 + day*86400 + time
              (($y - 1970) * 31557600) + (($mo - 1) * 2629800) + (($day - 1) * 86400) + ($h * 3600) + ($m * 60) + $s
            )
          ) + $pause_secs <= $now
        )
      ) |
      .id
    ' "$phe_pheromones_file" 2>/dev/null || true)
  fi

  # Count expired signals (one ID per line; guarded by -n so grep -c >= 1).
  phe_expired_count=0
  if [[ -n "$phe_expired_ids" ]]; then
    phe_expired_count=$(echo "$phe_expired_ids" | grep -c . 2>/dev/null || echo 0)
  fi

  # If nothing to expire, return current counts and stop.
  if [[ "$phe_expired_count" -eq 0 ]]; then
    phe_remaining=$(jq '[.signals[] | select(.active == true)] | length' "$phe_pheromones_file" 2>/dev/null || echo 0)
    phe_midden_total=$(jq '.signals | length' "$phe_midden_file" 2>/dev/null || echo 0)
    json_ok "{\"expired_count\":0,\"remaining_active\":$phe_remaining,\"midden_total\":$phe_midden_total}"
    exit 0
  fi

  # Turn the newline-separated ID list into a JSON array for jq filters.
  phe_id_array=$(echo "$phe_expired_ids" | jq -R . | jq -s . 2>/dev/null || echo '[]')

  # Extract expired signal objects (with archived_at added) for the midden.
  phe_expired_objects=$(jq --argjson ids "$phe_id_array" --arg archived_at "$phe_archived_at" '
    [.signals[] | select(.id as $id | $ids | any(. == $id)) | . + {"archived_at": $archived_at, "active": false}]
  ' "$phe_pheromones_file" 2>/dev/null || echo '[]')

  # Update pheromones.json: set active=false for expired signals (do NOT remove them).
  phe_updated_pheromones=$(jq --argjson ids "$phe_id_array" '
    .signals = [.signals[] | if (.id as $id | $ids | any(. == $id)) then .active = false else . end]
  ' "$phe_pheromones_file" 2>/dev/null)

  # Only overwrite on jq success — an empty result would truncate the file.
  if [[ -n "$phe_updated_pheromones" ]]; then
    printf '%s\n' "$phe_updated_pheromones" > "$phe_pheromones_file"
  fi

  # Append expired signals to midden.json and refresh its archive counter.
  phe_midden_updated=$(jq --argjson new_signals "$phe_expired_objects" '
    .signals += $new_signals |
    .archived_at_count = (.signals | length)
  ' "$phe_midden_file" 2>/dev/null)

  if [[ -n "$phe_midden_updated" ]]; then
    printf '%s\n' "$phe_midden_updated" > "$phe_midden_file"
  fi

  phe_remaining_active=$(jq '[.signals[] | select(.active == true)] | length' "$phe_pheromones_file" 2>/dev/null || echo 0)
  phe_midden_total=$(jq '.signals | length' "$phe_midden_file" 2>/dev/null || echo 0)

  json_ok "{\"expired_count\":$phe_expired_count,\"remaining_active\":$phe_remaining_active,\"midden_total\":$phe_midden_total}"
  ;;
|
|
4718
|
+
|
|
4719
|
+
eternal-init)
  # Create ~/.aether/eternal/ and seed memory.json with its base schema.
  # Usage: eternal-init
  # Idempotent: an existing memory.json is left untouched.
  ei_eternal_dir="$HOME/.aether/eternal"
  ei_memory_file="$ei_eternal_dir/memory.json"
  ei_already_existed="false"

  mkdir -p "$ei_eternal_dir"

  if [[ -f "$ei_memory_file" ]]; then
    ei_already_existed="true"
  else
    ei_created_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
    # Heredoc keeps the seed schema readable; $ei_created_at is expanded.
    cat > "$ei_memory_file" <<EOF
{
  "version": "1.0.0",
  "created_at": "$ei_created_at",
  "colonies": [],
  "high_value_signals": [],
  "cross_session_patterns": []
}
EOF
  fi

  json_ok "{\"dir\":\"$ei_eternal_dir\",\"initialized\":true,\"already_existed\":$ei_already_existed}"
  ;;
|
|
4745
|
+
|
|
4746
|
+
# ============================================================================
|
|
4747
|
+
# XML Exchange Commands
|
|
4748
|
+
# ============================================================================
|
|
4749
|
+
|
|
4750
|
+
pheromone-export-xml)
  # Export pheromones.json to XML format.
  # Usage: pheromone-export-xml [output_file]
  # Default output: .aether/exchange/pheromones.xml
  pex_output="${1:-$SCRIPT_DIR/exchange/pheromones.xml}"
  pex_pheromones="$DATA_DIR/pheromones.json"

  # Graceful degradation: xmllint is a hard requirement for XML work.
  if ! command -v xmllint >/dev/null 2>&1; then
    json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."
  fi

  # The source JSON must exist before anything can be exported.
  if [[ ! -f "$pex_pheromones" ]]; then
    json_err "$E_FILE_NOT_FOUND" "Couldn't find pheromones.json. Try: run /ant:init first."
  fi

  # Make sure the destination directory exists, then delegate the actual
  # conversion to the exchange helper.
  mkdir -p "$(dirname "$pex_output")"
  source "$SCRIPT_DIR/exchange/pheromone-xml.sh"
  xml-pheromone-export "$pex_pheromones" "$pex_output"
  ;;
|
|
4777
|
+
|
|
4778
|
+
pheromone-import-xml)
    # Import pheromone signals from an XML file into pheromones.json.
    # Usage: pheromone-import-xml <xml_file> [colony_prefix]
    # When colony_prefix is provided, imported signal IDs are tagged with
    # "${prefix}:" before the merge, preventing ID collisions between colonies.

    pix_xml="${1:-}"
    pix_colony_prefix="${2:-}"
    pix_pheromones="$DATA_DIR/pheromones.json"

    if [[ -z "$pix_xml" ]]; then
        json_err "$E_VALIDATION_FAILED" "Missing XML file argument. Try: pheromone-import-xml <xml_file> [colony_prefix]."
    fi

    if [[ ! -f "$pix_xml" ]]; then
        json_err "$E_FILE_NOT_FOUND" "XML file not found: $pix_xml. Try: check the file path."
    fi

    # Graceful degradation: check for xmllint
    if ! command -v xmllint >/dev/null 2>&1; then
        json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."
    fi

    # Source the exchange script that provides xml-pheromone-import
    source "$SCRIPT_DIR/exchange/pheromone-xml.sh"

    # Import XML; the helper returns a JSON envelope
    pix_imported=$(xml-pheromone-import "$pix_xml")

    # Extract the actual signal array from result.json | fromjson | .signals
    # (result.signals is an integer count — must unpack result.json to get the array)
    pix_raw_signals=$(echo "$pix_imported" | jq -r '.result.json // "{}"' | jq -c '.signals // []' 2>/dev/null || echo '[]')

    # Apply colony prefix to imported signal IDs (when provided)
    # This prevents ID collisions and tags signals with their source colony
    if [[ -n "$pix_colony_prefix" ]]; then
        pix_prefixed_signals=$(echo "$pix_raw_signals" | jq --arg prefix "$pix_colony_prefix" '[.[] | .id = ($prefix + ":" + .id)]' 2>/dev/null || echo '[]')
    else
        pix_prefixed_signals="$pix_raw_signals"
    fi

    # If pheromones.json exists, merge; otherwise create
    if [[ -f "$pix_pheromones" ]]; then
        # Merge: imported signals first, existing signals last.
        # group_by(.id) | map(last) keeps the current colony's version on an
        # ID collision — the current colony always wins.
        pix_merged=$(jq --argjson new_signals "$pix_prefixed_signals" '
            . as $existing |
            {
                signals: ([$new_signals[], $existing.signals[]] | group_by(.id) | map(last)),
                version: $existing.version,
                colony_id: $existing.colony_id
            }
        ' "$pix_pheromones" 2>/dev/null)

        if [[ -n "$pix_merged" ]]; then
            printf '%s\n' "$pix_merged" > "$pix_pheromones"
        fi
    else
        # BUG FIX: the create path promised by the comment above was missing —
        # importing into a fresh colony silently wrote nothing yet reported
        # success. Create pheromones.json seeded with the imported signals.
        mkdir -p "$(dirname "$pix_pheromones")"
        jq -n --argjson signals "$pix_prefixed_signals" \
            '{version: "1.0.0", colony_id: "unknown", signals: $signals}' > "$pix_pheromones"
    fi

    pix_count=$(echo "$pix_raw_signals" | jq 'length' 2>/dev/null || echo 0)
    json_ok "{\"imported\":true,\"signal_count\":$pix_count,\"source\":\"$pix_xml\"}"
    ;;
|
|
4839
|
+
|
|
4840
|
+
pheromone-validate-xml)
    # Validate a pheromone XML file against the bundled XSD schema.
    # Usage: pheromone-validate-xml <xml_file>

    pvx_xml="${1:-}"
    pvx_xsd="$SCRIPT_DIR/schemas/pheromone.xsd"

    # Argument and file-existence guards
    [[ -n "$pvx_xml" ]] || json_err "$E_VALIDATION_FAILED" "Missing XML file argument. Try: pheromone-validate-xml <xml_file>."
    [[ -f "$pvx_xml" ]] || json_err "$E_FILE_NOT_FOUND" "XML file not found: $pvx_xml. Try: check the file path."

    # Graceful degradation: schema validation needs xmllint
    command -v xmllint >/dev/null 2>&1 || json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."

    # Pull in the exchange helpers, then delegate to the validator
    source "$SCRIPT_DIR/exchange/pheromone-xml.sh"
    xml-pheromone-validate "$pvx_xml" "$pvx_xsd"
    ;;
|
|
4866
|
+
|
|
4867
|
+
wisdom-export-xml)
    # Export queen wisdom to XML format.
    # Usage: wisdom-export-xml [input_json] [output_xml]
    # Default input: .aether/data/queen-wisdom.json
    # Default output: .aether/exchange/queen-wisdom.xml

    wex_input="${1:-$DATA_DIR/queen-wisdom.json}"
    wex_output="${2:-$SCRIPT_DIR/exchange/queen-wisdom.xml}"

    # Graceful degradation: check for xmllint
    if ! command -v xmllint >/dev/null 2>&1; then
        json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."
    fi

    # Look for wisdom data: check specified file, then COLONY_STATE memory
    if [[ ! -f "$wex_input" && -f "$DATA_DIR/COLONY_STATE.json" ]]; then
        wex_memory=$(jq '.memory // {}' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo '{}')
        if [[ "$wex_memory" != "{}" && "$wex_memory" != "null" ]]; then
            # BUG FIX: the previous printf-assembled JSON used \" escapes inside
            # single-quoted jq filters within $(...); command substitution
            # re-parses its contents, so the backslashes reached jq verbatim
            # and every filter failed — colony_id and patterns were always
            # empty. Build the document with jq instead of string splicing.
            wex_colony_id=$(jq -r '.goal // "unknown"' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo "unknown")
            mkdir -p "$(dirname "$wex_input")"
            printf '%s\n' "$wex_memory" | jq \
                --arg created "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
                --arg colony_id "$wex_colony_id" \
                '{
                    version: "1.0.0",
                    metadata: {created: $created, colony_id: $colony_id},
                    philosophies: [],
                    patterns: [.instincts // [] | .[] | {id: (. | @base64), content: ., confidence: 0.7, domain: "general", source: "colony_memory"}]
                }' > "$wex_input" 2>/dev/null || true
        fi
    fi

    # If still no wisdom data, create a minimal skeleton so export always has input
    if [[ ! -f "$wex_input" ]]; then
        mkdir -p "$(dirname "$wex_input")"
        jq -n --arg created "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
            '{version: "1.0.0", metadata: {created: $created, colony_id: "unknown"}, philosophies: [], patterns: []}' > "$wex_input"
    fi

    # Ensure output directory exists
    mkdir -p "$(dirname "$wex_output")"

    # Source the exchange script and delegate the actual export
    source "$SCRIPT_DIR/exchange/wisdom-xml.sh"
    xml-wisdom-export "$wex_input" "$wex_output"
    ;;
|
|
4920
|
+
|
|
4921
|
+
wisdom-import-xml)
    # Import wisdom from an XML file into JSON format.
    # Usage: wisdom-import-xml <xml_file> [output_json]

    wix_xml="${1:-}"
    wix_output="${2:-$DATA_DIR/queen-wisdom.json}"

    # Argument and file-existence guards
    [[ -n "$wix_xml" ]] || json_err "$E_VALIDATION_FAILED" "Missing XML file argument. Try: wisdom-import-xml <xml_file> [output_json]."
    [[ -f "$wix_xml" ]] || json_err "$E_FILE_NOT_FOUND" "XML file not found: $wix_xml. Try: check the file path."

    # Graceful degradation: the XML layer needs xmllint
    command -v xmllint >/dev/null 2>&1 || json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."

    # Make sure the destination directory exists before writing
    mkdir -p "$(dirname "$wix_output")"

    # Pull in the exchange helpers, then delegate the import
    source "$SCRIPT_DIR/exchange/wisdom-xml.sh"
    xml-wisdom-import "$wix_xml" "$wix_output"
    ;;
|
|
4950
|
+
|
|
4951
|
+
registry-export-xml)
    # Export the colony registry to XML format.
    # Usage: registry-export-xml [input_json] [output_xml]
    # Default input: .aether/data/colony-registry.json
    # Default output: .aether/exchange/colony-registry.xml

    rex_input="${1:-$DATA_DIR/colony-registry.json}"
    rex_output="${2:-$SCRIPT_DIR/exchange/colony-registry.xml}"

    # Graceful degradation: the XML layer needs xmllint
    command -v xmllint >/dev/null 2>&1 || json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."

    # No registry file on disk? Generate one on demand from chamber manifests.
    if [[ ! -f "$rex_input" ]]; then
        rex_chambers_dir="$AETHER_ROOT/.aether/chambers"
        rex_generated_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
        rex_colonies="[]"

        if [[ -d "$rex_chambers_dir" ]]; then
            # Collect one summary object per chamber manifest.json
            rex_colonies=$(
                for manifest in "$rex_chambers_dir"/*/manifest.json; do
                    [[ -f "$manifest" ]] || continue
                    jq -c '{
                        id: (.colony_id // .goal // "unknown"),
                        name: (.goal // "Unnamed Colony"),
                        created_at: (.created_at // "unknown"),
                        sealed_at: (.sealed_at // null),
                        status: (if .sealed_at then "sealed" else "active" end),
                        chamber: input_filename
                    }' "$manifest" 2>/dev/null || true
                done | jq -s '.' 2>/dev/null || echo '[]'
            )
        fi

        mkdir -p "$(dirname "$rex_input")"
        printf '%s\n' "{
  \"version\": \"1.0.0\",
  \"generated_at\": \"$rex_generated_at\",
  \"colonies\": $rex_colonies
}" > "$rex_input"
    fi

    # Ensure the output directory exists, then delegate the export
    mkdir -p "$(dirname "$rex_output")"
    source "$SCRIPT_DIR/exchange/registry-xml.sh"
    xml-registry-export "$rex_input" "$rex_output"
    ;;
|
|
5005
|
+
|
|
5006
|
+
registry-import-xml)
    # Import the colony registry from an XML file into JSON format.
    # Usage: registry-import-xml <xml_file> [output_json]

    rix_xml="${1:-}"
    rix_output="${2:-$DATA_DIR/colony-registry.json}"

    # Argument and file-existence guards
    [[ -n "$rix_xml" ]] || json_err "$E_VALIDATION_FAILED" "Missing XML file argument. Try: registry-import-xml <xml_file> [output_json]."
    [[ -f "$rix_xml" ]] || json_err "$E_FILE_NOT_FOUND" "XML file not found: $rix_xml. Try: check the file path."

    # Graceful degradation: the XML layer needs xmllint
    command -v xmllint >/dev/null 2>&1 || json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."

    # Make sure the destination directory exists before writing
    mkdir -p "$(dirname "$rix_output")"

    # Pull in the exchange helpers, then delegate the import
    source "$SCRIPT_DIR/exchange/registry-xml.sh"
    xml-registry-import "$rix_xml" "$rix_output"
    ;;
|
|
5035
|
+
|
|
5036
|
+
colony-archive-xml)
    # Export a combined colony archive XML containing pheromones, wisdom,
    # and registry sections.
    # Usage: colony-archive-xml [output_file]
    # Default output: .aether/exchange/colony-archive.xml
    # Always filters to active-only pheromone signals.

    # Graceful degradation: check for xmllint
    if ! command -v xmllint >/dev/null 2>&1; then
        json_err "$E_FEATURE_UNAVAILABLE" "xmllint is not installed. Try: xcode-select --install on macOS."
    fi

    cax_output="${1:-$SCRIPT_DIR/exchange/colony-archive.xml}"
    mkdir -p "$(dirname "$cax_output")"

    # Step 1: Filter active-only pheromone signals to a temp file
    cax_tmp_pheromones=$(mktemp)
    if [[ -f "$DATA_DIR/pheromones.json" ]]; then
        jq '{
            version: .version,
            colony_id: .colony_id,
            generated_at: .generated_at,
            signals: [.signals[] | select(.active == true)]
        }' "$DATA_DIR/pheromones.json" > "$cax_tmp_pheromones" 2>/dev/null
    else
        printf '%s\n' '{"version":"1.0","colony_id":"unknown","generated_at":"","signals":[]}' > "$cax_tmp_pheromones"
    fi

    # Step 2: Export each section to temp XML files
    cax_tmp_dir=$(mktemp -d)

    # Pheromone section (using filtered active-only)
    source "$SCRIPT_DIR/exchange/pheromone-xml.sh"
    xml-pheromone-export "$cax_tmp_pheromones" "$cax_tmp_dir/pheromones.xml" 2>/dev/null || true

    # Wisdom section — reuse wisdom-export-xml fallback logic
    source "$SCRIPT_DIR/exchange/wisdom-xml.sh"
    cax_wisdom_input="$DATA_DIR/queen-wisdom.json"
    if [[ ! -f "$cax_wisdom_input" && -f "$DATA_DIR/COLONY_STATE.json" ]]; then
        cax_wex_memory=$(jq '.memory // {}' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo '{}')
        if [[ "$cax_wex_memory" != "{}" && "$cax_wex_memory" != "null" ]]; then
            cax_wisdom_input="$cax_tmp_dir/wisdom-input.json"
            # BUG FIX: colony_id was previously read with the jq filter
            # '.goal // \"unknown\"' inside $(...) — command substitution
            # re-parses its contents, so the backslashes reached jq verbatim,
            # the filter failed, and colony_id came out empty. Build the
            # document with jq instead of printf string splicing.
            cax_colony_goal=$(jq -r '.goal // "unknown"' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo "unknown")
            printf '%s\n' "$cax_wex_memory" | jq \
                --arg created "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
                --arg colony_id "$cax_colony_goal" \
                '{
                    version: "1.0.0",
                    metadata: {created: $created, colony_id: $colony_id},
                    philosophies: [],
                    patterns: [.instincts // [] | .[] | {id: (. | @base64), content: ., confidence: 0.7, domain: "general", source: "colony_memory"}]
                }' > "$cax_wisdom_input" 2>/dev/null || true
        fi
    fi
    if [[ -f "$cax_wisdom_input" ]]; then
        xml-wisdom-export "$cax_wisdom_input" "$cax_tmp_dir/wisdom.xml" 2>/dev/null || true
    fi

    # Registry section — reuse registry-export-xml on-demand generation logic
    source "$SCRIPT_DIR/exchange/registry-xml.sh"
    cax_registry_input="$DATA_DIR/colony-registry.json"
    if [[ ! -f "$cax_registry_input" ]]; then
        cax_rex_chambers_dir="$AETHER_ROOT/.aether/chambers"
        cax_rex_generated_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
        cax_rex_colonies="[]"
        if [[ -d "$cax_rex_chambers_dir" ]]; then
            cax_rex_colonies=$(
                for manifest in "$cax_rex_chambers_dir"/*/manifest.json; do
                    [[ -f "$manifest" ]] || continue
                    jq -c '{
                        id: (.colony_id // .goal // "unknown"),
                        name: (.goal // "Unnamed Colony"),
                        created_at: (.created_at // "unknown"),
                        sealed_at: (.sealed_at // null),
                        status: (if .sealed_at then "sealed" else "active" end),
                        chamber: input_filename
                    }' "$manifest" 2>/dev/null || true
                done | jq -s '.' 2>/dev/null || echo '[]'
            )
        fi
        cax_registry_input="$cax_tmp_dir/registry-input.json"
        jq -n --arg generated_at "$cax_rex_generated_at" \
            --argjson colonies "$cax_rex_colonies" \
            '{version: "1.0.0", generated_at: $generated_at, colonies: $colonies}' > "$cax_registry_input"
    fi
    xml-registry-export "$cax_registry_input" "$cax_tmp_dir/registry.xml" 2>/dev/null || true

    # Step 3: Build combined XML. Colony id is the slugified goal
    # (lowercased, non-alphanumerics collapsed to '-').
    cax_colony_id=$(jq -r '.goal // "unknown"' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null | tr '[:upper:]' '[:lower:]' | tr -cs '[:alnum:]' '-' | sed 's/^-//;s/-$//')
    [[ -z "$cax_colony_id" || "$cax_colony_id" == "unknown" ]] && cax_colony_id="unknown"
    cax_sealed_at=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
    cax_pheromone_count=$(jq '.signals | length' "$cax_tmp_pheromones" 2>/dev/null || echo 0)

    {
        printf '<?xml version="1.0" encoding="UTF-8"?>\n'
        printf '<colony-archive\n'
        printf '  xmlns="http://aether.colony/schemas/archive/1.0"\n'
        printf '  colony_id="%s"\n' "$cax_colony_id"
        printf '  sealed_at="%s"\n' "$cax_sealed_at"
        printf '  version="1.0.0"\n'
        printf '  pheromone_count="%s">\n' "$cax_pheromone_count"

        # Append each section, stripping the per-file XML declaration
        for cax_section in pheromones wisdom registry; do
            if [[ -f "$cax_tmp_dir/$cax_section.xml" ]]; then
                sed '1{/^<?xml/d;}' "$cax_tmp_dir/$cax_section.xml"
            fi
        done

        printf '</colony-archive>\n'
    } > "$cax_output"

    # Step 4: Validate well-formedness of the combined document
    if xmllint --noout "$cax_output" 2>/dev/null; then
        cax_valid=true
    else
        cax_valid=false
    fi

    # Step 5: Cleanup temp files
    rm -rf "$cax_tmp_dir" "$cax_tmp_pheromones"

    json_ok "{\"path\":\"$cax_output\",\"valid\":$cax_valid,\"colony_id\":\"$cax_colony_id\",\"pheromone_count\":$cax_pheromone_count}"
    ;;
|
|
3676
5169
|
|
|
3677
5170
|
# ============================================================================
|
|
@@ -3684,7 +5177,24 @@ ${entry}" "$queen_file" > "$tmp_file"
|
|
|
3684
5177
|
session_id="${2:-$(date +%s)_$(openssl rand -hex 4 2>/dev/null || echo $$)}"
|
|
3685
5178
|
goal="${3:-}"
|
|
3686
5179
|
|
|
5180
|
+
# ARCH-03: Rotate spawn-tree.txt at session start to prevent unbounded growth.
# Archives previous session's tree to a timestamped file; caps archive count at 5.
# Globals: DATA_DIR (read) — colony data directory.
_rotate_spawn_tree() {
    local tree_file="$DATA_DIR/spawn-tree.txt"
    # Nothing to rotate if the file is absent or empty.
    [[ -f "$tree_file" ]] && [[ -s "$tree_file" ]] || return 0
    local archive_dir="$DATA_DIR/spawn-tree-archive"
    mkdir -p "$archive_dir"
    local archive_ts
    archive_ts=$(date +%Y%m%d_%H%M%S)
    cp "$tree_file" "$archive_dir/spawn-tree.${archive_ts}.txt" 2>/dev/null || true
    : > "$tree_file"  # Truncate in-place — preserves file handle for tail -f watchers
    # Keep only the 5 newest archives.
    # BUG FIX: previously pruned via `ls -t | tail | xargs rm`, which
    # word-splits and mangles paths containing whitespace; read line-by-line
    # and remove each path explicitly instead.
    ls -t "$archive_dir"/spawn-tree.*.txt 2>/dev/null | tail -n +6 \
        | while IFS= read -r old_archive; do
            rm -f -- "$old_archive" 2>/dev/null || true
        done
}
_rotate_spawn_tree
|
|
5195
|
+
|
|
3687
5196
|
session_file="$DATA_DIR/session.json"
|
|
5197
|
+
baseline=$(git rev-parse HEAD 2>/dev/null || echo "")
|
|
3688
5198
|
|
|
3689
5199
|
cat > "$session_file" << EOF
|
|
3690
5200
|
{
|
|
@@ -3697,6 +5207,7 @@ ${entry}" "$queen_file" > "$tmp_file"
|
|
|
3697
5207
|
"current_milestone": "First Mound",
|
|
3698
5208
|
"suggested_next": "/ant:plan",
|
|
3699
5209
|
"context_cleared": false,
|
|
5210
|
+
"baseline_commit": "$baseline",
|
|
3700
5211
|
"resumed_at": null,
|
|
3701
5212
|
"active_todos": [],
|
|
3702
5213
|
"summary": "Session initialized"
|
|
@@ -3740,6 +5251,9 @@ EOF
|
|
|
3740
5251
|
current_milestone=$(jq -r '.milestone // "First Mound"' "$DATA_DIR/COLONY_STATE.json" 2>/dev/null || echo "$current_milestone")
|
|
3741
5252
|
fi
|
|
3742
5253
|
|
|
5254
|
+
# Capture current git HEAD for drift detection
|
|
5255
|
+
baseline=$(git rev-parse HEAD 2>/dev/null || echo "")
|
|
5256
|
+
|
|
3743
5257
|
# Build updated session
|
|
3744
5258
|
echo "$current_session" | jq --arg cmd "$cmd_run" \
|
|
3745
5259
|
--arg ts "$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \
|
|
@@ -3749,6 +5263,7 @@ EOF
|
|
|
3749
5263
|
--argjson phase "$current_phase" \
|
|
3750
5264
|
--arg milestone "$current_milestone" \
|
|
3751
5265
|
--argjson todos "$todos" \
|
|
5266
|
+
--arg baseline "$baseline" \
|
|
3752
5267
|
'.last_command = $cmd |
|
|
3753
5268
|
.last_command_at = $ts |
|
|
3754
5269
|
.suggested_next = $suggested |
|
|
@@ -3756,7 +5271,8 @@ EOF
|
|
|
3756
5271
|
.colony_goal = $goal |
|
|
3757
5272
|
.current_phase = $phase |
|
|
3758
5273
|
.current_milestone = $milestone |
|
|
3759
|
-
.active_todos = $todos
|
|
5274
|
+
.active_todos = $todos |
|
|
5275
|
+
.baseline_commit = $baseline' > "$session_file"
|
|
3760
5276
|
|
|
3761
5277
|
json_ok "{\"updated\":true,\"command\":\"$cmd_run\"}"
|
|
3762
5278
|
;;
|
|
@@ -3790,18 +5306,18 @@ EOF
|
|
|
3790
5306
|
;;
|
|
3791
5307
|
|
|
3792
5308
|
session-is-stale)
|
|
3793
|
-
# Check if session is stale (returns
|
|
5309
|
+
# Check if session is stale (returns JSON with is_stale boolean)
|
|
3794
5310
|
session_file="$DATA_DIR/session.json"
|
|
3795
5311
|
|
|
3796
5312
|
if [[ ! -f "$session_file" ]]; then
|
|
3797
|
-
|
|
5313
|
+
json_ok '{"is_stale":true}'
|
|
3798
5314
|
exit 0
|
|
3799
5315
|
fi
|
|
3800
5316
|
|
|
3801
5317
|
last_cmd_ts=$(jq -r '.last_command_at // .started_at // empty' "$session_file" 2>/dev/null)
|
|
3802
5318
|
|
|
3803
5319
|
if [[ -z "$last_cmd_ts" ]]; then
|
|
3804
|
-
|
|
5320
|
+
json_ok '{"is_stale":true}'
|
|
3805
5321
|
exit 0
|
|
3806
5322
|
fi
|
|
3807
5323
|
|
|
@@ -3809,7 +5325,11 @@ EOF
|
|
|
3809
5325
|
now_epoch=$(date +%s)
|
|
3810
5326
|
age_hours=$(( (now_epoch - last_epoch) / 3600 ))
|
|
3811
5327
|
|
|
3812
|
-
[[ $age_hours -gt 24 ]]
|
|
5328
|
+
if [[ $age_hours -gt 24 ]]; then
|
|
5329
|
+
json_ok '{"is_stale":true}'
|
|
5330
|
+
else
|
|
5331
|
+
json_ok '{"is_stale":false}'
|
|
5332
|
+
fi
|
|
3813
5333
|
;;
|
|
3814
5334
|
|
|
3815
5335
|
session-clear)
|
|
@@ -3841,16 +5361,34 @@ EOF
|
|
|
3841
5361
|
'.resumed_at = $ts | .context_cleared = false' "$session_file" > "$session_file.tmp" && mv "$session_file.tmp" "$session_file"
|
|
3842
5362
|
json_ok "{\"resumed\":true,\"timestamp\":\"$(date -u +"%Y-%m-%dT%H:%M:%SZ")\"}"
|
|
3843
5363
|
else
|
|
3844
|
-
json_err "$E_RESOURCE_NOT_FOUND" "No session to mark as resumed"
|
|
5364
|
+
json_err "$E_RESOURCE_NOT_FOUND" "No active session to mark as resumed. Try: run /ant:init to start a new session."
|
|
3845
5365
|
fi
|
|
3846
5366
|
;;
|
|
3847
5367
|
|
|
3848
5368
|
session-summary)
|
|
3849
|
-
# Get human-readable
|
|
5369
|
+
# Get session summary (human-readable or JSON)
|
|
3850
5370
|
session_file="$DATA_DIR/session.json"
|
|
5371
|
+
json_mode="false"
|
|
5372
|
+
|
|
5373
|
+
# Parse --json flag (command name already shifted by main dispatch)
|
|
5374
|
+
while [[ $# -gt 0 ]]; do
|
|
5375
|
+
case "$1" in
|
|
5376
|
+
--json)
|
|
5377
|
+
json_mode="true"
|
|
5378
|
+
shift
|
|
5379
|
+
;;
|
|
5380
|
+
*)
|
|
5381
|
+
shift
|
|
5382
|
+
;;
|
|
5383
|
+
esac
|
|
5384
|
+
done
|
|
3851
5385
|
|
|
3852
5386
|
if [[ ! -f "$session_file" ]]; then
|
|
3853
|
-
|
|
5387
|
+
if [[ "$json_mode" == "true" ]]; then
|
|
5388
|
+
json_ok '{"exists":false,"goal":null,"phase":0}'
|
|
5389
|
+
else
|
|
5390
|
+
echo "No active session found."
|
|
5391
|
+
fi
|
|
3854
5392
|
exit 0
|
|
3855
5393
|
fi
|
|
3856
5394
|
|
|
@@ -3862,15 +5400,151 @@ EOF
|
|
|
3862
5400
|
suggested=$(jq -r '.suggested_next // "None"' "$session_file")
|
|
3863
5401
|
cleared=$(jq -r '.context_cleared // false' "$session_file")
|
|
3864
5402
|
|
|
3865
|
-
|
|
3866
|
-
|
|
3867
|
-
|
|
3868
|
-
|
|
3869
|
-
|
|
3870
|
-
|
|
3871
|
-
|
|
3872
|
-
|
|
3873
|
-
|
|
5403
|
+
if [[ "$json_mode" == "true" ]]; then
|
|
5404
|
+
# Escape goal for JSON
|
|
5405
|
+
goal_escaped=$(echo "$goal" | jq -Rs . | tr -d '\n')
|
|
5406
|
+
milestone_escaped=$(echo "$milestone" | jq -Rs . | tr -d '\n')
|
|
5407
|
+
last_cmd_escaped=$(echo "$last_cmd" | jq -Rs . | tr -d '\n')
|
|
5408
|
+
last_at_escaped=$(echo "$last_at" | jq -Rs . | tr -d '\n')
|
|
5409
|
+
suggested_escaped=$(echo "$suggested" | jq -Rs . | tr -d '\n')
|
|
5410
|
+
json_ok "{\"exists\":true,\"goal\":$goal_escaped,\"phase\":$phase,\"milestone\":$milestone_escaped,\"last_command\":$last_cmd_escaped,\"last_active\":$last_at_escaped,\"suggested_next\":$suggested_escaped,\"context_cleared\":$cleared}"
|
|
5411
|
+
else
|
|
5412
|
+
echo "Session Summary"
|
|
5413
|
+
echo "=================="
|
|
5414
|
+
echo "Goal: $goal"
|
|
5415
|
+
[[ "$phase" != "0" ]] && echo "Phase: $phase"
|
|
5416
|
+
echo "Milestone: $milestone"
|
|
5417
|
+
echo "Last Command: $last_cmd"
|
|
5418
|
+
echo "Last Active: $last_at"
|
|
5419
|
+
[[ "$suggested" != "None" ]] && echo "Suggested Next: $suggested"
|
|
5420
|
+
[[ "$cleared" == "true" ]] && echo "Status: Context was cleared"
|
|
5421
|
+
fi
|
|
5422
|
+
;;
|
|
5423
|
+
|
|
5424
|
+
generate-progress-bar)
|
|
5425
|
+
generate-progress-bar "$@"
|
|
5426
|
+
;;
|
|
5427
|
+
print-standard-banner)
|
|
5428
|
+
print-standard-banner "$@"
|
|
5429
|
+
;;
|
|
5430
|
+
print-next-up)
|
|
5431
|
+
print-next-up "$@"
|
|
5432
|
+
;;
|
|
5433
|
+
|
|
5434
|
+
# ============================================
|
|
5435
|
+
# LOCK MANAGEMENT
|
|
5436
|
+
# ============================================
|
|
5437
|
+
|
|
5438
|
+
force-unlock)
    # Emergency lock cleanup — removes all lock files.
    # Usage: force-unlock [--yes]
    # Without --yes, lists locks and asks for confirmation in interactive mode.
    lock_dir="${AETHER_ROOT:-.}/.aether/locks"
    auto_yes=false
    [[ "${1:-}" == "--yes" ]] && auto_yes=true

    if [[ ! -d "$lock_dir" ]]; then
        json_ok '{"removed":0,"message":"No locks directory found"}'
        exit 0
    fi

    lock_files=$(find "$lock_dir" -name "*.lock" -o -name "*.lock.pid" 2>/dev/null)

    if [[ -z "$lock_files" ]]; then
        json_ok '{"removed":0,"message":"No lock files found"}'
        exit 0
    fi

    # BUG FIX: `$(... | grep -c '\.lock$' || echo "0")` emitted "0" TWICE when
    # no .lock file matched — grep -c prints 0 AND exits non-zero, so the
    # fallback echo also ran, leaving a two-line value that corrupted both the
    # printf %d prompt and the JSON below. Capture first, default on failure.
    lock_count=$(printf '%s\n' "$lock_files" | grep -c '\.lock$') || lock_count=0

    if [[ "$auto_yes" != "true" ]]; then
        if [[ -t 2 ]]; then
            # Interactive: list each lock (with its recorded PID) and confirm
            echo "" >&2
            echo "Lock files found in $lock_dir:" >&2
            while IFS= read -r f; do
                [[ "$f" == *.pid ]] && continue
                pid_content=$(cat "${f}.pid" 2>/dev/null || echo "unknown")
                echo "  $f (PID: $pid_content)" >&2
            done <<< "$lock_files"
            printf "Remove all %d lock(s)? [y/N] " "$lock_count" >&2
            read -r response < /dev/tty
            if [[ ! "$response" =~ ^[Yy]$ ]]; then
                json_ok '{"removed":0,"message":"Cancelled by user"}'
                exit 0
            fi
        else
            json_err "$E_VALIDATION_FAILED" "force-unlock requires --yes flag in non-interactive mode"
        fi
    fi

    rm -f "$lock_dir"/*.lock "$lock_dir"/*.lock.pid 2>/dev/null || true
    export LOCK_ACQUIRED=false
    export CURRENT_LOCK=""
    json_ok "{\"removed\":$lock_count,\"message\":\"All locks cleared\"}"
    ;;
|
|
5485
|
+
|
|
5486
|
+
#=============================================================================
|
|
5487
|
+
# SEMANTIC COMMANDS
|
|
5488
|
+
#=============================================================================
|
|
5489
|
+
|
|
5490
|
+
semantic-init)
|
|
5491
|
+
# Initialize semantic store
|
|
5492
|
+
semantic-init
|
|
5493
|
+
;;
|
|
5494
|
+
|
|
5495
|
+
semantic-index)
    # Index a piece of text into the semantic store for later search.
    # Usage: semantic-index <text> <source> [entry_id]
    # Note: this dispatch arm reads $2.. (the command name occupies $1 here).
    text="${2:-}"
    source="${3:-unknown}"
    entry_id="${4:-}"

    if [[ -z "$text" ]]; then
        json_err "$E_VALIDATION_FAILED" "semantic-index requires text argument"
        exit 1
    fi
    # Delegate to the semantic-index shell function of the same name.
    semantic-index "$text" "$source" "$entry_id"
    ;;
|
|
5509
|
+
|
|
5510
|
+
semantic-search)
    # Search the semantic store for entries similar to the query.
    # Usage: semantic-search <query> [top_k] [threshold] [source_filter]
    # Note: this dispatch arm reads $2.. (the command name occupies $1 here).
    query="${2:-}"
    top_k="${3:-5}"
    threshold="${4:-0.5}"
    source_filter="${5:-}"

    if [[ -z "$query" ]]; then
        json_err "$E_VALIDATION_FAILED" "semantic-search requires query argument"
        exit 1
    fi
    # Delegate to the semantic-search shell function of the same name.
    semantic-search "$query" "$top_k" "$threshold" "$source_filter"
    ;;
|
|
5525
|
+
|
|
5526
|
+
semantic-rebuild)
|
|
5527
|
+
# Rebuild semantic index from all data sources
|
|
5528
|
+
semantic-rebuild
|
|
5529
|
+
;;
|
|
5530
|
+
|
|
5531
|
+
semantic-status)
|
|
5532
|
+
# Get semantic layer status
|
|
5533
|
+
semantic-status
|
|
5534
|
+
;;
|
|
5535
|
+
|
|
5536
|
+
semantic-context)
|
|
5537
|
+
# Get context for task (for worker injection)
|
|
5538
|
+
# Usage: semantic-context <task_description> [max_results]
|
|
5539
|
+
task="${2:-}"
|
|
5540
|
+
max_results="${3:-3}"
|
|
5541
|
+
|
|
5542
|
+
if [[ -z "$task" ]]; then
|
|
5543
|
+
json_ok "[]" "No task provided"
|
|
5544
|
+
exit 0
|
|
5545
|
+
fi
|
|
5546
|
+
|
|
5547
|
+
semantic-get-context "$task" "$max_results"
|
|
3874
5548
|
;;
|
|
3875
5549
|
|
|
3876
5550
|
*)
|