@psiclawops/hypermem 0.8.1 → 0.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/install.sh ADDED
@@ -0,0 +1,516 @@
1
+ #!/usr/bin/env bash
2
+ # HyperMem Installer
3
+ # curl -fsSL https://raw.githubusercontent.com/PsiClawOps/hypermem/main/install.sh | bash
4
+ set -euo pipefail
5
+
6
+ # ─────────────────────────────────────────────
7
+ # Colors
8
+ # ─────────────────────────────────────────────
9
# ANSI escape sequences used by the output helpers. Stored literally
# (single-quoted) and interpreted later by `echo -e`.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
CYAN='\033[0;36m'
BOLD='\033[1m'
DIM='\033[2m'
NC='\033[0m' # reset all attributes
16
+
17
+ # ─────────────────────────────────────────────
18
+ # Banner
19
+ # ─────────────────────────────────────────────
20
# Print the HyperMem ASCII-art banner plus tagline.
# Uses the CYAN/BOLD/DIM/NC color variables defined at the top of the
# script; escapes are expanded via `echo -e`.
banner() {
  local art_line
  local art=(
    " ██╗ ██╗██╗ ██╗██████╗ ███████╗██████╗ ███╗ ███╗███████╗███╗ ███╗"
    " ██║ ██║╚██╗ ██╔╝██╔══██╗██╔════╝██╔══██╗████╗ ████║██╔════╝████╗ ████║"
    " ███████║ ╚████╔╝ ██████╔╝█████╗ ██████╔╝██╔████╔██║█████╗ ██╔████╔██║"
    " ██╔══██║ ╚██╔╝ ██╔═══╝ ██╔══╝ ██╔══██╗██║╚██╔╝██║██╔══╝ ██║╚██╔╝██║"
    " ██║ ██║ ██║ ██║ ███████╗██║ ██║██║ ╚═╝ ██║███████╗██║ ╚═╝ ██║"
    " ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝"
  )
  echo ""
  echo -e "${CYAN}${BOLD}"
  for art_line in "${art[@]}"; do
    echo "$art_line"
  done
  echo -e "${NC}"
  echo -e " ${DIM}The memory layer for OpenClaw agents${NC}"
  echo ""
}
33
+
34
+ # ─────────────────────────────────────────────
35
+ # Helpers
36
+ # ─────────────────────────────────────────────
37
# Output helpers. info/success/warn write to stdout, error to stderr,
# and die prints an error then aborts the installer with status 1.
info() {
  echo -e " ${CYAN}→${NC} $*"
}
success() {
  echo -e " ${GREEN}✓${NC} $*"
}
warn() {
  echo -e " ${YELLOW}⚠${NC} $*"
}
error() {
  echo -e " ${RED}✗${NC} $*" >&2
}
die() {
  error "$*"
  exit 1
}
42
+
43
# prompt <var_name> <question> [default]
#
# Ask the operator a question on the controlling terminal and store the
# answer into the variable named by $1 (written with `printf -v`, no eval).
# If no TTY is available: use the default when one was given, otherwise
# abort with a hint. Fix vs. original: the answer is held in a local
# (`__pr_reply`) instead of a leaked global `reply`; the unusual name
# avoids a `printf -v "$var"` collision should a caller target that name.
prompt() {
  local var="$1" question="$2" default="${3:-}"
  local __pr_reply
  if [[ -n "$default" ]]; then
    echo -ne " ${BOLD}${question}${NC} ${DIM}[${default}]${NC} "
  else
    echo -ne " ${BOLD}${question}${NC} "
  fi
  if ! read -r __pr_reply </dev/tty; then
    # No readable TTY (e.g. `curl | bash` without /dev/tty)
    [[ -n "$default" ]] || die "Cannot read input (not a terminal?). Run: bash <(curl -fsSL ...) instead."
    __pr_reply="$default"
  fi
  # Empty answer falls back to the default when one exists
  [[ -z "$__pr_reply" && -n "$default" ]] && __pr_reply="$default"
  printf -v "$var" '%s' "$__pr_reply"
}
55
+
56
# confirm <question> — yes/no prompt on the controlling terminal.
# Returns 0 when the answer starts with y/Y; 1 otherwise, including when
# no TTY is available (safe "no" default).
# Fix vs. original: the answer is a local, not a leaked global `reply`.
confirm() {
  local answer
  echo -ne " ${BOLD}$1${NC} ${DIM}[y/N]${NC} "
  read -r answer </dev/tty || return 1
  [[ "$answer" =~ ^[Yy] ]]
}
62
+
63
+ # ─────────────────────────────────────────────
64
+ # Preflight
65
+ # ─────────────────────────────────────────────
66
# Verify required tooling before doing any work: bash 4+, curl,
# Node.js v22+, npm. Dies with an actionable message on the first
# missing prerequisite. Sets globals NODE_VERSION / NODE_MAJOR.
preflight() {
  echo -e "\n${BOLD} Preflight checks${NC}"

  # bash version
  (( BASH_VERSINFO[0] >= 4 )) || die "bash 4+ required (you have $BASH_VERSION)"
  success "bash $BASH_VERSION"

  # curl
  command -v curl &>/dev/null || die "curl is required"
  success "curl $(curl --version | head -1 | awk '{print $2}')"

  # git (optional — only needed for dev installs)

  # node — parse the major with parameter expansion instead of sed/cut
  command -v node &>/dev/null || die "Node.js is required (v22+)"
  NODE_VERSION=$(node --version)
  NODE_VERSION=${NODE_VERSION#v}   # drop the leading "v"
  NODE_MAJOR=${NODE_VERSION%%.*}   # keep the major component only
  (( NODE_MAJOR >= 22 )) || die "Node.js v22+ required (you have v$NODE_VERSION — HyperMem requires v22+)"
  success "node v$NODE_VERSION"

  # npm
  command -v npm &>/dev/null || die "npm is required"
  success "npm $(npm --version)"
}
92
+
93
+ # ─────────────────────────────────────────────
94
+ # Hardware detection
95
+ # ─────────────────────────────────────────────
96
# Probe the machine and recommend a memory tier.
# Writes globals: HAS_NVIDIA, HAS_AMD, HAS_OLLAMA, HAS_API_KEY,
# DETECTED_TIER (and GPU_NAME / OLLAMA_VERSION when detected).
# Recommendation: Tier 3 only when Ollama AND a GPU are present;
# otherwise Tier 2 (WASM embedder, runs anywhere). Tier 4 is never
# auto-recommended — the operator opts in explicitly.
detect_hardware() {
  echo -e "\n${BOLD} Detecting hardware${NC}"

  HAS_NVIDIA=false
  HAS_AMD=false
  HAS_OLLAMA=false
  HAS_API_KEY=false
  DETECTED_TIER=1

  # NVIDIA GPU via nvidia-smi
  if command -v nvidia-smi &>/dev/null; then
    GPU_NAME=$(nvidia-smi --query-gpu=name --format=csv,noheader 2>/dev/null | head -1 || echo "unknown")
    HAS_NVIDIA=true
    success "NVIDIA GPU: $GPU_NAME"
  else
    info "No NVIDIA GPU detected"
  fi

  # AMD GPU via rocm-smi, falling back to a lspci scan
  if command -v rocm-smi &>/dev/null || lspci 2>/dev/null | grep -qi 'amd.*display\|radeon'; then
    HAS_AMD=true
    success "AMD GPU detected"
  fi

  # Ollama runtime
  if command -v ollama &>/dev/null; then
    OLLAMA_VERSION=$(ollama --version 2>/dev/null || echo "unknown")
    HAS_OLLAMA=true
    success "Ollama: $OLLAMA_VERSION"
  else
    info "Ollama not found"
  fi

  # API key (OpenRouter or OpenAI-compatible) — informational only,
  # does NOT affect the recommendation below
  if [[ -n "${OPENROUTER_API_KEY:-}" || -n "${HYPERMEM_EMBED_API_KEY:-}" ]]; then
    HAS_API_KEY=true
    info "Embedding API key detected (Tier 4 available)"
  else
    info "No embedding API key in environment"
  fi

  # GPU-without-Ollama and CPU-only both land on Tier 2 (WASM, no
  # extra dependencies); Tier 3 needs both Ollama and a GPU.
  if $HAS_OLLAMA && { $HAS_NVIDIA || $HAS_AMD; }; then
    DETECTED_TIER=3
  else
    DETECTED_TIER=2
  fi
}
148
+
149
+ # ─────────────────────────────────────────────
150
+ # Tier selection
151
+ # ─────────────────────────────────────────────
152
# Present the four memory tiers, defaulting to DETECTED_TIER, and loop
# until a valid choice is made. Writes global SELECTED_TIER (1-4).
select_tier() {
  echo -e "\n${BOLD} Memory tier selection${NC}\n"

  echo -e " ${DIM}Choose how HyperMem handles semantic memory retrieval:${NC}\n"

  echo -e " ${BOLD}1)${NC} ${GREEN}FTS5 + BM25${NC} ${DIM}(Tier 1 — keyword search only)${NC}"
  echo -e " No embedder. Fast, zero extra dependencies."
  echo -e " Best for: minimal setups, very low spec hardware.\n"

  echo -e " ${BOLD}2)${NC} ${GREEN}MiniLM-L6-v2${NC} ${DIM}(Tier 2 — lightweight semantic)${NC}"
  echo -e " 384-dimension embedder, runs in Node via WASM. No GPU, no Ollama."
  echo -e " Best for: CPU-only servers, Raspberry Pi, low-memory VMs.\n"

  echo -e " ${BOLD}3)${NC} ${GREEN}nomic-embed-text${NC} ${DIM}(Tier 3 — GPU-accelerated local)${NC}"
  echo -e " 768-dimension embedder via Ollama. GPU strongly recommended."
  echo -e " Best for: local workstations with a GPU, self-hosted setups.\n"

  echo -e " ${BOLD}4)${NC} ${GREEN}qwen3-embedding:8b${NC} ${DIM}(Tier 4 — API, top quality)${NC}"
  echo -e " 4096-dimension embedder via OpenRouter (or any OpenAI-compatible API)."
  echo -e " Best for: production deployments, highest retrieval quality.\n"

  echo -e " ${DIM}Recommended for your hardware: ${NC}${BOLD}Tier ${DETECTED_TIER}${NC}"
  if $HAS_API_KEY; then
    echo -e " ${DIM}(Tier 4 also available — API key detected in environment)${NC}"
  fi
  echo ""

  # Re-prompt until we get a single digit 1-4 (default: DETECTED_TIER)
  while :; do
    prompt TIER_INPUT "Select tier (1-4):" "$DETECTED_TIER"
    case "$TIER_INPUT" in
      [1-4])
        SELECTED_TIER="$TIER_INPUT"
        break
        ;;
      *)
        warn "Enter a number between 1 and 4"
        ;;
    esac
  done

  echo ""
  case "$SELECTED_TIER" in
    1) success "Tier 1: FTS5+BM25 — no embedder needed" ;;
    2) success "Tier 2: MiniLM-L6-v2 via @huggingface/transformers" ;;
    3) success "Tier 3: nomic-embed-text via Ollama" ;;
    4) success "Tier 4: qwen3-embedding:8b via OpenRouter" ;;
  esac
}
194
+
195
+ # ─────────────────────────────────────────────
196
+ # Install HyperMem
197
+ # ─────────────────────────────────────────────
198
# Self-contained npm prefix that HyperMem is installed into.
INSTALL_DIR="${HYPERMEM_INSTALL_DIR:-$HOME/.hypermem}"

# Install or update the HyperMem packages under $INSTALL_DIR using
# npm's --prefix mode. Sets PLUGIN_DIR / MEMORY_PLUGIN_DIR for the
# registration and smoke-test steps.
install_hypermem() {
  echo -e "\n${BOLD} Installing HyperMem${NC}"

  mkdir -p "$INSTALL_DIR"

  # Fresh installs need a package.json so `npm --prefix` behaves
  if [[ ! -f "$INSTALL_DIR/package.json" ]]; then
    info "Initializing install directory..."
    npm --prefix "$INSTALL_DIR" init -y --silent 2>/dev/null
  fi

  if [[ -d "$INSTALL_DIR/node_modules/@psiclawops/hypermem" ]]; then
    # Existing install: offer a one-shot update of all three packages
    if confirm "HyperMem already found at $INSTALL_DIR — update it?"; then
      info "Updating packages..."
      npm --prefix "$INSTALL_DIR" install --silent @psiclawops/hypermem@latest @psiclawops/hypercompositor@latest @psiclawops/hypermem-memory@latest
    else
      info "Using existing installation"
    fi
  else
    # Fresh install: one package at a time so progress is visible
    info "Installing @psiclawops/hypermem..."
    npm --prefix "$INSTALL_DIR" install --silent @psiclawops/hypermem@latest

    info "Installing @psiclawops/hypercompositor..."
    npm --prefix "$INSTALL_DIR" install --silent @psiclawops/hypercompositor@latest

    info "Installing @psiclawops/hypermem-memory..."
    npm --prefix "$INSTALL_DIR" install --silent @psiclawops/hypermem-memory@latest
  fi

  # Plugin locations used by register_plugin and smoke_test
  PLUGIN_DIR="$INSTALL_DIR/node_modules/@psiclawops/hypercompositor"
  MEMORY_PLUGIN_DIR="$INSTALL_DIR/node_modules/@psiclawops/hypermem-memory"

  success "HyperMem installed at $INSTALL_DIR"
}
235
+
236
+ # ─────────────────────────────────────────────
237
+ # Tier-specific setup
238
+ # ─────────────────────────────────────────────
239
# Tier-specific embedder setup. Reads SELECTED_TIER and writes globals
# EMBED_PROVIDER / EMBED_MODEL / EMBED_DIMS (plus EMBED_API_KEY and
# EMBED_BASE_URL for Tier 4), consumed later by write_config.
setup_tier() {
  echo -e "\n${BOLD} Setting up Tier ${SELECTED_TIER}${NC}"

  case "$SELECTED_TIER" in
    1)
      # Pure FTS5/BM25 — nothing extra to install
      EMBED_PROVIDER="none"
      EMBED_MODEL="none"
      EMBED_DIMS=0
      success "No embedder required"
      ;;

    2)
      info "Installing @huggingface/transformers (WASM embedder)..."
      npm --prefix "$INSTALL_DIR" install --silent @huggingface/transformers@3
      EMBED_PROVIDER="transformers"
      EMBED_MODEL="Xenova/all-MiniLM-L6-v2"
      EMBED_DIMS=384
      success "MiniLM-L6-v2 will download on first use (~90MB)"
      ;;

    3)
      # Guard clause: Tier 3 is Ollama-backed
      command -v ollama &>/dev/null \
        || die "Ollama is required for Tier 3. Install it from https://ollama.com then re-run."
      info "Pulling nomic-embed-text via Ollama..."
      ollama pull nomic-embed-text
      EMBED_PROVIDER="ollama"
      EMBED_MODEL="nomic-embed-text"
      EMBED_DIMS=768
      success "nomic-embed-text ready"
      ;;

    4)
      # Prefer a key already in the environment; prompt otherwise
      API_KEY="${OPENROUTER_API_KEY:-${HYPERMEM_EMBED_API_KEY:-}}"
      if [[ -n "$API_KEY" ]]; then
        success "Using API key from environment"
      else
        echo ""
        echo -e " ${DIM}Get a key at https://openrouter.ai/keys${NC}"
        prompt API_KEY "OpenRouter API key:"
        [[ -n "$API_KEY" ]] || die "API key required for Tier 4"
      fi
      EMBED_PROVIDER="openai"
      EMBED_MODEL="qwen/qwen3-embedding:8b"
      EMBED_DIMS=4096
      EMBED_API_KEY="$API_KEY"
      EMBED_BASE_URL="https://openrouter.ai/api/v1"
      success "Tier 4 configured — qwen3-embedding:8b via OpenRouter"
      ;;
  esac
}
292
+
293
+ # ─────────────────────────────────────────────
294
+ # Write config
295
+ # ─────────────────────────────────────────────
296
# Write the HyperMem config to ~/.openclaw/hypermem/config.json.
# Reads: SELECTED_TIER, INSTALL_DIR and (tiers 2-4) the EMBED_* globals
# set by setup_tier.
# Security fix vs. original: Tier 4 stores the API key in this file, so
# the file is created/forced to mode 0600 BEFORE the secret is written —
# the original left it at the default (usually world-readable) umask.
write_config() {
  echo -e "\n${BOLD} Writing config${NC}"

  CONFIG_DIR="$HOME/.openclaw/hypermem"
  mkdir -p "$CONFIG_DIR"
  CONFIG_FILE="$CONFIG_DIR/config.json"

  # Tier-specific embedding block
  case "$SELECTED_TIER" in
    1) EMBED_BLOCK='"embedding": { "provider": "none" }' ;;
    2) EMBED_BLOCK="\"embedding\": { \"provider\": \"transformers\", \"model\": \"$EMBED_MODEL\", \"dimensions\": $EMBED_DIMS }" ;;
    3) EMBED_BLOCK="\"embedding\": { \"provider\": \"ollama\", \"model\": \"$EMBED_MODEL\", \"dimensions\": $EMBED_DIMS, \"ollamaUrl\": \"http://localhost:11434\" }" ;;
    *) EMBED_BLOCK="\"embedding\": { \"provider\": \"openai\", \"model\": \"$EMBED_MODEL\", \"dimensions\": $EMBED_DIMS, \"openaiBaseUrl\": \"$EMBED_BASE_URL\", \"openaiApiKey\": \"$EMBED_API_KEY\" }" ;;
  esac

  # Vector store only makes sense when an embedder exists (tier > 1)
  local vector_enabled=false
  if (( SELECTED_TIER > 1 )); then
    vector_enabled=true
  fi

  # Create/truncate first, then lock down permissions, then write —
  # so the API key never lands in a world-readable file.
  : > "$CONFIG_FILE"
  chmod 600 "$CONFIG_FILE"

  cat > "$CONFIG_FILE" <<EOF
{
  "installDir": "$INSTALL_DIR",
  "tier": $SELECTED_TIER,
  "contextWindowSize": 128000,
  "contextWindowReserve": 0.25,
  "deferToolPruning": false,
  "verboseLogging": false,
  "contextWindowOverrides": {},
  "warmCacheReplayThresholdMs": 120000,
  "subagentWarming": "light",
  "compositor": {
    "budgetFraction": 0.703,
    "reserveFraction": 0.25,
    "historyFraction": 0.4,
    "memoryFraction": 0.4,
    "defaultTokenBudget": 90000,
    "maxHistoryMessages": 500,
    "maxFacts": 30,
    "maxExpertisePatterns": 6,
    "maxCrossSessionContext": 4000,
    "maxTotalTriggerTokens": 4000,
    "maxRecentToolPairs": 3,
    "maxProseToolPairs": 10,
    "warmHistoryBudgetFraction": 0.4,
    "contextWindowReserve": 0.25,
    "dynamicReserveTurnHorizon": 5,
    "dynamicReserveMax": 0.5,
    "dynamicReserveEnabled": true,
    "keystoneHistoryFraction": 0.2,
    "keystoneMaxMessages": 15,
    "keystoneMinSignificance": 0.5,
    "targetBudgetFraction": 0.65,
    "enableFOS": true,
    "enableMOD": true,
    "hyperformProfile": "standard",
    "wikiTokenCap": 600,
    "zigzagOrdering": true
  },
  "eviction": {
    "enabled": true,
    "imageAgeTurns": 2,
    "toolResultAgeTurns": 4,
    "minTokensToEvict": 200,
    "keepPreviewChars": 120
  },
  "maintenance": {
    "periodicInterval": 300000,
    "maxActiveConversations": 5,
    "recentConversationCooldownMs": 30000,
    "maxCandidatesPerPass": 200
  },
  $EMBED_BLOCK,
  "vectorStore": {
    "enabled": $vector_enabled
  }
}
EOF

  success "Config written to $CONFIG_FILE"
}
375
+
376
+ # ─────────────────────────────────────────────
377
+ # OpenClaw plugin registration
378
+ # ─────────────────────────────────────────────
379
# Register the two HyperMem plugins (hypercompositor + hypermem) with
# the OpenClaw CLI and wire the plugin slots. Skips gracefully — with
# copy-pasteable manual commands — when the CLI is missing or any step
# fails. Reads PLUGIN_DIR / MEMORY_PLUGIN_DIR set by install_hypermem.
register_plugin() {
  # No CLI: print the manual steps and bail out early
  if ! command -v openclaw &>/dev/null; then
    warn "OpenClaw CLI not found, skipping plugin registration"
    echo -e " ${DIM}Run these manually after installing OpenClaw:${NC}"
    echo -e " ${DIM} openclaw plugins install file:$PLUGIN_DIR${NC}"
    echo -e " ${DIM} openclaw plugins install file:$MEMORY_PLUGIN_DIR${NC}"
    return
  fi

  echo ""
  if confirm "Register HyperMem plugins with OpenClaw?"; then
    # Context engine plugin (hypercompositor)
    info "Registering context engine plugin (hypercompositor)..."
    if openclaw plugins install "file:$PLUGIN_DIR" 2>/dev/null; then
      success "hypercompositor registered"
    else
      warn "Context engine registration failed — run: openclaw plugins install file:$PLUGIN_DIR"
    fi

    # Memory plugin (hypermem)
    info "Registering memory plugin (hypermem)..."
    if openclaw plugins install "file:$MEMORY_PLUGIN_DIR" 2>/dev/null; then
      success "hypermem registered"
    else
      warn "Memory plugin registration failed — run: openclaw plugins install file:$MEMORY_PLUGIN_DIR"
    fi

    # Point OpenClaw's contextEngine/memory slots at the new plugins
    info "Configuring plugin slots..."
    local slots_ok=true
    openclaw config set plugins.slots.contextEngine hypercompositor 2>/dev/null || slots_ok=false
    openclaw config set plugins.slots.memory hypermem 2>/dev/null || slots_ok=false
    if [[ "$slots_ok" == true ]]; then
      success "Plugin slots configured"
    else
      warn "Slot config failed — set manually:"
      echo -e " ${DIM} openclaw config set plugins.slots.contextEngine hypercompositor${NC}"
      echo -e " ${DIM} openclaw config set plugins.slots.memory hypermem${NC}"
    fi

    success "Restart OpenClaw to activate: openclaw gateway restart"
  fi
}
422
+
423
+ # ─────────────────────────────────────────────
424
+ # Smoke test
425
+ # ─────────────────────────────────────────────
426
# Best-effort post-install validation. Every check warns (never dies) so
# a partial failure still leaves the operator with actionable commands.
# Fix vs. original: file paths were interpolated directly into generated
# JavaScript source, so a path containing a quote or backslash would
# break (or inject into) the script. Paths are now passed through the
# environment and the heredocs are literal (quoted delimiter).
smoke_test() {
  echo -e "\n${BOLD} Smoke test${NC}"

  # Verify config parses and has required fields
  HYPERMEM_CONFIG_PATH="$HOME/.openclaw/hypermem/config.json" \
    node --input-type=module <<'EOF' 2>/dev/null && success "Config loads cleanly" || warn "Config load failed — check $HOME/.openclaw/hypermem/config.json"
import { readFileSync, existsSync } from 'fs';
const cfg = JSON.parse(readFileSync(process.env.HYPERMEM_CONFIG_PATH, 'utf8'));
if (!cfg.tier) throw new Error('missing tier');
if (!cfg.installDir) throw new Error('missing installDir');
if (!existsSync(cfg.installDir)) throw new Error('installDir does not exist: ' + cfg.installDir);
EOF

  # Verify HyperMem core module loads
  HYPERMEM_CORE_PATH="$INSTALL_DIR/node_modules/@psiclawops/hypermem/dist/index.js" \
    node --input-type=module <<'EOF' 2>/dev/null \
    && success "HyperMem core module loads" \
    || warn "HyperMem core module load failed — check $INSTALL_DIR/node_modules/@psiclawops/hypermem"
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
require(process.env.HYPERMEM_CORE_PATH);
EOF

  # Verify context engine plugin dist exists
  [[ -f "$PLUGIN_DIR/dist/index.js" ]] \
    && success "hypercompositor plugin built" \
    || warn "hypercompositor plugin not built — reinstall: npm --prefix $INSTALL_DIR install @psiclawops/hypercompositor@latest"

  # Verify memory plugin dist exists
  [[ -f "$MEMORY_PLUGIN_DIR/dist/index.js" ]] \
    && success "hypermem memory plugin built" \
    || warn "hypermem memory plugin not built — reinstall: npm --prefix $INSTALL_DIR install @psiclawops/hypermem-memory@latest"

  # Tier 2: verify transformers package is present
  if [[ "$SELECTED_TIER" == "2" ]]; then
    [[ -d "$INSTALL_DIR/node_modules/@huggingface/transformers" ]] \
      && success "@huggingface/transformers present" \
      || warn "@huggingface/transformers missing — run: npm --prefix $INSTALL_DIR install @huggingface/transformers@3"
  fi

  # Tier 3: verify nomic model is available in Ollama
  if [[ "$SELECTED_TIER" == "3" ]] && command -v ollama &>/dev/null; then
    ollama list 2>/dev/null | grep -q 'nomic-embed-text' \
      && success "nomic-embed-text present in Ollama" \
      || warn "nomic-embed-text not found in Ollama — run: ollama pull nomic-embed-text"
  fi
}
471
+
472
+ # ─────────────────────────────────────────────
473
+ # Summary
474
+ # ─────────────────────────────────────────────
475
# Print the post-install summary: selected tier, install/config paths,
# plugin names, and a tier-specific note. Reads SELECTED_TIER and
# INSTALL_DIR; purely informational.
summary() {
  local rule=" ─────────────────────────────────────────"

  echo ""
  echo -e "${CYAN}${BOLD}${rule}${NC}"
  echo -e "${CYAN}${BOLD} HyperMem installed${NC}"
  echo -e "${CYAN}${BOLD}${rule}${NC}"
  echo ""
  echo -e " ${BOLD}Tier:${NC} $SELECTED_TIER"
  echo -e " ${BOLD}Install:${NC} $INSTALL_DIR"
  echo -e " ${BOLD}Config:${NC} $HOME/.openclaw/hypermem/config.json"
  echo -e " ${BOLD}Plugins:${NC} hypercompositor (context-engine) + hypermem (memory)"
  echo ""

  case "$SELECTED_TIER" in
    1) echo -e " ${DIM}FTS5+BM25 keyword search active. No embedder.${NC}" ;;
    2) echo -e " ${DIM}MiniLM-L6-v2 will download on first embedding call.${NC}" ;;
    3) echo -e " ${DIM}nomic-embed-text ready via Ollama.${NC}" ;;
    4) echo -e " ${DIM}qwen3-embedding:8b via OpenRouter. API key stored in config.${NC}" ;;
  esac

  echo ""
  echo -e " ${DIM}Upgrade tier anytime: re-run this installer and select a higher tier.${NC}"
  echo -e " ${DIM}Docs: https://github.com/psiclawops/hypermem${NC}"
  echo ""
}
499
+
500
+ # ─────────────────────────────────────────────
501
+ # Main
502
+ # ─────────────────────────────────────────────
503
# Orchestrate the full install flow, top to bottom.
main() {
  banner
  preflight          # bash/curl/node/npm checks — dies on failure
  detect_hardware    # probe GPU / Ollama / API key, recommend a tier
  select_tier        # interactive tier choice (defaults to detected)
  install_hypermem   # npm install into $INSTALL_DIR
  setup_tier         # tier-specific embedder setup
  write_config       # emit ~/.openclaw/hypermem/config.json
  register_plugin    # optional OpenClaw plugin registration
  smoke_test         # best-effort validation, warns only
  summary
}
515
+
516
+ main "$@"
@@ -0,0 +1,24 @@
1
/**
 * HyperMem Memory Plugin
 *
 * Thin adapter that bridges HyperMem's retrieval capabilities into
 * OpenClaw's memory slot contract (`kind: "memory"`).
 *
 * The context engine plugin (hypercompositor) owns the full lifecycle:
 * ingest, assemble, compact, afterTurn, bootstrap, dispose.
 *
 * This plugin owns the memory slot contract:
 * - registerMemoryCapability() with runtime + publicArtifacts
 * - memory_search tool backing via MemorySearchManager
 * - Public artifacts for memory-wiki bridge
 *
 * Both plugins share the same HyperMem singleton (loaded from repo dist).
 */
// Generated declaration (tsc --declaration). The default export combines
// the plugin's own identity/config/register fields with selected optional
// fields inherited from OpenClawPluginDefinition.
declare const _default: {
    id: string;
    name: string;
    description: string;
    configSchema: import("openclaw/plugin-sdk").OpenClawPluginConfigSchema;
    register: NonNullable<import("openclaw/plugin-sdk/plugin-entry").OpenClawPluginDefinition["register"]>;
} & Pick<import("openclaw/plugin-sdk/plugin-entry").OpenClawPluginDefinition, "kind" | "reload" | "nodeHostCommands" | "securityAuditCollectors">;
export default _default;
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;GAUG;;;;;;;;AA0VH,wBAmBG"}