@vectorize-io/hindsight-openclaw 0.4.5 → 0.4.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -7,13 +7,13 @@ Biomimetic long-term memory for [OpenClaw](https://openclaw.ai) using [Hindsight
  ```bash
  # 1. Configure your LLM provider
  export OPENAI_API_KEY="sk-your-key"
- clawdbot config set 'agents.defaults.models."openai/gpt-4o-mini"' '{}'
+ openclaw config set 'agents.defaults.models."openai/gpt-4o-mini"' '{}'

  # 2. Install and enable the plugin
- clawdbot plugins install @vectorize-io/hindsight-openclaw
+ openclaw plugins install @vectorize-io/hindsight-openclaw

  # 3. Start OpenClaw
- clawdbot gateway
+ openclaw gateway
  ```

  That's it! The plugin will automatically start capturing and recalling memories.
@@ -15,4 +15,5 @@ export declare class HindsightEmbedManager {
    private waitForReady;
    getBaseUrl(): string;
    isRunning(): boolean;
+   private writeConfigEnv;
  }
@@ -1,4 +1,5 @@
  import { spawn } from 'child_process';
+ import { promises as fs } from 'fs';
  import { join } from 'path';
  import { homedir } from 'os';
  export class HindsightEmbedManager {
@@ -16,7 +17,7 @@ export class HindsightEmbedManager {
    ) {
      this.port = 8889; // hindsight-embed uses fixed port 8889
      this.baseUrl = `http://127.0.0.1:8889`;
-     this.embedDir = join(homedir(), '.clawdbot', 'hindsight-embed');
+     this.embedDir = join(homedir(), '.openclaw', 'hindsight-embed');
      this.llmProvider = llmProvider;
      this.llmApiKey = llmApiKey;
      this.llmModel = llmModel;
@@ -35,6 +36,8 @@ export class HindsightEmbedManager {
      if (this.llmModel) {
        env['HINDSIGHT_EMBED_LLM_MODEL'] = this.llmModel;
      }
+     // Write env vars to ~/.hindsight/config.env for daemon persistence
+     await this.writeConfigEnv(env);
      // Start hindsight-embed daemon (it manages itself)
      const embedPackage = this.embedVersion ? `hindsight-embed@${this.embedVersion}` : 'hindsight-embed@latest';
      const startDaemon = spawn('uvx', [embedPackage, 'daemon', 'start'], {
@@ -117,4 +120,58 @@ export class HindsightEmbedManager {
    isRunning() {
      return this.process !== null;
    }
+   async writeConfigEnv(env) {
+     const hindsightDir = join(homedir(), '.hindsight');
+     const embedConfigPath = join(hindsightDir, 'embed');
+     // Ensure directory exists
+     await fs.mkdir(hindsightDir, { recursive: true });
+     // Read existing config to preserve extra settings
+     let existingContent = '';
+     let extraSettings = [];
+     try {
+       existingContent = await fs.readFile(embedConfigPath, 'utf-8');
+       // Extract non-LLM settings (like FORCE_CPU flags)
+       const lines = existingContent.split('\n');
+       for (const line of lines) {
+         const trimmed = line.trim();
+         if (trimmed && !trimmed.startsWith('#') &&
+             !trimmed.startsWith('HINDSIGHT_EMBED_LLM_') &&
+             !trimmed.startsWith('HINDSIGHT_EMBED_BANK_ID') &&
+             !trimmed.startsWith('HINDSIGHT_EMBED_DAEMON_IDLE_TIMEOUT')) {
+           extraSettings.push(line);
+         }
+       }
+     }
+     catch {
+       // File doesn't exist yet, that's fine
+     }
+     // Build config file with header
+     const configLines = [
+       '# Hindsight Embed Configuration',
+       '# Generated by OpenClaw Hindsight plugin',
+       '',
+     ];
+     // Add LLM config
+     if (env.HINDSIGHT_EMBED_LLM_PROVIDER) {
+       configLines.push(`HINDSIGHT_EMBED_LLM_PROVIDER=${env.HINDSIGHT_EMBED_LLM_PROVIDER}`);
+     }
+     if (env.HINDSIGHT_EMBED_LLM_MODEL) {
+       configLines.push(`HINDSIGHT_EMBED_LLM_MODEL=${env.HINDSIGHT_EMBED_LLM_MODEL}`);
+     }
+     if (env.HINDSIGHT_EMBED_LLM_API_KEY) {
+       configLines.push(`HINDSIGHT_EMBED_LLM_API_KEY=${env.HINDSIGHT_EMBED_LLM_API_KEY}`);
+     }
+     if (env.HINDSIGHT_EMBED_DAEMON_IDLE_TIMEOUT) {
+       configLines.push(`HINDSIGHT_EMBED_DAEMON_IDLE_TIMEOUT=${env.HINDSIGHT_EMBED_DAEMON_IDLE_TIMEOUT}`);
+     }
+     // Add extra settings if they exist
+     if (extraSettings.length > 0) {
+       configLines.push('');
+       configLines.push('# Additional settings');
+       configLines.push(...extraSettings);
+     }
+     // Write to file
+     await fs.writeFile(embedConfigPath, configLines.join('\n') + '\n', 'utf-8');
+     console.log(`[Hindsight] Wrote config to ${embedConfigPath}`);
+   }
  }
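
For reference, the file written by the new writeConfigEnv helper is a plain KEY=VALUE env file at ~/.hindsight/embed (per the join(homedir(), '.hindsight') and join(hindsightDir, 'embed') calls above). A minimal sketch of its generated contents, with illustrative placeholder values, not actual output:

```bash
# ~/.hindsight/embed -- illustrative sketch; values are placeholders
# Hindsight Embed Configuration
# Generated by OpenClaw Hindsight plugin

HINDSIGHT_EMBED_LLM_PROVIDER=openai
HINDSIGHT_EMBED_LLM_MODEL=gpt-4o-mini
HINDSIGHT_EMBED_LLM_API_KEY=sk-your-key

# Additional settings
# (any pre-existing non-LLM lines, e.g. a FORCE_CPU flag, are preserved here)
```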
package/dist/index.js CHANGED
@@ -84,8 +84,8 @@ function detectLLMConfig(api) {
      `Please set one of these environment variables:\n${keyInstructions}\n\n` +
      `You can set them in your shell profile (~/.zshrc or ~/.bashrc):\n` +
      ` export ANTHROPIC_API_KEY="your-key-here"\n\n` +
-     `Or run Moltbot with the environment variable:\n` +
-     ` ANTHROPIC_API_KEY="your-key" clawdbot start\n\n` +
+     `Or run OpenClaw with the environment variable:\n` +
+     ` ANTHROPIC_API_KEY="your-key" openclaw gateway\n\n` +
      `Alternatively, configure ollama provider which doesn't require an API key.`);
  }
  function getPluginConfig(api) {
@@ -215,10 +215,10 @@ export default function (api) {
        return;
      }
      console.log('[Hindsight] Auto-recall for prompt:', prompt.substring(0, 50));
-     // Recall relevant memories (up to 1024 tokens)
+     // Recall relevant memories (up to 512 tokens)
      const response = await client.recall({
        query: prompt,
-       max_tokens: 1024,
+       max_tokens: 512,
      });
      if (!response.results || response.results.length === 0) {
        console.log('[Hindsight] No memories found for auto-recall');
@@ -227,7 +227,10 @@ export default function (api) {
      // Format memories as JSON with all fields from recall
      const memoriesJson = JSON.stringify(response.results, null, 2);
      const contextMessage = `<hindsight_memories>
+ Relevant memories from past conversations (score 1=highest, prioritize recent when conflicting):
  ${memoriesJson}
+
+ User message: ${prompt}
  </hindsight_memories>`;
      console.log(`[Hindsight] Auto-recall: Injecting ${response.results.length} memories`);
      // Inject context before the user message
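
Taken together with the recall call above (max_tokens: 512), the injected context message now takes roughly this shape; the memories JSON and prompt shown here are placeholders, not real output:

```
<hindsight_memories>
Relevant memories from past conversations (score 1=highest, prioritize recent when conflicting):
[ ...JSON array of recall results... ]

User message: ...original user prompt...
</hindsight_memories>
```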
package/package.json CHANGED
@@ -1,11 +1,11 @@
  {
    "name": "@vectorize-io/hindsight-openclaw",
-   "version": "0.4.5",
+   "version": "0.4.6",
    "description": "Hindsight memory plugin for OpenClaw - biomimetic long-term memory with fact extraction",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
    "type": "module",
-   "clawdbot": {
+   "openclaw": {
      "extensions": [
        "./dist/index.js"
      ]
@@ -27,7 +27,7 @@
    },
    "files": [
      "dist",
-     "clawdbot.plugin.json",
+     "openclaw.plugin.json",
      "README.md"
    ],
    "scripts": {