ei-tui 0.8.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. package/README.md +9 -6
  2. package/package.json +1 -1
  3. package/src/cli/mcp.ts +2 -2
  4. package/src/cli.ts +1 -1
  5. package/src/core/handlers/heartbeat.ts +63 -8
  6. package/src/core/handlers/index.ts +2 -1
  7. package/src/core/handlers/persona-response.ts +3 -5
  8. package/src/core/handlers/rooms.ts +3 -5
  9. package/src/core/handlers/utils.ts +5 -4
  10. package/src/core/heartbeat-manager.ts +16 -47
  11. package/src/core/message-manager.ts +6 -2
  12. package/src/core/orchestrators/ceremony.ts +49 -4
  13. package/src/core/orchestrators/human-extraction.ts +2 -2
  14. package/src/core/persona-manager.ts +1 -2
  15. package/src/core/personas/opencode-agent.ts +7 -2
  16. package/src/core/processor.ts +5 -12
  17. package/src/core/prompt-context-builder.ts +11 -1
  18. package/src/core/queue-processor.ts +4 -4
  19. package/src/core/room-manager.ts +6 -6
  20. package/src/core/state/human.ts +0 -1
  21. package/src/core/state/personas.ts +22 -13
  22. package/src/core/state/rooms.ts +0 -2
  23. package/src/core/state-manager.ts +83 -11
  24. package/src/core/types/data-items.ts +2 -2
  25. package/src/core/types/entities.ts +8 -3
  26. package/src/core/types/enums.ts +1 -0
  27. package/src/core/types/integrations.ts +1 -1
  28. package/src/core/types/llm.ts +2 -4
  29. package/src/core/types/rooms.ts +0 -4
  30. package/src/integrations/claude-code/importer.ts +1 -5
  31. package/src/integrations/cursor/importer.ts +1 -5
  32. package/src/integrations/opencode/importer.ts +1 -4
  33. package/src/integrations/opencode/types.ts +17 -1
  34. package/src/prompts/heartbeat/check.ts +7 -18
  35. package/src/prompts/heartbeat/ei.ts +14 -0
  36. package/src/prompts/heartbeat/types.ts +7 -5
  37. package/src/prompts/index.ts +9 -0
  38. package/src/prompts/message-utils.ts +7 -4
  39. package/src/prompts/reflection/index.ts +77 -0
  40. package/src/prompts/reflection/types.ts +26 -0
  41. package/src/prompts/response/index.ts +5 -2
  42. package/src/prompts/response/sections.ts +29 -1
  43. package/src/prompts/response/types.ts +10 -2
  44. package/src/prompts/room/sections.ts +4 -7
  45. package/src/prompts/room/types.ts +3 -6
  46. package/src/storage/embeddings.ts +69 -34
  47. package/src/storage/merge.ts +1 -1
  48. package/src/templates/welcome.ts +0 -1
  49. package/tui/README.md +5 -2
  50. package/tui/src/commands/editor.tsx +0 -1
  51. package/tui/src/commands/persona.tsx +89 -3
  52. package/tui/src/commands/reflect.tsx +375 -0
  53. package/tui/src/commands/registry.ts +2 -0
  54. package/tui/src/components/CYPTreeOverlay.tsx +0 -2
  55. package/tui/src/components/MAPScoreOverlay.tsx +1 -1
  56. package/tui/src/components/MessageList.tsx +6 -9
  57. package/tui/src/components/PromptInput.tsx +3 -1
  58. package/tui/src/components/RoomMessageList.tsx +2 -6
  59. package/tui/src/components/Sidebar.tsx +3 -5
  60. package/tui/src/components/StatusBar.tsx +26 -14
  61. package/tui/src/context/keyboard.tsx +2 -2
  62. package/tui/src/util/cyp-editor.tsx +2 -6
  63. package/tui/src/util/yaml-context.ts +2 -6
  64. package/tui/src/util/yaml-persona.ts +3 -3
  65. package/tui/src/util/yaml-settings.ts +0 -3
package/README.md CHANGED
@@ -4,7 +4,7 @@ A local-first AI companion system with persistent personas and coding tool integ
4
4
 
5
5
  You can access the Web version at [ei.flare576.com](https://ei.flare576.com).
6
6
 
7
- You can install the local version via `npm install -g ei-tui` (see [### TUI](#tui) for details).
7
+ You can run the local version via `bunx ei-tui` — no install needed, always current (see [### TUI](#tui) for details).
8
8
 
9
9
  If you're here to give your coding tools (OpenCode, Claude Code, Cursor) persistent memory, jump over to [TUI README.md](./tui/README.md) to learn how to get information _into_ Ei, and [CLI README.md](./src/cli/README.md) to get it back _out_.
10
10
 
@@ -16,6 +16,8 @@ Unless you enable Syncing, that's where it stays.
16
16
 
17
17
  If you have a local LLM, literally no data leaves your system(s) by default. If you don't, you'll need to provide an LLM for Ei to use. I tried to make that as easy as possible via adding Providers via API Key.
18
18
 
19
+ > **One honest note**: the first time you load Ei in a browser, it downloads the embedding library and model weights from public CDNs (jsdelivr, HuggingFace). Those CDNs see your IP address — but not your data. All embedding runs locally in your browser after that first download. The TUI version caches everything on first run and is fully offline after. Additionally, the same is true for my webhost - it must see your IP address to serve you assets, but no analytics, reports, metrics, etc. are done on them.
20
+
19
21
  There's no other usage, debugging, analytics, tracking, or history information stored or transmitted - anonymized or otherwise.
20
22
 
21
23
  If there's a problem with the system, you need to tell me here on GitHub, or on Bluesky, or Discord, or whatever. There's no "report a bug" button, no "DONATE" link in the app.
@@ -110,15 +112,16 @@ More information can be found in the [Web Readme](web/README.md)
110
112
  # Install Bun (if you don't have it)
111
113
  curl -fsSL https://bun.sh/install | bash
112
114
 
113
- # Install Ei
114
- npm install -g ei-tui
115
- ```
115
+ # Run Ei — no install needed, always the latest version
116
+ bunx ei-tui
116
117
 
117
- When you install Ei, you pull down this package and it's dependencies.
118
+ # Or, if you use it as much as I do, add this to your profile!
119
+ alias ei='bunx ei-tui'
120
+ ```
118
121
 
119
122
  If you have a Local LLM, that's the first and last set of signals that leave your machine for Ei unless you tell it otherwise.
120
123
 
121
- Regardless, Running `ei` pops open the TUI interface and, just like on the web, all messages and summary requests flow to your LLM provider, but the core data stays on your device.
124
+ Regardless, running `ei` (or `bunx ei-tui`) pops open the TUI interface and, just like on the web, all messages and summary requests flow to your LLM provider, but the core data stays on your device.
122
125
 
123
126
  More information (including commands) can be found in the [TUI Readme](tui/README.md)
124
127
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ei-tui",
3
- "version": "0.8.0",
3
+ "version": "0.9.0",
4
4
  "author": "Flare576",
5
5
  "repository": {
6
6
  "type": "git",
package/src/cli/mcp.ts CHANGED
@@ -35,7 +35,7 @@ export function createMcpServer(): McpServer {
35
35
  .string()
36
36
  .optional()
37
37
  .describe(
38
- "Filter to entities from a specific source. Prefix match against namespaced source identifiers (e.g. 'cursor', 'opencode', 'opencode:ses_abc123')."
38
+ "Filter to entities from a specific source. Prefix match against namespaced source identifiers (e.g. 'cursor', 'opencode', 'opencode:my-machine', 'opencode:my-machine:ses_abc123')."
39
39
  ),
40
40
  limit: z
41
41
  .number()
@@ -104,7 +104,7 @@ export function createMcpServer(): McpServer {
104
104
  .string()
105
105
  .optional()
106
106
  .describe(
107
- "Filter to entities from a specific source. Prefix match against namespaced source identifiers (e.g. 'cursor', 'opencode', 'opencode:ses_abc123'). If the entity does not match, returns not found."
107
+ "Filter to entities from a specific source. Prefix match against namespaced source identifiers (e.g. 'cursor', 'opencode', 'opencode:my-machine', 'opencode:my-machine:ses_abc123'). If the entity does not match, returns not found."
108
108
  ),
109
109
  },
110
110
  },
package/src/cli.ts CHANGED
@@ -66,7 +66,7 @@ Options:
66
66
  --number, -n Maximum number of results (default: 10)
67
67
  --recent, -r Sort by last_mentioned date (most recent first)
68
68
  --persona, -p Filter to entities a specific persona has learned about
69
- --source, -s Filter to entities from a specific source (prefix match, e.g. "cursor", "opencode:ses_abc123")
69
+ --source, -s Filter to entities from a specific source (prefix match, e.g. "cursor", "opencode:my-machine", "opencode:my-machine:ses_abc123")
70
70
  --id Look up entity by ID (accepts value or stdin)
71
71
  --install Register Ei with OpenCode, Claude Code, and Cursor
72
72
  --help, -h Show this help message
@@ -6,6 +6,7 @@ import {
6
6
  } from "../types.js";
7
7
  import type { StateManager } from "../state-manager.js";
8
8
  import type { HeartbeatCheckResult, EiHeartbeatResult } from "../../prompts/heartbeat/types.js";
9
+ import type { ReflectionCriticResult } from "../../prompts/reflection/types.js";
9
10
  import { crossFind } from "../utils/index.js";
10
11
 
11
12
  export function handleHeartbeatCheck(response: LLMResponse, state: StateManager): void {
@@ -27,11 +28,6 @@ export function handleHeartbeatCheck(response: LLMResponse, state: StateManager)
27
28
  state.persona_update(personaId, { last_heartbeat: now });
28
29
  state.queue_clearPersonaResponses(personaId, LLMNextStep.HandleHeartbeatCheck);
29
30
 
30
- if (result.mentioned_reflection === true) {
31
- state.persona_update(personaId, { reflection_last_asked: now });
32
- console.log(`[HeartbeatCheck ${personaDisplayName}] Persona surfaced identity drift - reflection_last_asked set`);
33
- }
34
-
35
31
  if (!result.should_respond) {
36
32
  console.log(`[HeartbeatCheck ${personaDisplayName}] Chose not to reach out (should_respond=false)`);
37
33
  return;
@@ -41,7 +37,7 @@ export function handleHeartbeatCheck(response: LLMResponse, state: StateManager)
41
37
  const message: Message = {
42
38
  id: crypto.randomUUID(),
43
39
  role: "system",
44
- verbal_response: result.message,
40
+ content: result.message,
45
41
  timestamp: now,
46
42
  read: false,
47
43
  context_status: ContextStatus.Default,
@@ -74,10 +70,10 @@ export function handleEiHeartbeat(response: LLMResponse, state: StateManager): v
74
70
  return;
75
71
  }
76
72
 
77
- const sendMessage = (verbal_response: string) => state.messages_append("ei", {
73
+ const sendMessage = (content: string) => state.messages_append("ei", {
78
74
  id: crypto.randomUUID(),
79
75
  role: "system",
80
- verbal_response,
76
+ content,
81
77
  timestamp: now,
82
78
  read: false,
83
79
  context_status: ContextStatus.Default,
@@ -124,3 +120,62 @@ export function handleEiHeartbeat(response: LLMResponse, state: StateManager): v
124
120
  }
125
121
  }
126
122
  }
123
+
124
+ export function handleReflectionCritic(response: LLMResponse, state: StateManager): void {
125
+ const personaId = response.request.data.personaId as string;
126
+ const personaDisplayName = response.request.data.personaDisplayName as string;
127
+
128
+ const result = response.parsed as ReflectionCriticResult | undefined;
129
+ if (!result?.updated_identity || !result.critique) {
130
+ console.error(`[ReflectionCritic ${personaDisplayName}] Invalid or missing parsed result`);
131
+ return;
132
+ }
133
+
134
+ const personRecord = state.human_person_getByIdentifier("Ei Persona", personaId);
135
+ if (personRecord) {
136
+ state.human_person_upsert({
137
+ ...personRecord,
138
+ description: result.updated_identity.long_description,
139
+ });
140
+ console.log(`[ReflectionCritic ${personaDisplayName}] Person record description replaced (was log, now distilled identity)`);
141
+ }
142
+
143
+ const persona = state.persona_getById(personaId);
144
+ if (!persona) {
145
+ console.error(`[ReflectionCritic ${personaDisplayName}] Persona not found after critic`);
146
+ return;
147
+ }
148
+
149
+ const mergedTopics = result.updated_identity.topics.map(updatedTopic => {
150
+ const existing = persona.topics.find(t => t.name === updatedTopic.name);
151
+ return {
152
+ ...updatedTopic,
153
+ sentiment: updatedTopic.sentiment ?? existing?.sentiment ?? 0,
154
+ exposure_current: updatedTopic.exposure_current ?? existing?.exposure_current ?? 0,
155
+ exposure_desired: updatedTopic.exposure_desired ?? existing?.exposure_desired ?? 0.5,
156
+ };
157
+ });
158
+
159
+ const sanitizedTraits = result.updated_identity.traits.map(t => ({
160
+ ...t,
161
+ name: t.name?.trim() ?? "",
162
+ description: t.description?.trim() ?? "",
163
+ }));
164
+
165
+ const sanitizedTopics = mergedTopics.map(t => ({
166
+ ...t,
167
+ name: t.name?.trim() ?? "",
168
+ }));
169
+
170
+ state.persona_update(personaId, {
171
+ pending_update: {
172
+ short_description: result.updated_identity.short_description?.trim() ?? "",
173
+ long_description: result.updated_identity.long_description?.trim() ?? "",
174
+ traits: sanitizedTraits,
175
+ topics: sanitizedTopics,
176
+ critique: result.critique,
177
+ created_at: new Date().toISOString(),
178
+ },
179
+ });
180
+ console.log(`[ReflectionCritic ${personaDisplayName}] pending_update written to persona`);
181
+ }
@@ -7,7 +7,7 @@ import type { PersonIdentifier } from "../types/data-items.js";
7
7
  export type { ResponseHandler } from "./persona-response.js";
8
8
 
9
9
  import { handlePersonaResponse, handleToolContinuation, handleOneShot, handleOneShotJSON } from "./persona-response.js";
10
- import { handleHeartbeatCheck, handleEiHeartbeat } from "./heartbeat.js";
10
+ import { handleHeartbeatCheck, handleEiHeartbeat, handleReflectionCritic } from "./heartbeat.js";
11
11
  import { handlePersonaGeneration, handlePersonaDescriptions, handlePersonaTraitExtraction } from "./persona-generation.js";
12
12
  import {
13
13
  handlePersonaTopicRating,
@@ -90,4 +90,5 @@ export const handlers: Record<LLMNextStep, ResponseHandler> = {
90
90
  handlePersonaPreview,
91
91
  [LLMNextStep.HandlePersonIdentifierMigration]: handlePersonIdentifierMigration,
92
92
  [LLMNextStep.HandleTopicValidate]: handleDedupCurate,
93
+ [LLMNextStep.HandleReflectionCritic]: handleReflectionCritic,
93
94
  };
@@ -76,10 +76,9 @@ export function handlePersonaResponse(response: LLMResponse, state: StateManager
76
76
  return;
77
77
  }
78
78
 
79
- const verbal = result.verbal_response || undefined;
80
- const action = result.action_response || undefined;
79
+ const content = result.content || undefined;
81
80
 
82
- if (!verbal && !action) {
81
+ if (!content) {
83
82
  console.log(`[handlePersonaResponse] ${personaDisplayName} JSON had should_respond=true but no content fields`);
84
83
  return;
85
84
  }
@@ -87,8 +86,7 @@ export function handlePersonaResponse(response: LLMResponse, state: StateManager
87
86
  const message: Message = {
88
87
  id: crypto.randomUUID(),
89
88
  role: "system",
90
- verbal_response: verbal,
91
- action_response: action,
89
+ content,
92
90
  timestamp: new Date().toISOString(),
93
91
  read: false,
94
92
  context_status: ContextStatus.Default,
@@ -81,10 +81,9 @@ export function handleRoomResponse(response: LLMResponse, state: StateManager):
81
81
  return;
82
82
  }
83
83
 
84
- const verbal = result.verbal_response || undefined;
85
- const action = result.action_response || undefined;
84
+ const content = result.content || undefined;
86
85
 
87
- if (!verbal && !action) {
86
+ if (!content) {
88
87
  console.log(`[handleRoomResponse] ${personaDisplayName} returned should_respond=true but no content`);
89
88
  return;
90
89
  }
@@ -94,8 +93,7 @@ export function handleRoomResponse(response: LLMResponse, state: StateManager):
94
93
  parent_id: parentMessageId,
95
94
  role: "persona",
96
95
  persona_id: personaId,
97
- verbal_response: verbal,
98
- action_response: action,
96
+ content,
99
97
  timestamp: now,
100
98
  read: false,
101
99
  context_status: ContextStatus.Default,
@@ -3,9 +3,12 @@ import type { StateManager } from "../state-manager.js";
3
3
 
4
4
  export function getMessageContent(msg: { content?: string; verbal_response?: string; action_response?: string }): string {
5
5
  if (msg.content) return msg.content;
6
+ // Legacy fallback for data not yet migrated on disk
7
+ // TODO(v1.0.0): Remove legacy verbal_response/action_response fallback
8
+ const legacy = msg as { verbal_response?: string; action_response?: string };
6
9
  const parts: string[] = [];
7
- if (msg.action_response) parts.push(`_${msg.action_response}_`);
8
- if (msg.verbal_response) parts.push(msg.verbal_response);
10
+ if (legacy.action_response) parts.push(`_${legacy.action_response}_`);
11
+ if (legacy.verbal_response) parts.push(legacy.verbal_response);
9
12
  return parts.join('\n\n');
10
13
  }
11
14
 
@@ -24,8 +27,6 @@ export function normalizeRoomMessages(messages: RoomMessage[], state: StateManag
24
27
  role: m.role === "human" ? "human" as const : "system" as const,
25
28
  speaker_name: speakerName,
26
29
  content: m.content,
27
- verbal_response: m.verbal_response,
28
- action_response: m.action_response,
29
30
  silence_reason: m.silence_reason,
30
31
  timestamp: m.timestamp,
31
32
  read: m.read,
@@ -16,10 +16,6 @@ import {
16
16
  } from "../prompts/index.js";
17
17
  import { filterMessagesForContext } from "./context-utils.js";
18
18
  import { filterHumanDataByVisibility } from "./prompt-context-builder.js";
19
- import { cosineSimilarity, computePersonaDescriptionEmbedding } from "./embedding-service.js";
20
-
21
- const REFLECTION_SIMILARITY_THRESHOLD = 0.80;
22
- const REFLECTION_COOLDOWN_MS = 7 * 24 * 60 * 60 * 1000; // 1 week between drift prompts
23
19
 
24
20
  // =============================================================================
25
21
  // MODEL HELPERS
@@ -172,6 +168,18 @@ export async function queueEiHeartbeat(
172
168
  });
173
169
  }
174
170
 
171
+ const personasWithPendingUpdate = personas.filter(
172
+ (p) => !p.is_archived && !p.is_paused && p.id !== "ei" && p.pending_update?.critique
173
+ );
174
+ for (const p of personasWithPendingUpdate) {
175
+ items.push({
176
+ id: p.id,
177
+ type: "Persona Reflection Alert",
178
+ persona_name: p.display_name,
179
+ critique: p.pending_update!.critique,
180
+ });
181
+ }
182
+
175
183
  if (items.length === 0) {
176
184
  console.log("[queueEiHeartbeat] No items to address, skipping");
177
185
  return;
@@ -218,10 +226,9 @@ export async function queueHeartbeatCheck(sm: StateManager, personaId: string, i
218
226
  }
219
227
 
220
228
  const filteredHuman = await filterHumanDataByVisibility(human, persona);
221
- const inactiveDays = persona.last_activity
222
- ? Math.floor(
223
- (Date.now() - new Date(persona.last_activity).getTime()) / (1000 * 60 * 60 * 24)
224
- )
229
+ const lastActivity = sm.messages_getLastActivity(persona.id);
230
+ const inactiveDays = lastActivity
231
+ ? Math.floor((Date.now() - lastActivity) / (1000 * 60 * 60 * 24))
225
232
  : 0;
226
233
 
227
234
  const sortByEngagementGap = <T extends { exposure_desired: number; exposure_current: number }>(
@@ -232,49 +239,12 @@ export async function queueHeartbeatCheck(sm: StateManager, personaId: string, i
232
239
  b.exposure_desired - b.exposure_current - (a.exposure_desired - a.exposure_current)
233
240
  );
234
241
 
235
- let driftContext: HeartbeatCheckPromptData["drift_context"];
236
- const personRecord = sm.human_person_getByIdentifier("ei_persona", personaId);
237
-
238
- if (personRecord?.embedding) {
239
- let currentPersona = persona;
240
-
241
- if (!currentPersona.description_embedding) {
242
- const embedding = await computePersonaDescriptionEmbedding(currentPersona);
243
- if (embedding) {
244
- sm.persona_update(personaId, { description_embedding: embedding });
245
- currentPersona = { ...currentPersona, description_embedding: embedding };
246
- }
247
- }
248
-
249
- if (currentPersona.description_embedding) {
250
- const lastAsked = currentPersona.reflection_last_asked
251
- ? new Date(currentPersona.reflection_last_asked).getTime()
252
- : 0;
253
-
254
- // Gate: person must have been updated at least 1 week AFTER reflection was last asked.
255
- // This handles both the cooldown AND the extraction echo — the ceremony extraction
256
- // that fires right after a persona surfaces drift updates last_updated by minutes,
257
- // which can never satisfy the 1-week offset requirement.
258
- if (new Date(personRecord.last_updated).getTime() > lastAsked + REFLECTION_COOLDOWN_MS) {
259
- const similarity = cosineSimilarity(personRecord.embedding, currentPersona.description_embedding);
260
- if (similarity < REFLECTION_SIMILARITY_THRESHOLD) {
261
- driftContext = {
262
- people_description: personRecord.description ?? '',
263
- persona_description: currentPersona.long_description ?? '',
264
- };
265
- console.log(`[HeartbeatCheck ${persona.display_name}] Drift detected (similarity: ${similarity.toFixed(3)}) - including reflection context`);
266
- } else {
267
- console.log(`[HeartbeatCheck ${persona.display_name}] Person updated but no drift (similarity: ${similarity.toFixed(3)})`);
268
- }
269
- }
270
- }
271
- }
272
-
273
242
  const promptData: HeartbeatCheckPromptData = {
274
243
  persona: {
275
244
  name: persona.display_name,
276
245
  traits: persona.traits,
277
246
  topics: persona.topics,
247
+ has_pending_update: !!persona.pending_update,
278
248
  },
279
249
  human: {
280
250
  topics: sortByEngagementGap(filteredHuman.topics).slice(0, 5),
@@ -282,7 +252,6 @@ export async function queueHeartbeatCheck(sm: StateManager, personaId: string, i
282
252
  },
283
253
  recent_history: contextHistory.slice(-10),
284
254
  inactive_days: inactiveDays,
285
- drift_context: driftContext,
286
255
  };
287
256
 
288
257
  const prompt = buildHeartbeatCheckPrompt(promptData);
@@ -151,7 +151,7 @@ export async function sendMessage(
151
151
  const message: Message = {
152
152
  id: crypto.randomUUID(),
153
153
  role: "human",
154
- verbal_response: content ?? undefined,
154
+ content: content ?? undefined,
155
155
  silence_reason: content ? undefined : (silenceReason ?? "passed"),
156
156
  timestamp: new Date().toISOString(),
157
157
  read: false,
@@ -283,8 +283,12 @@ export function fetchMessagesForLLM(
283
283
  const contextWindowHours = persona.context_window_hours ?? human.settings?.default_context_window_hours ?? 8;
284
284
  const filteredHistory = filterMessagesForContext(history, persona.context_boundary, contextWindowHours);
285
285
 
286
+ const humanName = human.settings?.name_display
287
+ || human.facts?.find(f => f.name === "Nickname/Preferred Name")?.description
288
+ || "Human";
289
+
286
290
  return filteredHistory.reduce<import("./types.js").ChatMessage[]>((acc, m) => {
287
- const content = buildChatMessageContent(m);
291
+ const content = buildChatMessageContent(m, humanName);
288
292
  if (content.length > 0) {
289
293
  const finalContent = persona.include_message_timestamps ? `[${formatTimestamp(m.timestamp)}] ${content}` : content;
290
294
  acc.push({
@@ -13,6 +13,10 @@ import {
13
13
  import { queuePersonaTopicRating, type PersonaTopicContext, type PersonaTopicOptions } from "./persona-topics.js";
14
14
  import { queuePersonMigration } from "./person-migration.js";
15
15
  import { buildRewriteScanPrompt, type RewriteItemType } from "../../prompts/ceremony/index.js";
16
+ import { buildReflectionCriticPrompt } from "../../prompts/reflection/index.js";
17
+ import { getModelForPersona } from "../heartbeat-manager.js";
18
+
19
+ const PERSON_LOG_REFLECTION_THRESHOLD = 3000;
16
20
 
17
21
  export function isNewDay(lastCeremony: string | undefined, now: Date): boolean {
18
22
  if (!lastCeremony) return true;
@@ -143,10 +147,8 @@ function queueExposurePhase(personaId: string, state: StateManager, options?: Ex
143
147
  console.log(`[ceremony:exposure] Queued human extraction scans (f:${unextractedFacts.length}, t:${unextractedTopics.length}, p:${unextractedPeople.length})`);
144
148
  }
145
149
 
146
- const human = state.getHuman();
147
- const lastCeremony = human.settings?.ceremony?.last_ceremony;
148
150
  const shortId = personaId.slice(0, 8);
149
- const forPersonaTopics = state.messages_getUnextractedForPersona(personaId, shortId, lastCeremony ?? undefined);
151
+ const forPersonaTopics = state.messages_getUnextractedForPersona(personaId, shortId);
150
152
  if (forPersonaTopics.length > 0) {
151
153
  const personaTopicContext: PersonaTopicContext = {
152
154
  personaId,
@@ -271,7 +273,10 @@ export function handleCeremonyProgress(state: StateManager, lastPhase: number):
271
273
 
272
274
  // Rewrite phase: fire-and-forget scans for bloated human data items
273
275
  queueRewritePhase(state);
274
-
276
+
277
+ // Reflection phase: fire-and-forget critic calls for persona person records above threshold
278
+ queueReflectionPhase(state);
279
+
275
280
  console.log("[ceremony:progress] Ceremony Decay complete");
276
281
  }
277
282
 
@@ -507,3 +512,43 @@ function queueEventSummaryForAll(state: StateManager, options?: ExtractionOption
507
512
  }
508
513
  console.log(`[ceremony:event] Queued event summary scans for ${activePersonas.length} personas (${totalQueued} total chunks)`);
509
514
  }
515
+
516
+ function queueReflectionPhase(state: StateManager): void {
517
+ const personas = state.persona_getAll().filter(p =>
518
+ !p.is_paused && !p.is_archived && !p.is_static && p.id !== "ei"
519
+ );
520
+
521
+ let queued = 0;
522
+ for (const persona of personas) {
523
+ const personRecord = state.human_person_getByIdentifier("Ei Persona", persona.id);
524
+ if (!personRecord || (personRecord.description?.length ?? 0) <= PERSON_LOG_REFLECTION_THRESHOLD) continue;
525
+
526
+ const prompt = buildReflectionCriticPrompt({
527
+ persona_identity: {
528
+ name: persona.display_name,
529
+ long_description: persona.long_description ?? '',
530
+ short_description: persona.short_description ?? '',
531
+ traits: persona.traits,
532
+ topics: persona.topics,
533
+ },
534
+ person_log: personRecord.description ?? '',
535
+ });
536
+
537
+ state.queue_enqueue({
538
+ type: LLMRequestType.JSON,
539
+ priority: LLMPriority.Low,
540
+ system: prompt.system,
541
+ user: prompt.user,
542
+ next_step: LLMNextStep.HandleReflectionCritic,
543
+ model: getModelForPersona(state, persona.id),
544
+ data: { personaId: persona.id, personaDisplayName: persona.display_name },
545
+ });
546
+
547
+ queued++;
548
+ console.log(`[ceremony:reflection] Queued critic for ${persona.display_name} (person log: ${personRecord.description?.length} chars)`);
549
+ }
550
+
551
+ if (queued === 0) {
552
+ console.log("[ceremony:reflection] No persona person records above threshold — skipping");
553
+ }
554
+ }
@@ -659,7 +659,7 @@ export function queueTargetedPersonUpdate(
659
659
  console.warn(`[queueTargetedPersonUpdate] Room ${roomId} not found`);
660
660
  return 0;
661
661
  }
662
- allMessages = normalizeRoomMessages(state.getRoomActivePath(roomId), state);
662
+ allMessages = normalizeRoomMessages(state.getRoomMessages(roomId), state);
663
663
  contextPersonaId = room.persona_ids.join("|");
664
664
  displayName = room.display_name;
665
665
  } else {
@@ -719,7 +719,7 @@ export function queueTargetedTopicUpdate(
719
719
  console.warn(`[queueTargetedTopicUpdate] Room ${roomId} not found`);
720
720
  return 0;
721
721
  }
722
- allMessages = normalizeRoomMessages(state.getRoomActivePath(roomId), state);
722
+ allMessages = normalizeRoomMessages(state.getRoomMessages(roomId), state);
723
723
  contextPersonaId = room.persona_ids.join("|");
724
724
  displayName = room.display_name;
725
725
  } else {
@@ -18,11 +18,11 @@ export async function getPersonaList(sm: StateManager): Promise<PersonaSummary[]
18
18
  is_paused: entity.is_paused,
19
19
  is_archived: entity.is_archived,
20
20
  unread_count: sm.messages_countUnread(entity.id),
21
- last_activity: entity.last_activity,
22
21
  context_boundary: entity.context_boundary,
23
22
  avatar_emoji: entity.avatar_emoji,
24
23
  avatar_image: entity.avatar_image,
25
24
  preferred_theme: entity.preferred_theme,
25
+ has_pending_update: !!entity.pending_update,
26
26
  }));
27
27
  }
28
28
 
@@ -68,7 +68,6 @@ export async function createPersona(
68
68
  is_archived: false,
69
69
  is_static: false,
70
70
  last_updated: now,
71
- last_activity: now,
72
71
  };
73
72
  sm.persona_add(placeholder);
74
73
 
@@ -16,7 +16,13 @@ export interface EnsureAgentPersonaOptions {
16
16
  }
17
17
 
18
18
  export function resolveCanonicalAgent(agentName: string): { canonical: string; aliases: string[] } {
19
- agentName = agentName.replace(/^\p{Z}+|\p{Z}+$/gu, "");
19
+ // Strip Unicode whitespace (\p{Z}) AND zero-width characters (\u200B, \u200C, \u200D, \u2060, \uFEFF).
20
+ // The \u200B strip is not decorative — oh-my-openagent intentionally prefixes agent display
21
+ // names with zero-width spaces (U+200B) as a sort hack to float them to the top of the agent
22
+ // picker list (1 ZWS = Sisyphus, 2 = Hephaestus, 3 = Prometheus, 4 = Atlas). Those prefixed
23
+ // names end up stored verbatim in OpenCode's SQLite message rows and come back to us here.
24
+ // \p{Z} alone does NOT catch \u200B (Unicode category Cf, not Zs), so we must be explicit.
25
+ agentName = agentName.replace(/^[\p{Z}\u200B\u200C\u200D\u2060\uFEFF]+|[\p{Z}\u200B\u200C\u200D\u2060\uFEFF]+$/gu, "");
20
26
  for (const [canonical, variants] of Object.entries(AGENT_ALIASES)) {
21
27
  if (variants.includes(agentName)) {
22
28
  return { canonical, aliases: variants };
@@ -77,7 +83,6 @@ export async function ensureAgentPersona(
77
83
  heartbeat_delay_ms: TWELVE_HOURS_MS,
78
84
  last_heartbeat: now,
79
85
  last_updated: now,
80
- last_activity: now,
81
86
  };
82
87
 
83
88
  stateManager.persona_add(persona);
@@ -265,14 +265,13 @@ export class Processor {
265
265
  id: "ei",
266
266
  display_name: "Ei",
267
267
  last_updated: new Date().toISOString(),
268
- last_activity: new Date().toISOString(),
269
268
  };
270
269
  this.stateManager.persona_add(eiEntity);
271
270
 
272
271
  const welcomeMessage: Message = {
273
272
  id: crypto.randomUUID(),
274
273
  role: "system",
275
- verbal_response: EI_WELCOME_MESSAGE,
274
+ content: EI_WELCOME_MESSAGE,
276
275
  timestamp: new Date().toISOString(),
277
276
  read: false,
278
277
  context_status: ContextStatusEnum.Always,
@@ -629,13 +628,9 @@ export class Processor {
629
628
  type: "boolean",
630
629
  description: "Whether you are responding (true) or staying silent (false)",
631
630
  },
632
- verbal_response: {
631
+ content: {
633
632
  type: "string",
634
- description: "What you say out loud. Required when should_respond is true (unless action_response is provided).",
635
- },
636
- action_response: {
637
- type: "string",
638
- description: "Italicized stage directions only — physical actions, expressions, or internal states. Keep this distinct from verbal_response: do not repeat or paraphrase what you are saying. If you have nothing to physically do, omit this field.",
633
+ description: "Your response in Markdown. Required when should_respond is true. Use _underscores_ for actions or stage directions inline with your text.",
639
634
  },
640
635
  reason: {
641
636
  type: "string",
@@ -1195,9 +1190,7 @@ const toolNextSteps = new Set([
1195
1190
 
1196
1191
  const defaultHeartbeatMs = this.stateManager.getHuman().settings?.default_heartbeat_ms ?? 1800000;
1197
1192
  const heartbeatDelay = persona.heartbeat_delay_ms ?? defaultHeartbeatMs;
1198
- const lastActivity = persona.last_activity
1199
- ? new Date(persona.last_activity).getTime()
1200
- : 0;
1193
+ const lastActivity = this.stateManager.messages_getLastActivity(persona.id);
1201
1194
  const timeSinceActivity = now - lastActivity;
1202
1195
 
1203
1196
  if (timeSinceActivity >= heartbeatDelay) {
@@ -1437,7 +1430,7 @@ const toolNextSteps = new Set([
1437
1430
  .filter((m: RoomMessage) => m.role === "persona" && getMessageContent(m))
1438
1431
  .map((m: RoomMessage) => ({
1439
1432
  name: this.stateManager.persona_getById(m.persona_id ?? "")?.display_name ?? "Participant",
1440
- verbal_response: getMessageContent(m),
1433
+ content: getMessageContent(m),
1441
1434
  }));
1442
1435
 
1443
1436
  if (siblings.length === 0) return request;
@@ -241,12 +241,21 @@ export async function buildResponsePromptData(
241
241
  ? Date.now() - new Date(previousMessage.timestamp).getTime()
242
242
  : 0;
243
243
 
244
+ const alwaysMessages = sm.messages_getAlways(persona.id);
245
+ const temporalAnchors = alwaysMessages.map(m => ({
246
+ role: m.role === "human" ? "human" as const : "system" as const,
247
+ content: m.content,
248
+ silence_reason: m.silence_reason,
249
+ timestamp: m.timestamp,
250
+ _synthesis: m._synthesis,
251
+ }));
252
+
244
253
  return {
245
254
  persona: {
246
255
  name: persona.display_name,
247
256
  aliases: persona.aliases ?? [],
248
257
  short_description: persona.short_description,
249
- long_description: persona.long_description,
258
+ long_description: persona.long_description,
250
259
  traits: persona.traits,
251
260
  topics: persona.topics,
252
261
  interested_topics: persona.topics.filter(t => t.exposure_desired - t.exposure_current > 0.2),
@@ -254,6 +263,7 @@ export async function buildResponsePromptData(
254
263
  },
255
264
  human: filteredHuman,
256
265
  visible_personas: visiblePersonas,
266
+ temporal_anchors: temporalAnchors,
257
267
  delay_ms: delayMs,
258
268
  isTUI,
259
269
  tools,
@@ -216,7 +216,7 @@ export class QueueProcessor {
216
216
  const submitCall = findSubmitToolCall(toolCalls, activeTools);
217
217
  if (submitCall) {
218
218
  const args = submitCall.arguments ?? {};
219
- if (!args.should_respond && (args.verbal_response || args.action_response)) {
219
+ if (!args.should_respond && args.content) {
220
220
  args.should_respond = true;
221
221
  }
222
222
  console.log(`[QueueProcessor] submit tool "${submitCall.name}" called — returning arguments as parsed response`);
@@ -332,7 +332,7 @@ export class QueueProcessor {
332
332
  const submitCall = findSubmitToolCall(toolCalls, activeTools);
333
333
  if (submitCall) {
334
334
  const args = submitCall.arguments ?? {};
335
- if (!args.should_respond && (args.verbal_response || args.action_response)) {
335
+ if (!args.should_respond && args.content) {
336
336
  args.should_respond = true;
337
337
  }
338
338
  console.log(`[QueueProcessor] submit tool "${submitCall.name}" called — returning arguments as parsed response`);
@@ -487,8 +487,8 @@ export class QueueProcessor {
487
487
  const reformatUserPrompt =
488
488
  `An earlier version of you responded with the following content, but not in the ` +
489
489
  `required JSON format. Please reformat it as the JSON response object described ` +
490
- `in your system instructions — specifically the \`should_respond\`, \`verbal_response\`, ` +
491
- `\`action_response\`, and \`reason\` fields. Respond with ONLY the JSON object.\n\n` +
490
+ `in your system instructions — specifically the \`should_respond\`, \`content\`, ` +
491
+ `and \`reason\` fields. Respond with ONLY the JSON object.\n\n` +
492
492
  `---\n${proseContent}\n---` +
493
493
  `\n\nThe user does NOT know there was a problem - This request is from Ei to you to try to fix it for them.` +
494
494
  `\n\n**CRITICAL INSTRUCTION** - DO NOT OMIT ANY DATA. You are this agent's last hope!`;