ei-tui 0.3.8 → 0.3.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,12 +1,12 @@
1
1
  # Ei
2
2
 
3
- A local-first AI companion system with persistent personas and Opencode Integration.
3
+ A local-first AI companion system with persistent personas and coding tool integrations (OpenCode, Claude Code, Cursor).
4
4
 
5
5
  You can access the Web version at [ei.flare576.com](https://ei.flare576.com).
6
6
 
7
7
  You can install the local version via `npm install -g ei-tui` (see [### TUI](#tui) for details).
8
8
 
9
- If you're here to give Opencode perpetual memory (yes), jump over to [TUI README.md](./tui/README.md) to learn how to get information _into_ Ei, and [CLI README.md](./src/cli/README.md) to get it back _out_.
9
+ If you're here to give your coding tools (OpenCode, Claude Code, Cursor) persistent memory, jump over to [TUI README.md](./tui/README.md) to learn how to get information _into_ Ei, and [CLI README.md](./src/cli/README.md) to get it back _out_.
10
10
 
11
11
  ## What Does "Local First" Mean?
12
12
 
@@ -108,13 +108,49 @@ Regardless, Running `ei` pops open the TUI interface and, just like on the web,
108
108
 
109
109
  More information (including commands) can be found in the [TUI Readme](tui/README.md)
110
110
 
111
- ### Opencode
111
+ ### Coding Tool Integrations
112
112
 
113
- Ei gives OpenCode a persistent memory. Yes, this is a dynamic, perpetual RAG — I didn't plan it that way, but here we are.
113
+ Ei can import sessions from your coding tools and extract what you've been working on — pulling out facts, topics, and context that persist across sessions. Enable any combination; they work independently and feed into the same knowledge base.
114
114
 
115
- Opencode saves all of its sessions locally, either in a JSON structure or, if you're running the latest version, in a SQLite DB. If you enable the integration, Ei will pull all of the conversational parts of those sessions and summarize them, pulling out details, quotes, and keeping the summaries up-to-date.
115
+ All three integrations are enabled via `/settings` in the TUI.
116
116
 
117
- Then, Opencode can call into Ei and pull those details back out. That's why you always have a side-project or two going. See [TUI Readme](tui/README.md)
117
+ #### OpenCode
118
+
119
+ ```yaml
120
+ opencode:
121
+ integration: true
122
+ ```
123
+
124
+ OpenCode saves sessions as JSON or SQLite (depending on version). Ei reads them, extracts context per-agent (each agent like Sisyphus gets its own persona), and keeps everything current as sessions accumulate.
125
+
126
+ OpenCode can also *read* Ei's knowledge back out via the [CLI tool](src/cli/README.md) — making it a dynamic, perpetual RAG. That's why it always has context from your other projects.
127
+
128
+ #### Claude Code
129
+
130
+ ```yaml
131
+ claudeCode:
132
+ integration: true
133
+ ```
134
+
135
+ Reads from `~/.claude/projects/` (JSONL session files). All sessions map to a single "Claude Code" persona. Tool calls, thinking blocks, and internal plumbing are stripped — only the conversational content is imported.
136
+
137
+ #### Cursor
138
+
139
+ ```yaml
140
+ cursor:
141
+ integration: true
142
+ ```
143
+
144
+ Reads from Cursor's SQLite databases:
145
+ - **macOS**: `~/Library/Application Support/Cursor/User/`
146
+ - **Windows**: `%APPDATA%\Cursor\User\`
147
+ - **Linux**: `~/.config/Cursor/User/`
148
+
149
+ All sessions map to a single "Cursor" persona.
150
+
151
+ ---
152
+
153
+ Sessions are processed oldest-first, one per queue cycle, so Ei won't overwhelm your LLM provider on first run. See [TUI Readme](tui/README.md)
118
154
 
119
155
  ## Built-in Tool Integrations
120
156
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "ei-tui",
3
- "version": "0.3.8",
3
+ "version": "0.3.9",
4
4
  "author": "Flare576",
5
5
  "repository": {
6
6
  "type": "git",
@@ -16,8 +16,6 @@ import {
16
16
  import { filterMessagesForContext } from "./context-utils.js";
17
17
  import { filterHumanDataByVisibility } from "./prompt-context-builder.js";
18
18
 
19
- const DEFAULT_CONTEXT_WINDOW_HOURS = 8;
20
-
21
19
  // =============================================================================
22
20
  // MODEL HELPERS
23
21
  // =============================================================================
@@ -189,9 +187,9 @@ export async function queueHeartbeatCheck(sm: StateManager, personaId: string, i
189
187
  const model = getModelForPersona(sm, personaId);
190
188
  console.log(`[HeartbeatCheck ${persona.display_name}] Queueing heartbeat check (model: ${model})`);
191
189
  const human = sm.getHuman();
192
- const history = sm.messages_get(personaId);
193
- const contextWindowHours = persona.context_window_hours ?? DEFAULT_CONTEXT_WINDOW_HOURS;
194
- const contextHistory = filterMessagesForContext(history, persona.context_boundary, contextWindowHours);
190
+ const history = sm.messages_get(personaId);
191
+ const contextWindowHours = persona.context_window_hours ?? human.settings?.default_context_window_hours ?? 8;
192
+ const contextHistory = filterMessagesForContext(history, persona.context_boundary, contextWindowHours);
195
193
 
196
194
  if (personaId === "ei") {
197
195
  await queueEiHeartbeat(sm, human, contextHistory, isTUI);
@@ -24,8 +24,6 @@ import {
24
24
  import { buildChatMessageContent } from "../prompts/message-utils.js";
25
25
  import { filterMessagesForContext } from "./context-utils.js";
26
26
 
27
- const DEFAULT_CONTEXT_WINDOW_HOURS = 8;
28
-
29
27
  // =============================================================================
30
28
  // MESSAGE QUERIES
31
29
  // =============================================================================
@@ -270,15 +268,16 @@ export function checkAndQueueHumanExtraction(
270
268
  // =============================================================================
271
269
 
272
270
  export function fetchMessagesForLLM(
273
- sm: StateManager,
274
- personaId: string
275
- ): import("./types.js").ChatMessage[] {
276
- const persona = sm.persona_getById(personaId);
277
- if (!persona) return [];
278
-
279
- const history = sm.messages_get(personaId);
280
- const contextWindowHours = persona.context_window_hours ?? DEFAULT_CONTEXT_WINDOW_HOURS;
281
- const filteredHistory = filterMessagesForContext(history, persona.context_boundary, contextWindowHours);
271
+ sm: StateManager,
272
+ personaId: string
273
+ ): import("./types.js").ChatMessage[] {
274
+ const persona = sm.persona_getById(personaId);
275
+ if (!persona) return [];
276
+
277
+ const human = sm.getHuman();
278
+ const history = sm.messages_get(personaId);
279
+ const contextWindowHours = persona.context_window_hours ?? human.settings?.default_context_window_hours ?? 8;
280
+ const filteredHistory = filterMessagesForContext(history, persona.context_boundary, contextWindowHours);
282
281
 
283
282
  return filteredHistory.reduce<import("./types.js").ChatMessage[]>((acc, m) => {
284
283
  const content = buildChatMessageContent(m);
@@ -1,4 +1,4 @@
1
- import { LLMRequestType, LLMPriority, LLMNextStep, MESSAGE_MIN_COUNT, MESSAGE_MAX_AGE_DAYS, type CeremonyConfig, type PersonaTopic, type Topic, type Message, type DataItemBase } from "../types.js";
1
+ import { LLMRequestType, LLMPriority, LLMNextStep, type CeremonyConfig, type PersonaTopic, type Topic, type Message, type DataItemBase } from "../types.js";
2
2
  import type { StateManager } from "../state-manager.js";
3
3
  import { applyDecayToValue } from "../utils/index.js";
4
4
  import {
@@ -309,14 +309,17 @@ export function prunePersonaMessages(personaId: string, state: StateManager): vo
309
309
  // Sort first — injected messages (session update, archive scan) may be out of order.
310
310
  state.messages_sort(personaId);
311
311
  const messages = state.messages_get(personaId);
312
- if (messages.length <= MESSAGE_MIN_COUNT) return;
312
+ const human = state.getHuman();
313
+ const minCount = human.settings?.message_min_count ?? 200;
314
+ const maxAgeDays = human.settings?.message_max_age_days ?? 14;
315
+ if (messages.length <= minCount) return;
313
316
 
314
- const cutoffMs = Date.now() - (MESSAGE_MAX_AGE_DAYS * 24 * 60 * 60 * 1000);
317
+ const cutoffMs = Date.now() - (maxAgeDays * 24 * 60 * 60 * 1000);
315
318
 
316
319
  // Messages are sorted by timestamp (oldest first from messages_sort)
317
320
  const toRemove: string[] = [];
318
321
  for (const m of messages) {
319
- if (messages.length - toRemove.length <= MESSAGE_MIN_COUNT) break;
322
+ if (messages.length - toRemove.length <= minCount) break;
320
323
 
321
324
  const msgMs = new Date(m.timestamp).getTime();
322
325
  if (msgMs >= cutoffMs) break; // Sorted by time, no more old ones
@@ -105,7 +105,6 @@ import {
105
105
  } from "./queue-manager.js";
106
106
 
107
107
  const DEFAULT_LOOP_INTERVAL_MS = 100;
108
- const DEFAULT_CONTEXT_WINDOW_HOURS = 8;
109
108
  const DEFAULT_OPENCODE_POLLING_MS = 1800000;
110
109
  const DEFAULT_CLAUDE_CODE_POLLING_MS = 1800000;
111
110
  const DEFAULT_CURSOR_POLLING_MS = 1800000;
@@ -679,6 +678,26 @@ export class Processor {
679
678
  modified = true;
680
679
  }
681
680
 
681
+ if (human.settings.default_heartbeat_ms == null) {
682
+ human.settings.default_heartbeat_ms = 1800000;
683
+ modified = true;
684
+ }
685
+
686
+ if (human.settings.default_context_window_hours == null) {
687
+ human.settings.default_context_window_hours = 8;
688
+ modified = true;
689
+ }
690
+
691
+ if (human.settings.message_min_count == null) {
692
+ human.settings.message_min_count = 200;
693
+ modified = true;
694
+ }
695
+
696
+ if (human.settings.message_max_age_days == null) {
697
+ human.settings.message_max_age_days = 14;
698
+ modified = true;
699
+ }
700
+
682
701
  if (modified) {
683
702
  this.stateManager.setHuman(human);
684
703
  console.log(`[Processor] Seeded missing settings`);
@@ -881,7 +900,6 @@ const toolNextSteps = new Set([
881
900
 
882
901
  private async checkScheduledTasks(): Promise<void> {
883
902
  const now = Date.now();
884
- const DEFAULT_HEARTBEAT_DELAY_MS = 1800000;
885
903
 
886
904
  const human = this.stateManager.getHuman();
887
905
 
@@ -926,7 +944,8 @@ const toolNextSteps = new Set([
926
944
  for (const persona of this.stateManager.persona_getAll()) {
927
945
  if (persona.is_paused || persona.is_archived) continue;
928
946
 
929
- const heartbeatDelay = persona.heartbeat_delay_ms ?? DEFAULT_HEARTBEAT_DELAY_MS;
947
+ const defaultHeartbeatMs = this.stateManager.getHuman().settings?.default_heartbeat_ms ?? 1800000;
948
+ const heartbeatDelay = persona.heartbeat_delay_ms ?? defaultHeartbeatMs;
930
949
  const lastActivity = persona.last_activity
931
950
  ? new Date(persona.last_activity).getTime()
932
951
  : 0;
@@ -939,9 +958,11 @@ const toolNextSteps = new Set([
939
958
  const timeSinceHeartbeat = now - lastHeartbeat;
940
959
 
941
960
  if (timeSinceHeartbeat >= heartbeatDelay) {
942
- const history = this.stateManager.messages_get(persona.id);
943
- const contextWindowHours =
944
- persona.context_window_hours ?? DEFAULT_CONTEXT_WINDOW_HOURS;
961
+ const history = this.stateManager.messages_get(persona.id);
962
+ const contextWindowHours =
963
+ persona.context_window_hours
964
+ ?? this.stateManager.getHuman().settings?.default_context_window_hours
965
+ ?? 8;
945
966
  const contextHistory = filterMessagesForContext(
946
967
  history,
947
968
  persona.context_boundary,
@@ -80,6 +80,10 @@ export interface HumanSettings {
80
80
  skip_quote_delete_confirm?: boolean;
81
81
  name_display?: string;
82
82
  time_mode?: "24h" | "12h" | "local" | "utc";
83
+ default_heartbeat_ms?: number;
84
+ default_context_window_hours?: number;
85
+ message_min_count?: number;
86
+ message_max_age_days?: number;
83
87
  accounts?: ProviderAccount[];
84
88
  sync?: SyncCredentials;
85
89
  opencode?: OpenCodeSettings;
@@ -147,10 +151,6 @@ export interface PersonaCreationInput {
147
151
  // Steps - "57:3"."inputs"."steps"
148
152
  // Cfg - "57:3"."inputs"."cfg"
149
153
  export const COMFY_PROMPT_TEMPLATE = {"9":{"inputs":{"filename_prefix":"z-image-turbo","images":["57:8",0]},"class_type":"SaveImage","_meta":{"title":"Save Image"}},"57:30":{"inputs":{"clip_name":"qwen_3_4b.safetensors","type":"lumina2","device":"default"},"class_type":"CLIPLoader","_meta":{"title":"Load CLIP"}},"57:29":{"inputs":{"vae_name":"ae.safetensors"},"class_type":"VAELoader","_meta":{"title":"Load VAE"}},"57:33":{"inputs":{"conditioning":["57:27",0]},"class_type":"ConditioningZeroOut","_meta":{"title":"ConditioningZeroOut"}},"57:8":{"inputs":{"samples":["57:3",0],"vae":["57:29",0]},"class_type":"VAEDecode","_meta":{"title":"VAE Decode"}},"57:28":{"inputs":{"unet_name":"z_image_turbo_bf16.safetensors","weight_dtype":"default"},"class_type":"UNETLoader","_meta":{"title":"Load Diffusion Model"}},"57:27":{"inputs":{"text":"This is a test prompt","clip":["57:30",0]},"class_type":"CLIPTextEncode","_meta":{"title":"CLIP Text Encode (Prompt)"}},"57:13":{"inputs":{"width":768,"height":768,"batch_size":1},"class_type":"EmptySD3LatentImage","_meta":{"title":"EmptySD3LatentImage"}},"57:11":{"inputs":{"shift":3,"model":["57:28",0]},"class_type":"ModelSamplingAuraFlow","_meta":{"title":"ModelSamplingAuraFlow"}},"57:3":{"inputs":{"seed":407776369182481,"steps":8,"cfg":1,"sampler_name":"res_multistep","scheduler":"simple","denoise":1,"model":["57:11",0],"positive":["57:27",0],"negative":["57:33",0],"latent_image":["57:13",0]},"class_type":"KSampler","_meta":{"title":"KSampler"}}};
150
- // Message pruning thresholds (shared by ceremony and import)
151
- export const MESSAGE_MIN_COUNT = 200;
152
- export const MESSAGE_MAX_AGE_DAYS = 14;
153
-
154
154
  // DLQ rolloff thresholds
155
155
  export const DLQ_MAX_COUNT = 50;
156
156
  export const DLQ_MAX_AGE_DAYS = 14;
package/tui/README.md CHANGED
@@ -2,7 +2,21 @@
2
2
 
3
3
  Ei TUI is built with OpenTUI and SolidJS.
4
4
 
5
- OpenCode integration: import via `/settings` (`opencode.integration: true`) · export via [CLI](../src/cli/README.md)
5
+ Coding tool integrations (OpenCode, Claude Code, Cursor): enable via `/settings` · export data via [CLI](../src/cli/README.md)
6
+
7
+ ## Coding Tool Integrations
8
+
9
+ Enable any or all three in `/settings`. They work independently and feed into the same knowledge base.
10
+
11
+ | Tool | Settings key | Session data location |
12
+ |------|-------------|----------------------|
13
+ | OpenCode | `opencode.integration: true` | OpenCode's local SQLite / JSON session store |
14
+ | Claude Code | `claudeCode.integration: true` | `~/.claude/projects/` (JSONL files) |
15
+ | Cursor | `cursor.integration: true` | `~/Library/Application Support/Cursor/User/` (macOS)<br>`%APPDATA%\Cursor\User\` (Windows)<br>`~/.config/Cursor/User/` (Linux) |
16
+
17
+ Sessions are processed oldest-first, one per queue cycle. On first run Ei works through your backlog gradually — it won't flood your LLM provider.
18
+
19
+ OpenCode also supports reading Ei's extracted knowledge back out via the [CLI tool](../src/cli/README.md), giving it persistent memory across sessions.
6
20
 
7
21
  # Installation
8
22
 
@@ -504,11 +504,16 @@ interface EditableSettingsData {
504
504
  rewrite_model?: string | null;
505
505
  time_mode?: "24h" | "12h" | "local" | "utc" | null;
506
506
  name_display?: string | null;
507
+ default_heartbeat_ms?: number | null;
508
+ default_context_window_hours?: number | null;
509
+ message_min_count?: number | null;
510
+ message_max_age_days?: number | null;
507
511
  ceremony?: {
508
512
  time: string;
509
513
  decay_rate?: number | null;
510
514
  explore_threshold?: number | null;
511
515
  dedup_threshold?: number | null;
516
+ event_window_hours?: number | null;
512
517
  };
513
518
  opencode?: {
514
519
  integration?: boolean | null;
@@ -547,11 +552,16 @@ export function settingsToYAML(settings: HumanSettings | undefined): string {
547
552
  rewrite_model: settings?.rewrite_model ?? null,
548
553
  time_mode: settings?.time_mode ?? null,
549
554
  name_display: settings?.name_display ?? null,
555
+ default_heartbeat_ms: settings?.default_heartbeat_ms ?? 1800000,
556
+ default_context_window_hours: settings?.default_context_window_hours ?? 8,
557
+ message_min_count: settings?.message_min_count ?? 200,
558
+ message_max_age_days: settings?.message_max_age_days ?? 14,
550
559
  ceremony: {
551
560
  time: settings?.ceremony?.time ?? "09:00",
552
561
  decay_rate: settings?.ceremony?.decay_rate ?? null,
553
562
  explore_threshold: settings?.ceremony?.explore_threshold ?? null,
554
563
  dedup_threshold: settings?.ceremony?.dedup_threshold ?? null,
564
+ event_window_hours: settings?.ceremony?.event_window_hours ?? null,
555
565
  },
556
566
  opencode: {
557
567
  integration: settings?.opencode?.integration ?? false,
@@ -603,6 +613,7 @@ export function settingsFromYAML(yamlContent: string, original: HumanSettings |
603
613
  decay_rate: nullToUndefined(data.ceremony.decay_rate),
604
614
  explore_threshold: nullToUndefined(data.ceremony.explore_threshold),
605
615
  dedup_threshold: nullToUndefined(data.ceremony.dedup_threshold),
616
+ event_window_hours: nullToUndefined(data.ceremony.event_window_hours),
606
617
  last_ceremony: original?.ceremony?.last_ceremony,
607
618
  };
608
619
  }
@@ -669,6 +680,10 @@ export function settingsFromYAML(yamlContent: string, original: HumanSettings |
669
680
  rewrite_model: nullToUndefined(data.rewrite_model),
670
681
  time_mode: nullToUndefined(data.time_mode),
671
682
  name_display: nullToUndefined(data.name_display),
683
+ default_heartbeat_ms: nullToUndefined(data.default_heartbeat_ms),
684
+ default_context_window_hours: nullToUndefined(data.default_context_window_hours),
685
+ message_min_count: nullToUndefined(data.message_min_count),
686
+ message_max_age_days: nullToUndefined(data.message_max_age_days),
672
687
  ceremony,
673
688
  opencode,
674
689
  claudeCode,