@tjamescouch/gro 1.3.6 → 1.3.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/drivers/anthropic.js +256 -0
  2. package/dist/drivers/index.js +2 -0
  3. package/dist/drivers/streaming-openai.js +262 -0
  4. package/dist/drivers/types.js +1 -0
  5. package/dist/errors.js +79 -0
  6. package/dist/logger.js +30 -0
  7. package/dist/main.js +867 -0
  8. package/dist/mcp/client.js +130 -0
  9. package/dist/mcp/index.js +1 -0
  10. package/dist/memory/advanced-memory.js +210 -0
  11. package/dist/memory/agent-memory.js +52 -0
  12. package/dist/memory/agenthnsw.js +86 -0
  13. package/{src/memory/index.ts → dist/memory/index.js} +0 -1
  14. package/dist/memory/simple-memory.js +34 -0
  15. package/dist/memory/vector-index.js +7 -0
  16. package/dist/package.json +22 -0
  17. package/dist/session.js +154 -0
  18. package/dist/tools/agentpatch.js +91 -0
  19. package/dist/tools/bash.js +61 -0
  20. package/dist/tools/version.js +76 -0
  21. package/dist/utils/rate-limiter.js +46 -0
  22. package/{src/utils/retry.ts → dist/utils/retry.js} +8 -12
  23. package/dist/utils/timed-fetch.js +25 -0
  24. package/package.json +11 -3
  25. package/.github/workflows/ci.yml +0 -20
  26. package/src/drivers/anthropic.ts +0 -281
  27. package/src/drivers/index.ts +0 -5
  28. package/src/drivers/streaming-openai.ts +0 -258
  29. package/src/drivers/types.ts +0 -39
  30. package/src/errors.ts +0 -97
  31. package/src/logger.ts +0 -28
  32. package/src/main.ts +0 -905
  33. package/src/mcp/client.ts +0 -163
  34. package/src/mcp/index.ts +0 -2
  35. package/src/memory/advanced-memory.ts +0 -263
  36. package/src/memory/agent-memory.ts +0 -61
  37. package/src/memory/agenthnsw.ts +0 -122
  38. package/src/memory/simple-memory.ts +0 -41
  39. package/src/memory/vector-index.ts +0 -30
  40. package/src/session.ts +0 -150
  41. package/src/tools/agentpatch.ts +0 -89
  42. package/src/tools/bash.ts +0 -61
  43. package/src/tools/version.ts +0 -98
  44. package/src/utils/rate-limiter.ts +0 -60
  45. package/src/utils/timed-fetch.ts +0 -29
  46. package/tests/errors.test.ts +0 -246
  47. package/tests/memory.test.ts +0 -186
  48. package/tests/rate-limiter.test.ts +0 -76
  49. package/tests/retry.test.ts +0 -138
  50. package/tests/timed-fetch.test.ts +0 -104
  51. package/tsconfig.json +0 -13
@@ -0,0 +1,130 @@
1
+ /**
2
+ * MCP client — connects to MCP servers, discovers tools, routes tool calls.
3
+ * Compatible with Claude Code's ~/.claude/settings.json mcpServers config.
4
+ */
5
+ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
6
+ import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
7
+ import { Logger } from "../logger.js";
8
+ import { groError, asError, errorLogFields } from "../errors.js";
9
export class McpManager {
    /** Default per-call timeout for MCP tool invocations: 5 minutes. */
    static DEFAULT_TOOL_TIMEOUT_MS = 5 * 60 * 1000;
    constructor() {
        // Keyed by server name; each entry bundles the SDK client, its stdio
        // transport, and the tools discovered from that server.
        this.servers = new Map();
    }
    /**
     * Connect to all configured MCP servers in parallel and discover their tools.
     *
     * A failure on one server is wrapped as a retryable "mcp_error" and logged
     * as a warning; it does not reject the overall promise, so a single bad
     * server cannot block the rest.
     *
     * @param {Record<string, {command: string, args?: string[], env?: Record<string, string>, cwd?: string}>} configs
     */
    async connectAll(configs) {
        const entries = Object.entries(configs);
        if (entries.length === 0)
            return;
        Logger.debug(`Connecting to ${entries.length} MCP server(s)...`);
        await Promise.all(entries.map(([name, cfg]) => this.connectOne(name, cfg).catch((e) => {
            const ge = groError("mcp_error", `MCP server "${name}" failed to connect: ${asError(e).message}`, {
                retryable: true,
                cause: e,
            });
            Logger.warn(ge.message, errorLogFields(ge));
        })));
    }
    /**
     * Spawn one MCP server over stdio, connect, and record its tool list.
     * Throws on connect/discovery failure; connectAll handles that per-server.
     */
    async connectOne(name, cfg) {
        const transport = new StdioClientTransport({
            command: cfg.command,
            args: cfg.args ?? [],
            // Child inherits our environment; per-server env entries win.
            env: { ...process.env, ...cfg.env },
            cwd: cfg.cwd,
            stderr: "pipe",
        });
        const client = new Client({ name: "gro", version: "0.2.0" }, { capabilities: {} });
        await client.connect(transport);
        // Discover tools advertised by this server and tag each with its origin.
        const toolsResult = await client.listTools();
        const tools = (toolsResult.tools ?? []).map((t) => ({
            name: t.name,
            description: t.description,
            inputSchema: t.inputSchema,
            serverName: name,
        }));
        this.servers.set(name, { name, client, transport, tools });
        Logger.debug(`MCP "${name}": ${tools.length} tool(s) available`);
    }
    /**
     * Get all discovered tools in OpenAI function-calling format.
     * Missing descriptions become ""; a missing input schema becomes an
     * empty object schema.
     */
    getToolDefinitions() {
        const defs = [];
        for (const server of this.servers.values()) {
            for (const tool of server.tools) {
                defs.push({
                    type: "function",
                    function: {
                        name: tool.name,
                        description: tool.description ?? "",
                        parameters: tool.inputSchema ?? { type: "object", properties: {} },
                    },
                });
            }
        }
        return defs;
    }
    /**
     * Execute a tool call by routing it to the first connected server that
     * advertises the tool (iteration order = connection order).
     *
     * @param {string} name tool name as advertised by the server
     * @param {object} args tool arguments (already parsed)
     * @param {number} [timeoutMs] per-call timeout in milliseconds;
     *   defaults to McpManager.DEFAULT_TOOL_TIMEOUT_MS (5 minutes)
     * @returns {Promise<string>} newline-joined text content, or JSON for
     *   non-text content items / non-array results
     * @throws gro "mcp_error": retryable when the call fails, non-retryable
     *   when no server provides the tool
     */
    async callTool(name, args, timeoutMs = McpManager.DEFAULT_TOOL_TIMEOUT_MS) {
        // Find which server provides this tool
        for (const server of this.servers.values()) {
            const tool = server.tools.find(t => t.name === name);
            if (tool) {
                try {
                    const result = await server.client.callTool({ name, arguments: args }, undefined, { timeout: timeoutMs });
                    // Extract text content from result
                    if (Array.isArray(result.content)) {
                        return result.content
                            .map((c) => {
                            if (c.type === "text")
                                return c.text;
                            return JSON.stringify(c);
                        })
                            .join("\n");
                    }
                    return JSON.stringify(result);
                }
                catch (e) {
                    const err = asError(e);
                    const ge = groError("mcp_error", `MCP tool "${name}" (server: ${server.name}) failed: ${err.message}`, {
                        retryable: true,
                        cause: e,
                    });
                    Logger.error(`MCP tool call failed [${server.name}/${name}]:`, errorLogFields(ge));
                    throw ge;
                }
            }
        }
        const ge = groError("mcp_error", `No MCP server provides tool "${name}"`, { retryable: false });
        Logger.error(ge.message, errorLogFields(ge));
        throw ge;
    }
    /**
     * Check if a tool name is provided by any connected MCP server.
     */
    hasTool(name) {
        for (const server of this.servers.values()) {
            if (server.tools.some(t => t.name === name))
                return true;
        }
        return false;
    }
    /**
     * Disconnect all MCP servers. Close errors are logged at debug level and
     * never propagate — shutdown is best-effort by design.
     */
    async disconnectAll() {
        for (const server of this.servers.values()) {
            try {
                await server.client.close();
            }
            catch (e) {
                Logger.debug(`MCP server "${server.name}" close error: ${asError(e).message}`);
            }
        }
        this.servers.clear();
    }
}
@@ -0,0 +1 @@
1
+ export { McpManager } from "./client.js";
@@ -0,0 +1,210 @@
1
+ import { AgentMemory } from "./agent-memory.js";
2
+ import { saveSession, loadSession, ensureGroDir } from "../session.js";
3
/**
 * AdvancedMemory — swim-lane summarization with token budgeting.
 *
 * Maintains three lanes (assistant / system / user) and summarizes independently.
 * Uses character-based token estimation with high/low watermark hysteresis.
 * Background summarization never blocks the caller.
 */
export class AdvancedMemory extends AgentMemory {
    /**
     * @param {object} args
     *   - systemPrompt: forwarded to AgentMemory (seeded as first message).
     *   - driver / model: primary chat driver and model id.
     *   - summarizerDriver / summarizerModel: optional dedicated summarizer;
     *     fall back to the primary driver/model when omitted.
     *   - contextTokens: context window size (clamped to >= 2048, default 8192).
     *   - reserveHeaderTokens / reserveResponseTokens: tokens held back from
     *     the budget for headers and the model's reply (defaults 1200 / 800).
     *   - highRatio: start summarizing above this fraction of the budget
     *     (clamped to [0.55, 0.95], default 0.70).
     *   - lowRatio: prune down toward this fraction (clamped to
     *     [0.35, highRatio - 0.05], default 0.50) — the hysteresis gap keeps
     *     summarization from re-triggering on every add.
     *   - summaryRatio: cap on the budget fraction spent on summary text
     *     (clamped to [0.15, 0.50], default 0.35).
     *   - avgCharsPerToken: chars-per-token heuristic (>= 1.5, default 4).
     *   - keepRecentPerLane: verbatim tail kept per lane (>= 1, default 4).
     *   - keepRecentTools: verbatim tool results kept (>= 0, default 3).
     */
    constructor(args) {
        super(args.systemPrompt);
        this.driver = args.driver;
        this.model = args.model;
        this.summarizerDriver = args.summarizerDriver ?? args.driver;
        this.summarizerModel = args.summarizerModel ?? args.model;
        this.contextTokens = Math.max(2048, Math.floor(args.contextTokens ?? 8192));
        this.reserveHeaderTokens = Math.max(0, Math.floor(args.reserveHeaderTokens ?? 1200));
        this.reserveResponseTokens = Math.max(0, Math.floor(args.reserveResponseTokens ?? 800));
        this.highRatio = Math.min(0.95, Math.max(0.55, args.highRatio ?? 0.70));
        this.lowRatio = Math.min(this.highRatio - 0.05, Math.max(0.35, args.lowRatio ?? 0.50));
        this.summaryRatio = Math.min(0.50, Math.max(0.15, args.summaryRatio ?? 0.35));
        this.avgCharsPerToken = Math.max(1.5, Number(args.avgCharsPerToken ?? 4));
        this.keepRecentPerLane = Math.max(1, Math.floor(args.keepRecentPerLane ?? 4));
        this.keepRecentTools = Math.max(0, Math.floor(args.keepRecentTools ?? 3));
    }
    /** Replace the buffer with a previously saved session's messages, if any. */
    async load(id) {
        const session = loadSession(id);
        if (session) {
            this.messagesBuffer = session.messages;
        }
    }
    /** Persist the buffer. Provider is not tracked here, so it is saved as "unknown". */
    async save(id) {
        ensureGroDir();
        saveSession(id, this.messagesBuffer, {
            id,
            provider: "unknown",
            model: this.model,
            createdAt: new Date().toISOString(),
        });
    }
    /**
     * Post-add hook: when estimated usage crosses the high watermark, schedule
     * a summarization pass via runOnce (serialized; overlapping requests queue
     * at most one re-run). The watermark is re-checked inside the task because
     * a queued pass may run after an earlier pass already shrank the buffer.
     */
    async onAfterAdd() {
        const budget = this.budgetTokens();
        const estTok = this.estimateTokens(this.messagesBuffer);
        if (estTok <= Math.floor(this.highRatio * budget))
            return;
        await this.runOnce(async () => {
            const budget2 = this.budgetTokens();
            const estTok2 = this.estimateTokens(this.messagesBuffer);
            if (estTok2 <= Math.floor(this.highRatio * budget2))
                return;
            // Split the buffer into role lanes; within each lane keep the most
            // recent tailN messages verbatim and mark the rest for summarization.
            const { firstSystemIndex, assistant, user, system, tool, other } = this.partition();
            const tailN = this.keepRecentPerLane;
            const olderAssistant = assistant.slice(0, Math.max(0, assistant.length - tailN));
            const keepAssistant = assistant.slice(Math.max(0, assistant.length - tailN));
            // A leading system prompt is always preserved verbatim.
            const sysHead = firstSystemIndex === 0 ? [this.messagesBuffer[0]] : [];
            const remainingSystem = firstSystemIndex === 0 ? system.slice(1) : system.slice(0);
            const olderSystem = remainingSystem.slice(0, Math.max(0, remainingSystem.length - tailN));
            const keepSystem = remainingSystem.slice(Math.max(0, remainingSystem.length - tailN));
            const olderUser = user.slice(0, Math.max(0, user.length - tailN));
            const keepUser = user.slice(Math.max(0, user.length - tailN));
            const keepTools = tool.slice(Math.max(0, tool.length - this.keepRecentTools));
            const preserved = [
                ...sysHead, ...keepAssistant, ...keepSystem, ...keepUser, ...keepTools, ...other,
            ];
            const preservedTok = this.estimateTokens(preserved);
            const lowTarget = Math.floor(this.lowRatio * budget2);
            const maxSummaryTok = Math.floor(this.summaryRatio * budget2);
            // If dropping the "older" messages alone reaches the low watermark,
            // skip the (costly) LLM summarization entirely.
            if (preservedTok <= lowTarget) {
                const rebuilt = this.ordered([], sysHead, keepAssistant, keepSystem, keepUser, keepTools, other);
                this.messagesBuffer.splice(0, this.messagesBuffer.length, ...rebuilt);
                return;
            }
            // Apportion the summary token budget across lanes proportionally to
            // how many characters each lane is giving up.
            const removedCharA = this.totalChars(olderAssistant);
            const removedCharS = this.totalChars(olderSystem);
            const removedCharU = this.totalChars(olderUser);
            const removedTotal = Math.max(1, removedCharA + removedCharS + removedCharU);
            const totalSummaryBudget = Math.max(64, Math.min(maxSummaryTok, lowTarget - preservedTok));
            const budgetA = Math.max(48, Math.floor(totalSummaryBudget * (removedCharA / removedTotal)));
            const budgetS = Math.max(48, Math.floor(totalSummaryBudget * (removedCharS / removedTotal)));
            const budgetU = Math.max(48, Math.floor(totalSummaryBudget * (removedCharU / removedTotal)));
            // Lanes are independent, so summarize them concurrently.
            const [sumA, sumS, sumU] = await Promise.all([
                olderAssistant.length ? this.summarizeLane("assistant", olderAssistant, budgetA) : "",
                olderSystem.length ? this.summarizeLane("system", olderSystem, budgetS) : "",
                olderUser.length ? this.summarizeLane("user", olderUser, budgetU) : "",
            ]);
            const summaries = [];
            if (sumA)
                summaries.push({ from: "Me", role: "assistant", content: `ASSISTANT SUMMARY:\n${sumA}` });
            if (sumS)
                summaries.push({ from: "System", role: "system", content: `SYSTEM SUMMARY:\n${sumS}` });
            if (sumU)
                summaries.push({ from: "Memory", role: "user", content: `USER SUMMARY:\n${sumU}` });
            const rebuilt = this.ordered(summaries, sysHead, keepAssistant, keepSystem, keepUser, keepTools, other);
            this.messagesBuffer.splice(0, this.messagesBuffer.length, ...rebuilt);
            // Final clamp: greedily keep messages front-to-back, dropping any
            // non-system message that would push the estimate past lowTarget.
            let finalTok = this.estimateTokens(this.messagesBuffer);
            if (finalTok > lowTarget) {
                const pruned = [];
                for (const m of this.messagesBuffer) {
                    pruned.push(m);
                    finalTok = this.estimateTokens(pruned);
                    if (finalTok > lowTarget && m.role !== "system") {
                        pruned.pop();
                    }
                }
                this.messagesBuffer.splice(0, this.messagesBuffer.length, ...pruned);
            }
        });
    }
    /** Usable token budget: context window minus header/response reserves, floor 512. */
    budgetTokens() {
        return Math.max(512, this.contextTokens - this.reserveHeaderTokens - this.reserveResponseTokens);
    }
    /** Heuristic token estimate: total characters divided by avgCharsPerToken. */
    estimateTokens(msgs) {
        return Math.ceil(this.totalChars(msgs) / this.avgCharsPerToken);
    }
    /**
     * Character count over messages with a 32-char per-message overhead;
     * tool outputs are counted as at most 24,000 chars so one huge tool
     * result cannot dominate the estimate.
     */
    totalChars(msgs) {
        let c = 0;
        for (const m of msgs) {
            const s = String(m.content ?? "");
            if (m.role === "tool" && s.length > 24_000)
                c += 24_000;
            else
                c += s.length;
            c += 32;
        }
        return c;
    }
    /**
     * Split the buffer into per-role lanes (order preserved within each lane)
     * and report the index of the first system message (-1 if none).
     */
    partition() {
        const assistant = [];
        const user = [];
        const system = [];
        const tool = [];
        const other = [];
        for (const m of this.messagesBuffer) {
            switch (m.role) {
                case "assistant":
                    assistant.push(m);
                    break;
                case "user":
                    user.push(m);
                    break;
                case "system":
                    system.push(m);
                    break;
                case "tool":
                    tool.push(m);
                    break;
                default:
                    other.push(m);
                    break;
            }
        }
        const firstSystemIndex = this.messagesBuffer.findIndex(x => x.role === "system");
        return { firstSystemIndex, assistant, user, system, tool, other };
    }
    /**
     * Rebuild the buffer: summaries first (assistant, then system, then user),
     * followed by the kept messages in their original buffer order. Membership
     * is tested by object identity, so kept messages must be the same objects
     * that live in messagesBuffer.
     */
    ordered(summaries, sysHead, keepA, keepS, keepU, keepT, other) {
        const keepSet = new Set([...sysHead, ...keepA, ...keepS, ...keepU, ...keepT, ...other]);
        const rest = [];
        for (const m of this.messagesBuffer) {
            if (keepSet.has(m))
                rest.push(m);
        }
        const orderedSummaries = [
            ...summaries.filter(s => s.role === "assistant"),
            ...summaries.filter(s => s.role === "system"),
            ...summaries.filter(s => s.role === "user"),
        ];
        return [...orderedSummaries, ...rest];
    }
    /**
     * Summarize one lane's older messages into ~tokenBudget tokens using the
     * summarizer driver. Each message is truncated to 4000 chars, and the
     * transcript fed to the model is capped at ~3x the target character count.
     * Returns "" for an empty lane or non-positive budget.
     * NOTE(review): laneName is expected to be "assistant" | "system" | "user";
     * any other value would leave `header` undefined — confirm callers.
     */
    async summarizeLane(laneName, messages, tokenBudget) {
        if (messages.length === 0 || tokenBudget <= 0)
            return "";
        const approxChars = Math.max(120, Math.floor(tokenBudget * this.avgCharsPerToken));
        const header = (() => {
            switch (laneName) {
                case "assistant": return "Summarize prior ASSISTANT replies (decisions, plans, code edits, shell commands and outcomes).";
                case "system": return "Summarize SYSTEM instructions (rules, goals, constraints) without changing their intent.";
                case "user": return "Summarize USER requests, feedback, constraints, and acceptance criteria.";
            }
        })();
        let acc = "";
        for (const m of messages) {
            let c = String(m.content ?? "");
            if (c.length > 4000)
                c = c.slice(0, 4000) + "\n…(truncated)…";
            const next = `- ${laneName.toUpperCase()}: ${c}\n\n`;
            if (acc.length + next.length > approxChars * 3)
                break;
            acc += next;
        }
        const sys = {
            role: "system",
            from: "System",
            content: [
                "You are a precise summarizer.",
                "Output concise bullet points; preserve facts, tasks, file paths, commands, constraints.",
                `Hard limit: ~${approxChars} characters total.`,
                "Avoid fluff; keep actionable details.",
            ].join(" "),
        };
        const usr = {
            role: "user",
            from: "User",
            content: `${header}\n\nTranscript:\n${acc}`,
        };
        const out = await this.summarizerDriver.chat([sys, usr], { model: this.summarizerModel });
        return String(out?.text ?? "").trim();
    }
}
@@ -0,0 +1,52 @@
1
/**
 * Base class for agent memory with background summarization support.
 * Subclasses call `runOnce` to serialize/queue summarization so callers never block.
 */
export class AgentMemory {
    /**
     * @param {string} [systemPrompt] optional system prompt; when non-blank it
     *   is seeded as the first message in the buffer.
     */
    constructor(systemPrompt) {
        this.messagesBuffer = [];
        this.summarizing = false; // a runOnce task is currently executing
        this.pending = false;     // another runOnce request arrived meanwhile
        if (systemPrompt && systemPrompt.trim().length > 0) {
            this.messagesBuffer.push({ role: "system", content: systemPrompt, from: "System" });
        }
    }
    /**
     * Post-add hook. Defaults to a no-op so the base class is usable directly;
     * subclasses (e.g. AdvancedMemory) override it to trigger compaction.
     * Fix: previously this method did not exist on the base class, so calling
     * `add` on a plain AgentMemory threw "this.onAfterAdd is not a function".
     */
    async onAfterAdd() { }
    /** Append a message to the buffer, then run the post-add hook. */
    async add(msg) {
        this.messagesBuffer.push(msg);
        await this.onAfterAdd();
    }
    /**
     * Append a message only if no buffered message already has the same role
     * AND content (linear scan). The post-add hook runs only on actual insert.
     */
    async addIfNotExists(msg) {
        const exists = this.messagesBuffer.some(m => m.content === msg.content && m.role === msg.role);
        if (!exists) {
            this.messagesBuffer.push(msg);
            await this.onAfterAdd();
        }
    }
    /** @returns a shallow copy of the buffer; callers may mutate it freely. */
    messages() {
        return [...this.messagesBuffer];
    }
    /** Number of buffered messages, excluding a leading system prompt if present. */
    nonSystemCount() {
        if (this.messagesBuffer.length === 0)
            return 0;
        return this.messagesBuffer[0].role === "system"
            ? this.messagesBuffer.length - 1
            : this.messagesBuffer.length;
    }
    /**
     * Run `task` if idle. If a task is already running, just note that another
     * pass was requested and return immediately; the finally-block re-runs the
     * task once. Any number of overlapping requests collapse into at most one
     * queued re-run, and the re-run is fire-and-forget so callers never block.
     */
    async runOnce(task) {
        if (this.summarizing) {
            this.pending = true;
            return;
        }
        this.summarizing = true;
        try {
            await task();
        }
        finally {
            this.summarizing = false;
            if (this.pending) {
                this.pending = false;
                void this.runOnce(task);
            }
        }
    }
}
@@ -0,0 +1,86 @@
1
+ /**
2
+ * agenthnsw adapter — optional dynamic import.
3
+ *
4
+ * The "agenthnsw" package is an optional dependency. We load it at
5
+ * runtime via a dynamic `import()` so that TypeScript compilation and
6
+ * Docker builds succeed even when the package is not installed.
7
+ *
8
+ * If agenthnsw is missing at runtime the factory function
9
+ * `createAgentHnswIndex()` will throw a clear error.
10
+ */
11
/**
 * Dynamically import "agenthnsw".
 *
 * The specifier is hidden behind `eval` so that neither the TypeScript
 * compiler nor bundlers attempt to resolve it at build time; resolution
 * happens only when this function actually runs.
 */
async function importAgentHnsw() {
    try {
        const load = eval("(m) => import(m)");
        return await load("agenthnsw");
    }
    catch (err) {
        const detail = err instanceof Error ? err.message : String(err);
        throw new Error(`Optional dependency "agenthnsw" is not installed. ` +
            `Install it with: npm install agenthnsw\n` +
            `Original error: ${detail}`);
    }
}
// ── AgentHnswIndex class ────────────────────────────────────────────────────
/**
 * Async wrapper around agenthnsw's InMemoryLinearIndex. The backing index is
 * created lazily on first use, so constructing this class never fails even
 * when the optional package is absent.
 */
export class AgentHnswIndex {
    constructor(opts) {
        this.idx = null;
        this.metric = opts?.metric;
    }
    /** Lazily initialise the underlying index on first use. */
    async ensureIndex() {
        if (this.idx === null) {
            const mod = await importAgentHnsw();
            this.idx = new mod.InMemoryLinearIndex({ metric: this.metric });
        }
        return this.idx;
    }
    /** Insert or update a single vector record. */
    async upsert(record) {
        const { id, vector, metadata } = record;
        const idx = await this.ensureIndex();
        await idx.upsert({ id, vector, metadata });
    }
    /** Insert or update a batch of vector records. */
    async upsertMany(records) {
        const rows = records.map(({ id, vector, metadata }) => ({ id, vector, metadata }));
        const idx = await this.ensureIndex();
        await idx.upsertMany(rows);
    }
    /** Return the k nearest records to `query` as {id, score, metadata}. */
    async search(query, k) {
        const idx = await this.ensureIndex();
        const hits = await idx.search(query, k);
        return hits.map(({ id, score, metadata }) => ({ id, score, metadata }));
    }
    /** Remove a record by id. */
    async delete(id) {
        const idx = await this.ensureIndex();
        await idx.delete(id);
    }
    /** Persist the index into the given directory. */
    async save(dir) {
        const idx = await this.ensureIndex();
        await idx.save(dir);
    }
    /** Restore the index from the given directory. */
    async load(dir) {
        const idx = await this.ensureIndex();
        await idx.load(dir);
    }
    /** Backend statistics, as reported by the underlying index. */
    async stats() {
        const idx = await this.ensureIndex();
        return await idx.stats();
    }
}
// ── Factory ─────────────────────────────────────────────────────────────────
/**
 * Create an AgentHnswIndex.
 *
 * Never throws, even when "agenthnsw" is missing — the dynamic import is
 * attempted only on the first index operation.
 */
export function createAgentHnswIndex(opts) {
    return new AgentHnswIndex(opts);
}
@@ -1,6 +1,5 @@
1
1
  export * from "./vector-index.js";
2
2
  export * from "./agenthnsw.js";
3
-
4
3
  export { AgentMemory } from "./agent-memory.js";
5
4
  export { AdvancedMemory } from "./advanced-memory.js";
6
5
  export { SimpleMemory } from "./simple-memory.js";
@@ -0,0 +1,34 @@
1
+ import { AgentMemory } from "./agent-memory.js";
2
+ import { saveSession, loadSession, ensureGroDir } from "../session.js";
3
/**
 * SimpleMemory — unbounded message buffer.
 * No summarization, no token budgeting. Useful for short conversations
 * or when the caller manages context externally.
 */
export class SimpleMemory extends AgentMemory {
    constructor(systemPrompt) {
        super(systemPrompt);
        this.provider = "";
        this.model = "";
    }
    /** Record provider/model metadata to be persisted alongside the session. */
    setMeta(provider, model) {
        this.provider = provider;
        this.model = model;
    }
    /** Restore the message buffer from a saved session, if one exists. */
    async load(id) {
        const restored = loadSession(id);
        if (restored)
            this.messagesBuffer = restored.messages;
    }
    /** Persist the buffer plus metadata under the given session id. */
    async save(id) {
        ensureGroDir();
        const meta = {
            id,
            provider: this.provider,
            model: this.model,
            createdAt: new Date().toISOString(),
        };
        saveSession(id, this.messagesBuffer, meta);
    }
    /** No compaction: the buffer grows without bound by design. */
    async onAfterAdd() { }
}
@@ -0,0 +1,7 @@
1
+ /**
2
+ * Pluggable vector index interface for retrieval-backed memory.
3
+ *
4
+ * This is intentionally small so backends can live in separate packages
5
+ * (e.g. agenthnsw) and be imported by gro.
6
+ */
7
+ export {};
@@ -0,0 +1,22 @@
1
+ {
2
+ "name": "@tjamescouch/gro",
3
+ "version": "1.3.2",
4
+ "description": "Provider-agnostic LLM runtime with context management",
5
+ "type": "module",
6
+ "scripts": {
7
+ "start": "npx tsx src/main.ts",
8
+ "build": "npx tsc && cp package.json dist/",
9
+ "build:bun": "bun build src/main.ts --outdir dist --target bun",
10
+ "test": "npx tsx --test tests/*.test.ts",
11
+ "test:bun": "bun test"
12
+ },
13
+ "devDependencies": {
14
+ "@types/node": "^25.2.3",
15
+ "tsx": "^4.21.0",
16
+ "typescript": "^5.9.3"
17
+ },
18
+ "dependencies": {
19
+ "@modelcontextprotocol/sdk": "^1.26.0",
20
+ "isexe": "^4.0.0"
21
+ }
22
+ }