devsh-memory-mcp 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +114 -0
- package/dist/chunk-PI6DAQTW.js +655 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +58 -0
- package/dist/index.d.ts +52 -0
- package/dist/index.js +8 -0
- package/package.json +53 -0
package/README.md
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
# devsh-memory-mcp
|
|
2
|
+
|
|
3
|
+
MCP server for devsh/cmux agent memory - enables Claude Desktop, Cursor, and other MCP clients to access sandbox agent memory.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install -g devsh-memory-mcp
|
|
9
|
+
# or
|
|
10
|
+
npx devsh-memory-mcp
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Usage
|
|
14
|
+
|
|
15
|
+
### CLI
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Use default memory directory (/root/lifecycle/memory)
|
|
19
|
+
devsh-memory-mcp
|
|
20
|
+
|
|
21
|
+
# Specify custom directory
|
|
22
|
+
devsh-memory-mcp --dir /path/to/memory
|
|
23
|
+
|
|
24
|
+
# Set agent name for messaging
|
|
25
|
+
devsh-memory-mcp --agent my-agent
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
### Claude Desktop Configuration
|
|
29
|
+
|
|
30
|
+
Add to your `claude_desktop_config.json`:
|
|
31
|
+
|
|
32
|
+
```json
|
|
33
|
+
{
|
|
34
|
+
"mcpServers": {
|
|
35
|
+
"devsh-memory": {
|
|
36
|
+
"command": "npx",
|
|
37
|
+
"args": ["devsh-memory-mcp"]
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
}
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
With custom options:
|
|
44
|
+
|
|
45
|
+
```json
|
|
46
|
+
{
|
|
47
|
+
"mcpServers": {
|
|
48
|
+
"devsh-memory": {
|
|
49
|
+
"command": "npx",
|
|
50
|
+
"args": ["devsh-memory-mcp", "--dir", "/path/to/memory", "--agent", "claude-desktop"]
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
## Available Tools
|
|
57
|
+
|
|
58
|
+
### Read Tools
|
|
59
|
+
|
|
60
|
+
| Tool | Description |
|
|
61
|
+
|------|-------------|
|
|
62
|
+
| `read_memory` | Read knowledge, tasks, or mailbox memory |
|
|
63
|
+
| `list_daily_logs` | List available daily log dates |
|
|
64
|
+
| `read_daily_log` | Read a specific daily log |
|
|
65
|
+
| `search_memory` | Search across all memory files |
|
|
66
|
+
|
|
67
|
+
### Messaging Tools
|
|
68
|
+
|
|
69
|
+
| Tool | Description |
|
|
70
|
+
|------|-------------|
|
|
71
|
+
| `send_message` | Send a message to another agent (or "*" for broadcast) |
|
|
72
|
+
| `get_my_messages` | Get messages addressed to this agent |
|
|
73
|
+
| `mark_read` | Mark a message as read |
|
|
74
|
+
|
|
75
|
+
### Write Tools
|
|
76
|
+
|
|
77
|
+
| Tool | Description |
|
|
78
|
+
|------|-------------|
|
|
79
|
+
| `append_daily_log` | Append content to today's daily log |
|
|
80
|
+
| `update_knowledge` | Add an entry to a priority section (P0/P1/P2) |
|
|
81
|
+
| `add_task` | Add a new task to TASKS.json |
|
|
82
|
+
| `update_task` | Update the status of a task |
|
|
83
|
+
|
|
84
|
+
### Orchestration Tools
|
|
85
|
+
|
|
86
|
+
| Tool | Description |
|
|
87
|
+
|------|-------------|
|
|
88
|
+
| `read_orchestration` | Read PLAN.json, AGENTS.json, or EVENTS.jsonl |
|
|
89
|
+
| `append_event` | Append an orchestration event to EVENTS.jsonl |
|
|
90
|
+
| `update_plan_task` | Update task status in PLAN.json |
|
|
91
|
+
|
|
92
|
+
## Memory Directory Structure
|
|
93
|
+
|
|
94
|
+
```
|
|
95
|
+
/root/lifecycle/memory/
|
|
96
|
+
├── knowledge/
|
|
97
|
+
│ └── MEMORY.md # Long-term insights (P0/P1/P2 sections)
|
|
98
|
+
├── daily/
|
|
99
|
+
│ └── {date}.md # Daily session logs
|
|
100
|
+
├── orchestration/
|
|
101
|
+
│ ├── PLAN.json # Orchestration task plan
|
|
102
|
+
│ ├── AGENTS.json # Agent registry
|
|
103
|
+
│ └── EVENTS.jsonl # Orchestration event log
|
|
104
|
+
├── TASKS.json # Task registry
|
|
105
|
+
└── MAILBOX.json # Inter-agent messages
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
## Priority Tiers (MEMORY.md)
|
|
109
|
+
|
|
110
|
+
- **P0 - Core**: Never expires. Project fundamentals, invariants.
|
|
111
|
+
- **P1 - Active**: 90-day TTL. Ongoing work, current strategies.
|
|
112
|
+
- **P2 - Reference**: 30-day TTL. Temporary findings, debug notes.
|
|
113
|
+
|
|
114
|
+
Format: `- [YYYY-MM-DD] Your insight here`
|
|
@@ -0,0 +1,655 @@
|
|
|
1
|
+
// src/index.ts
|
|
2
|
+
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
|
3
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
4
|
+
import {
|
|
5
|
+
CallToolRequestSchema,
|
|
6
|
+
ListToolsRequestSchema
|
|
7
|
+
} from "@modelcontextprotocol/sdk/types.js";
|
|
8
|
+
import * as fs from "fs";
|
|
9
|
+
import * as path from "path";
|
|
10
|
+
import * as crypto from "crypto";
|
|
11
|
+
// Default location of the agent memory tree inside a cmux sandbox.
var DEFAULT_MEMORY_DIR = "/root/lifecycle/memory";

/**
 * Create an MCP server exposing the devsh/cmux agent memory directory.
 *
 * The server offers read tools (knowledge/tasks/mailbox/daily logs/search),
 * messaging tools (mailbox send/read/mark-read), write tools (daily log,
 * knowledge sections, TASKS.json) and orchestration tools (PLAN.json,
 * AGENTS.json, EVENTS.jsonl). All state lives in plain files under
 * `config.memoryDir`; no locking is performed, so concurrent writers may race.
 *
 * @param {{ memoryDir?: string, agentName?: string }} [config]
 *   memoryDir defaults to DEFAULT_MEMORY_DIR; agentName defaults to the
 *   CMUX_AGENT_NAME env var, then "external-client".
 * @returns {Server} a configured (not yet connected) MCP Server instance.
 */
function createMemoryMcpServer(config) {
  const memoryDir = config?.memoryDir ?? DEFAULT_MEMORY_DIR;
  const agentName = config?.agentName ?? process.env.CMUX_AGENT_NAME ?? "external-client";
  // Well-known file layout under the memory directory.
  const knowledgeDir = path.join(memoryDir, "knowledge");
  const dailyDir = path.join(memoryDir, "daily");
  const orchestrationDir = path.join(memoryDir, "orchestration");
  const mailboxPath = path.join(memoryDir, "MAILBOX.json");
  const tasksPath = path.join(memoryDir, "TASKS.json");
  const planPath = path.join(orchestrationDir, "PLAN.json");
  const agentsPath = path.join(orchestrationDir, "AGENTS.json");
  const eventsPath = path.join(orchestrationDir, "EVENTS.jsonl");

  // Read a file as UTF-8, returning null if missing or unreadable.
  function readFile(filePath) {
    try {
      if (!fs.existsSync(filePath)) return null;
      return fs.readFileSync(filePath, "utf-8");
    } catch {
      return null;
    }
  }

  // Write a file as UTF-8; returns false on any I/O error (best-effort).
  function writeFile(filePath, content) {
    try {
      fs.writeFileSync(filePath, content, "utf-8");
      return true;
    } catch {
      return false;
    }
  }

  // Parse TASKS.json, falling back to an empty registry on absence/corruption.
  function readTasks() {
    const content = readFile(tasksPath);
    if (!content) return { version: 1, tasks: [] };
    try {
      return JSON.parse(content);
    } catch {
      return { version: 1, tasks: [] };
    }
  }

  function writeTasks(tasks) {
    return writeFile(tasksPath, JSON.stringify(tasks, null, 2));
  }

  // Short, collision-unlikely id derived from a UUID (12 hex chars).
  function generateTaskId() {
    return "task_" + crypto.randomUUID().replace(/-/g, "").slice(0, 12);
  }

  // Today's date as YYYY-MM-DD in UTC (matches daily log file names).
  function getTodayDateString() {
    const iso = new Date().toISOString();
    return iso.slice(0, iso.indexOf("T"));
  }

  function ensureDir(dirPath) {
    if (!fs.existsSync(dirPath)) {
      fs.mkdirSync(dirPath, { recursive: true });
    }
  }

  // Parse PLAN.json; null when missing or invalid JSON.
  function readPlan() {
    const content = readFile(planPath);
    if (!content) return null;
    try {
      return JSON.parse(content);
    } catch {
      return null;
    }
  }

  // Persist PLAN.json, stamping updatedAt with the current time.
  function writePlan(plan) {
    ensureDir(orchestrationDir);
    plan.updatedAt = new Date().toISOString();
    return writeFile(planPath, JSON.stringify(plan, null, 2));
  }

  // Append one JSON line to EVENTS.jsonl; returns false on I/O error.
  function appendEvent(event) {
    ensureDir(orchestrationDir);
    const line = JSON.stringify(event) + "\n";
    try {
      fs.appendFileSync(eventsPath, line, "utf-8");
      return true;
    } catch {
      return false;
    }
  }

  // Parse MAILBOX.json, falling back to an empty mailbox on absence/corruption.
  function readMailbox() {
    const content = readFile(mailboxPath);
    if (!content) return { version: 1, messages: [] };
    try {
      return JSON.parse(content);
    } catch {
      return { version: 1, messages: [] };
    }
  }

  function writeMailbox(mailbox) {
    return writeFile(mailboxPath, JSON.stringify(mailbox, null, 2));
  }

  function generateMessageId() {
    return "msg_" + crypto.randomUUID().replace(/-/g, "").slice(0, 12);
  }

  // List daily-log dates (file basenames without ".md"), newest first.
  function listDailyLogs() {
    try {
      if (!fs.existsSync(dailyDir)) return [];
      const files = fs.readdirSync(dailyDir);
      return files.filter((f) => f.endsWith(".md")).map((f) => f.replace(".md", "")).sort().reverse();
    } catch {
      return [];
    }
  }

  /**
   * Case-insensitive substring search over knowledge, tasks, mailbox, and the
   * 7 most recent daily logs. Markdown files report per-line matches; JSON
   * files report only that a match exists.
   * @param {string} query
   * @returns {Array<{source: string, line?: number, content: string}>}
   */
  function searchMemory(query) {
    const results = [];
    const lowerQuery = query.toLowerCase();
    const knowledge = readFile(path.join(knowledgeDir, "MEMORY.md"));
    if (knowledge?.toLowerCase().includes(lowerQuery)) {
      const lines = knowledge.split("\n");
      for (let i = 0; i < lines.length; i++) {
        if (lines[i].toLowerCase().includes(lowerQuery)) {
          results.push({
            source: "knowledge/MEMORY.md",
            line: i + 1,
            content: lines[i].trim()
          });
        }
      }
    }
    const tasks = readFile(tasksPath);
    if (tasks?.toLowerCase().includes(lowerQuery)) {
      results.push({ source: "TASKS.json", content: "Match found in tasks file" });
    }
    const mailbox = readFile(mailboxPath);
    if (mailbox?.toLowerCase().includes(lowerQuery)) {
      results.push({ source: "MAILBOX.json", content: "Match found in mailbox file" });
    }
    const dailyLogs = listDailyLogs();
    // Only the 7 newest logs are scanned to bound the work per search.
    for (const date of dailyLogs.slice(0, 7)) {
      const logContent = readFile(path.join(dailyDir, `${date}.md`));
      if (logContent?.toLowerCase().includes(lowerQuery)) {
        const lines = logContent.split("\n");
        for (let i = 0; i < lines.length; i++) {
          if (lines[i].toLowerCase().includes(lowerQuery)) {
            results.push({
              source: `daily/${date}.md`,
              line: i + 1,
              content: lines[i].trim()
            });
          }
        }
      }
    }
    return results;
  }

  const server = new Server(
    {
      name: "devsh-memory",
      version: "0.1.0"
    },
    {
      capabilities: {
        tools: {}
      }
    }
  );

  server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools: [
      {
        name: "read_memory",
        description: 'Read a memory file. Type can be "knowledge", "tasks", or "mailbox".',
        inputSchema: {
          type: "object",
          properties: {
            type: {
              type: "string",
              enum: ["knowledge", "tasks", "mailbox"],
              description: "The type of memory to read"
            }
          },
          required: ["type"]
        }
      },
      {
        name: "list_daily_logs",
        description: "List available daily log dates (newest first).",
        inputSchema: {
          type: "object",
          properties: {}
        }
      },
      {
        name: "read_daily_log",
        description: "Read a specific daily log by date (YYYY-MM-DD format).",
        inputSchema: {
          type: "object",
          properties: {
            date: {
              type: "string",
              description: "The date in YYYY-MM-DD format"
            }
          },
          required: ["date"]
        }
      },
      {
        name: "search_memory",
        description: "Search across all memory files for a query string.",
        inputSchema: {
          type: "object",
          properties: {
            query: {
              type: "string",
              description: "The search query"
            }
          },
          required: ["query"]
        }
      },
      {
        name: "send_message",
        description: 'Send a message to another agent on the same task. Use "*" to broadcast to all agents.',
        inputSchema: {
          type: "object",
          properties: {
            to: {
              type: "string",
              description: 'Recipient agent name (e.g., "claude/opus-4.5") or "*" for broadcast'
            },
            message: {
              type: "string",
              description: "The message content"
            },
            type: {
              type: "string",
              enum: ["handoff", "request", "status"],
              description: "Message type: handoff (work transfer), request (ask to do something), status (progress update)"
            }
          },
          required: ["to", "message"]
        }
      },
      {
        name: "get_my_messages",
        description: "Get all messages addressed to this agent (including broadcasts). Returns unread messages first.",
        inputSchema: {
          type: "object",
          properties: {
            includeRead: {
              type: "boolean",
              description: "Include messages already marked as read (default: false)"
            }
          }
        }
      },
      {
        name: "mark_read",
        description: "Mark a message as read by its ID.",
        inputSchema: {
          type: "object",
          properties: {
            messageId: {
              type: "string",
              description: "The message ID to mark as read"
            }
          },
          required: ["messageId"]
        }
      },
      // Write tools
      {
        name: "append_daily_log",
        description: "Append content to today's daily log. Creates the file if it doesn't exist.",
        inputSchema: {
          type: "object",
          properties: {
            content: {
              type: "string",
              description: "Content to append to the daily log"
            }
          },
          required: ["content"]
        }
      },
      {
        name: "update_knowledge",
        description: "Update a specific priority section in the knowledge file (MEMORY.md). Appends a new entry with today's date.",
        inputSchema: {
          type: "object",
          properties: {
            section: {
              type: "string",
              enum: ["P0", "P1", "P2"],
              description: "Priority section to update (P0=Core, P1=Active, P2=Reference)"
            },
            content: {
              type: "string",
              description: "Content to add to the section (will be prefixed with today's date)"
            }
          },
          required: ["section", "content"]
        }
      },
      {
        name: "add_task",
        description: "Add a new task to the TASKS.json file.",
        inputSchema: {
          type: "object",
          properties: {
            subject: {
              type: "string",
              description: "Brief title for the task"
            },
            description: {
              type: "string",
              description: "Detailed description of what needs to be done"
            }
          },
          required: ["subject", "description"]
        }
      },
      {
        name: "update_task",
        description: "Update the status of an existing task in TASKS.json.",
        inputSchema: {
          type: "object",
          properties: {
            taskId: {
              type: "string",
              description: "The ID of the task to update"
            },
            status: {
              type: "string",
              enum: ["pending", "in_progress", "completed"],
              description: "New status for the task"
            }
          },
          required: ["taskId", "status"]
        }
      },
      // Orchestration tools
      {
        name: "read_orchestration",
        description: "Read an orchestration file (PLAN.json, AGENTS.json, or EVENTS.jsonl).",
        inputSchema: {
          type: "object",
          properties: {
            type: {
              type: "string",
              enum: ["plan", "agents", "events"],
              description: "Type of orchestration file to read"
            }
          },
          required: ["type"]
        }
      },
      {
        name: "append_event",
        description: "Append an orchestration event to EVENTS.jsonl.",
        inputSchema: {
          type: "object",
          properties: {
            event: {
              type: "string",
              description: "Event type (e.g., agent_spawned, agent_completed, message_sent)"
            },
            message: {
              type: "string",
              description: "Human-readable message describing the event"
            },
            agentName: {
              type: "string",
              description: "Agent name associated with the event (optional)"
            },
            taskRunId: {
              type: "string",
              description: "Task run ID associated with the event (optional)"
            }
          },
          required: ["event", "message"]
        }
      },
      {
        name: "update_plan_task",
        description: "Update the status of a task in the orchestration PLAN.json.",
        inputSchema: {
          type: "object",
          properties: {
            taskId: {
              type: "string",
              description: "The ID of the orchestration task to update"
            },
            status: {
              type: "string",
              description: "New status (pending, assigned, running, completed, failed, cancelled)"
            },
            result: {
              type: "string",
              description: "Result message (for completed tasks)"
            },
            errorMessage: {
              type: "string",
              description: "Error message (for failed tasks)"
            }
          },
          required: ["taskId", "status"]
        }
      }
    ]
  }));

  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { name } = request.params;
    // FIX: default to {} — clients may legitimately omit `arguments` for
    // tools with no required params (e.g. list_daily_logs, get_my_messages);
    // the previous bare destructure made `args.foo` throw on undefined.
    const args = request.params.arguments ?? {};
    switch (name) {
      case "read_memory": {
        const type = args.type;
        let content = null;
        if (type === "knowledge") {
          content = readFile(path.join(knowledgeDir, "MEMORY.md"));
        } else if (type === "tasks") {
          content = readFile(tasksPath);
        } else if (type === "mailbox") {
          content = readFile(mailboxPath);
        }
        return {
          content: [{ type: "text", text: content ?? `No ${type} content found.` }]
        };
      }
      case "list_daily_logs": {
        const dates = listDailyLogs();
        return {
          content: [{ type: "text", text: dates.length > 0 ? dates.join("\n") : "No daily logs found." }]
        };
      }
      case "read_daily_log": {
        const date = args.date;
        // FIX: validate the date before joining it into a path. Without this,
        // a value like "../../etc/passwd" would escape dailyDir (path traversal).
        if (typeof date !== "string" || !/^\d{4}-\d{2}-\d{2}$/.test(date)) {
          return {
            content: [{ type: "text", text: `Invalid date "${date}". Expected YYYY-MM-DD.` }]
          };
        }
        const content = readFile(path.join(dailyDir, `${date}.md`));
        return {
          content: [{ type: "text", text: content ?? `No log found for ${date}.` }]
        };
      }
      case "search_memory": {
        const query = args.query;
        const results = searchMemory(query);
        if (results.length === 0) {
          return { content: [{ type: "text", text: `No results found for "${query}".` }] };
        }
        const formatted = results.map((r) => `[${r.source}${r.line ? `:${r.line}` : ""}] ${r.content}`).join("\n");
        return { content: [{ type: "text", text: formatted }] };
      }
      case "send_message": {
        const { to, message, type } = args;
        const mailbox = readMailbox();
        const newMessage = {
          id: generateMessageId(),
          from: agentName,
          to,
          type: type ?? "request",
          message,
          timestamp: new Date().toISOString(),
          read: false
        };
        mailbox.messages.push(newMessage);
        writeMailbox(mailbox);
        return { content: [{ type: "text", text: `Message sent successfully. ID: ${newMessage.id}` }] };
      }
      case "get_my_messages": {
        const includeRead = args.includeRead ?? false;
        const mailbox = readMailbox();
        // Broadcasts ("*") are delivered to every agent.
        const myMessages = mailbox.messages.filter(
          (m) => m.to === agentName || m.to === "*"
        );
        const filtered = includeRead ? myMessages : myMessages.filter((m) => !m.read);
        if (filtered.length === 0) {
          return { content: [{ type: "text", text: "No messages for you." }] };
        }
        const formatted = filtered.map((m) => `[${m.id}] ${m.type ?? "message"} from ${m.from}: ${m.message}`).join("\n\n");
        return { content: [{ type: "text", text: formatted }] };
      }
      case "mark_read": {
        const messageId = args.messageId;
        const mailbox = readMailbox();
        const message = mailbox.messages.find((m) => m.id === messageId);
        if (!message) {
          return { content: [{ type: "text", text: `Message ${messageId} not found.` }] };
        }
        message.read = true;
        writeMailbox(mailbox);
        return { content: [{ type: "text", text: `Message ${messageId} marked as read.` }] };
      }
      // Write tool handlers
      case "append_daily_log": {
        const { content } = args;
        const today = getTodayDateString();
        ensureDir(dailyDir);
        const logPath = path.join(dailyDir, `${today}.md`);
        // Seed a fresh log file with a standard header when none exists yet.
        const existing = readFile(logPath) ?? `# Daily Log: ${today}

> Session-specific observations. Temporary notes go here.

---
`;
        // HH:MM:SS portion of the UTC timestamp.
        const timestamp = new Date().toISOString().split("T")[1].split(".")[0];
        const newContent = existing + `
- [${timestamp}] ${content}`;
        if (writeFile(logPath, newContent)) {
          return { content: [{ type: "text", text: `Appended to daily/${today}.md` }] };
        }
        return { content: [{ type: "text", text: `Failed to append to daily log` }] };
      }
      case "update_knowledge": {
        const { section, content } = args;
        ensureDir(knowledgeDir);
        const knowledgePath = path.join(knowledgeDir, "MEMORY.md");
        let existing = readFile(knowledgePath);
        if (!existing) {
          // Bootstrap the knowledge file with the canonical section headers.
          existing = `# Project Knowledge

> Curated insights organized by priority. Add date tags for TTL tracking.

## P0 - Core (Never Expires)
<!-- Fundamental project facts, configuration, invariants -->

## P1 - Active (90-day TTL)
<!-- Ongoing work context, current strategies, recent decisions -->

## P2 - Reference (30-day TTL)
<!-- Temporary findings, debug notes, one-off context -->

---
*Priority guide: P0 = permanent truth, P1 = active context, P2 = temporary reference*
*Format: - [YYYY-MM-DD] Your insight here*
`;
        }
        const today = getTodayDateString();
        const newEntry = `- [${today}] ${content}`;
        const sectionHeaders = {
          P0: "## P0 - Core (Never Expires)",
          P1: "## P1 - Active (90-day TTL)",
          P2: "## P2 - Reference (30-day TTL)"
        };
        const header = sectionHeaders[section];
        const headerIndex = existing.indexOf(header);
        if (headerIndex === -1) {
          return { content: [{ type: "text", text: `Section ${section} not found in MEMORY.md` }] };
        }
        // Insert right after the section's placeholder comment if there is
        // one, otherwise after the header — but never past the next section.
        const afterHeader = existing.slice(headerIndex + header.length);
        const nextSectionMatch = afterHeader.match(/\n## /);
        const insertPoint = nextSectionMatch ? headerIndex + header.length + (nextSectionMatch.index ?? afterHeader.length) : existing.length;
        const commentEndMatch = afterHeader.match(/<!--[^>]*-->\n/);
        const commentEnd = commentEndMatch ? headerIndex + header.length + (commentEndMatch.index ?? 0) + commentEndMatch[0].length : headerIndex + header.length + 1;
        const actualInsertPoint = Math.min(commentEnd, insertPoint);
        const updated = existing.slice(0, actualInsertPoint) + newEntry + "\n" + existing.slice(actualInsertPoint);
        if (writeFile(knowledgePath, updated)) {
          return { content: [{ type: "text", text: `Added entry to ${section} section in MEMORY.md` }] };
        }
        return { content: [{ type: "text", text: `Failed to update MEMORY.md` }] };
      }
      case "add_task": {
        const { subject, description } = args;
        const tasks = readTasks();
        const now = new Date().toISOString();
        const newTask = {
          id: generateTaskId(),
          subject,
          description,
          status: "pending",
          createdAt: now,
          updatedAt: now
        };
        tasks.tasks.push(newTask);
        if (writeTasks(tasks)) {
          return { content: [{ type: "text", text: `Task created with ID: ${newTask.id}` }] };
        }
        return { content: [{ type: "text", text: `Failed to create task` }] };
      }
      case "update_task": {
        const { taskId, status } = args;
        const tasks = readTasks();
        const task = tasks.tasks.find((t) => t.id === taskId);
        if (!task) {
          return { content: [{ type: "text", text: `Task ${taskId} not found` }] };
        }
        task.status = status;
        task.updatedAt = new Date().toISOString();
        if (writeTasks(tasks)) {
          return { content: [{ type: "text", text: `Task ${taskId} updated to status: ${status}` }] };
        }
        return { content: [{ type: "text", text: `Failed to update task` }] };
      }
      // Orchestration tool handlers
      case "read_orchestration": {
        const type = args.type;
        let content = null;
        if (type === "plan") {
          content = readFile(planPath);
        } else if (type === "agents") {
          content = readFile(agentsPath);
        } else if (type === "events") {
          content = readFile(eventsPath);
        }
        return {
          content: [{ type: "text", text: content ?? `No ${type} file found in orchestration directory.` }]
        };
      }
      case "append_event": {
        // agentName is renamed locally to avoid shadowing the server-level binding.
        const { event, message, agentName: agentName2, taskRunId } = args;
        const eventObj = {
          timestamp: new Date().toISOString(),
          event,
          message
        };
        if (agentName2) eventObj.agentName = agentName2;
        if (taskRunId) eventObj.taskRunId = taskRunId;
        if (appendEvent(eventObj)) {
          return { content: [{ type: "text", text: `Event appended to EVENTS.jsonl` }] };
        }
        return { content: [{ type: "text", text: `Failed to append event` }] };
      }
      case "update_plan_task": {
        const { taskId, status, result, errorMessage } = args;
        const plan = readPlan();
        if (!plan) {
          return { content: [{ type: "text", text: `No PLAN.json found in orchestration directory` }] };
        }
        const task = plan.tasks.find((t) => t.id === taskId);
        if (!task) {
          return { content: [{ type: "text", text: `Task ${taskId} not found in PLAN.json` }] };
        }
        task.status = status;
        if (result !== void 0) task.result = result;
        if (errorMessage !== void 0) task.errorMessage = errorMessage;
        // Stamp lifecycle timestamps on the relevant transitions.
        if (status === "running" && !task.startedAt) {
          task.startedAt = new Date().toISOString();
        }
        if (status === "completed" || status === "failed" || status === "cancelled") {
          task.completedAt = new Date().toISOString();
        }
        if (writePlan(plan)) {
          return { content: [{ type: "text", text: `Plan task ${taskId} updated to status: ${status}` }] };
        }
        return { content: [{ type: "text", text: `Failed to update plan task` }] };
      }
      default:
        return { content: [{ type: "text", text: `Unknown tool: ${name}` }] };
    }
  });

  return server;
}
|
646
|
+
/**
 * Build the memory MCP server and attach it to a stdio transport.
 * Resolves once the transport connection has been established.
 *
 * @param {{ memoryDir?: string, agentName?: string }} [config] forwarded to createMemoryMcpServer.
 * @returns {Promise<void>}
 */
async function runServer(config) {
  const transport = new StdioServerTransport();
  const mcpServer = createMemoryMcpServer(config);
  await mcpServer.connect(transport);
}
|
|
651
|
+
|
|
652
|
+
export {
|
|
653
|
+
createMemoryMcpServer,
|
|
654
|
+
runServer
|
|
655
|
+
};
|
package/dist/cli.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import {
|
|
3
|
+
runServer
|
|
4
|
+
} from "./chunk-PI6DAQTW.js";
|
|
5
|
+
|
|
6
|
+
// src/cli.ts
|
|
7
|
+
/**
 * Parse the CLI flags (--dir, --agent, --help/-h) from process.argv.
 * Unknown arguments are ignored; --help prints usage and exits(0).
 *
 * @returns {{ memoryDir?: string, agentName?: string }}
 */
function parseArgs() {
  const result = {};
  const argv = process.argv.slice(2);
  let i = 0;
  while (i < argv.length) {
    const flag = argv[i];
    if (flag === "--dir" && argv[i + 1]) {
      result.memoryDir = argv[i + 1];
      i += 2;
    } else if (flag === "--agent" && argv[i + 1]) {
      result.agentName = argv[i + 1];
      i += 2;
    } else if (flag === "--help" || flag === "-h") {
      console.log(`
devsh-memory-mcp - MCP server for devsh/cmux agent memory

Usage:
  devsh-memory-mcp [options]

Options:
  --dir <path>     Memory directory (default: /root/lifecycle/memory)
  --agent <name>   Agent name for messaging (default: from CMUX_AGENT_NAME env)
  --help, -h       Show this help message

Example Claude Desktop config:
  {
    "mcpServers": {
      "devsh-memory": {
        "command": "npx",
        "args": ["devsh-memory-mcp", "--dir", "/path/to/memory"]
      }
    }
  }
`);
      process.exit(0);
    } else {
      // Unrecognized argument: skip it, matching the original lenient behavior.
      i += 1;
    }
  }
  return result;
}
|
|
44
|
+
/**
 * CLI entry point: parse flags, log the effective configuration to stderr
 * (stdout is reserved for the MCP stdio protocol), then run the server.
 */
async function main() {
  const options = parseArgs();
  console.error(`[devsh-memory-mcp] Starting server...`);
  const { memoryDir, agentName } = options;
  if (memoryDir) {
    console.error(`[devsh-memory-mcp] Memory directory: ${memoryDir}`);
  }
  if (agentName) {
    console.error(`[devsh-memory-mcp] Agent name: ${agentName}`);
  }
  await runServer(options);
}
main().catch((err) => {
  console.error("[devsh-memory-mcp] Fatal error:", err);
  process.exit(1);
});
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { Server } from '@modelcontextprotocol/sdk/server/index.js';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* cmux Memory MCP Server
|
|
5
|
+
*
|
|
6
|
+
* Standalone MCP server that exposes cmux agent memory for external clients
|
|
7
|
+
* like Claude Desktop and Cursor. Can connect to:
|
|
8
|
+
* - Local sandbox memory directory
|
|
9
|
+
* - Remote sandbox via SSH/HTTP
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
/** Configuration accepted by the memory MCP server factory. */
interface MemoryMcpConfig {
    /** Root of the agent memory tree (e.g. /root/lifecycle/memory). */
    memoryDir: string;
    /** Agent identity used for mailbox messaging; optional. */
    agentName?: string;
}
/**
 * Create (but do not connect) the memory MCP Server.
 * The generic parameters mirror the SDK's request/notification/result shapes.
 */
declare function createMemoryMcpServer(config?: Partial<MemoryMcpConfig>): Server<{
    method: string;
    params?: {
        [x: string]: unknown;
        _meta?: {
            [x: string]: unknown;
            progressToken?: string | number;
            "io.modelcontextprotocol/related-task"?: {
                taskId: string;
            };
        };
    };
}, {
    method: string;
    params?: {
        [x: string]: unknown;
        _meta?: {
            [x: string]: unknown;
            progressToken?: string | number;
            "io.modelcontextprotocol/related-task"?: {
                taskId: string;
            };
        };
    };
}, {
    [x: string]: unknown;
    _meta?: {
        [x: string]: unknown;
        progressToken?: string | number;
        "io.modelcontextprotocol/related-task"?: {
            taskId: string;
        };
    };
}>;
/** Create the server and connect it over stdio; resolves once connected. */
declare function runServer(config?: Partial<MemoryMcpConfig>): Promise<void>;
|
|
51
|
+
|
|
52
|
+
export { type MemoryMcpConfig, createMemoryMcpServer, runServer };
|
package/dist/index.js
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "devsh-memory-mcp",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "MCP server for devsh/cmux agent memory - enables Claude Desktop and external clients to access sandbox memory",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "dist/index.js",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"bin": {
|
|
9
|
+
"devsh-memory-mcp": "dist/cli.js"
|
|
10
|
+
},
|
|
11
|
+
"scripts": {
|
|
12
|
+
"build": "tsup",
|
|
13
|
+
"dev": "tsup --watch",
|
|
14
|
+
"typecheck": "tsc --noEmit",
|
|
15
|
+
"prepublishOnly": "npm run build"
|
|
16
|
+
},
|
|
17
|
+
"dependencies": {
|
|
18
|
+
"@modelcontextprotocol/sdk": "^1.0.0"
|
|
19
|
+
},
|
|
20
|
+
"devDependencies": {
|
|
21
|
+
"@types/node": "^22.0.0",
|
|
22
|
+
"tsup": "^8.0.0",
|
|
23
|
+
"typescript": "^5.7.0"
|
|
24
|
+
},
|
|
25
|
+
"files": [
|
|
26
|
+
"dist",
|
|
27
|
+
"README.md"
|
|
28
|
+
],
|
|
29
|
+
"keywords": [
|
|
30
|
+
"mcp",
|
|
31
|
+
"devsh",
|
|
32
|
+
"cmux",
|
|
33
|
+
"claude",
|
|
34
|
+
"memory",
|
|
35
|
+
"agent",
|
|
36
|
+
"model-context-protocol",
|
|
37
|
+
"claude-desktop"
|
|
38
|
+
],
|
|
39
|
+
"repository": {
|
|
40
|
+
"type": "git",
|
|
41
|
+
"url": "git+https://github.com/karlorz/cmux.git",
|
|
42
|
+
"directory": "packages/cmux-memory-mcp"
|
|
43
|
+
},
|
|
44
|
+
"homepage": "https://github.com/karlorz/cmux/tree/main/packages/cmux-memory-mcp#readme",
|
|
45
|
+
"bugs": {
|
|
46
|
+
"url": "https://github.com/karlorz/cmux/issues"
|
|
47
|
+
},
|
|
48
|
+
"author": "cmux team",
|
|
49
|
+
"license": "MIT",
|
|
50
|
+
"engines": {
|
|
51
|
+
"node": ">=20.0.0"
|
|
52
|
+
}
|
|
53
|
+
}
|