@echoes-io/mcp-server 1.4.2 → 1.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +54 -13
- package/lib/server.d.ts +12 -1
- package/lib/server.js +133 -42
- package/lib/tools/book-generate.d.ts +5 -4
- package/lib/tools/book-generate.js +0 -1
- package/lib/tools/index.d.ts +1 -0
- package/lib/tools/index.js +1 -0
- package/lib/tools/rag-characters.d.ts +18 -0
- package/lib/tools/rag-characters.js +26 -0
- package/lib/tools/rag-context.d.ts +3 -0
- package/lib/tools/rag-context.js +6 -0
- package/lib/tools/rag-index.d.ts +5 -4
- package/lib/tools/rag-index.js +28 -37
- package/lib/tools/rag-search.d.ts +6 -0
- package/lib/tools/rag-search.js +11 -0
- package/lib/tools/timeline-sync.d.ts +5 -4
- package/lib/tools/timeline-sync.js +0 -1
- package/package.json +9 -9
package/README.md
CHANGED

@@ -8,6 +8,8 @@ The server is distributed as an npm package and can be used without cloning the
 
 ### Using with MCP Clients
 
+**Important:** The server must be run from the `.github` directory of your Echoes project.
+
 Add to your MCP client configuration (e.g., `~/.config/q/mcp.json` for Amazon Q):
 
 ```json
@@ -15,7 +17,8 @@ Add to your MCP client configuration (e.g., `~/.config/q/mcp.json` for Amazon Q)
   "mcpServers": {
     "echoes": {
       "command": "npx",
-      "args": ["-y", "@echoes-io/mcp-server"]
+      "args": ["-y", "@echoes-io/mcp-server"],
+      "cwd": "/path/to/echoes-io/.github"
     }
   }
 }
@@ -34,9 +37,9 @@ Then configure:
   "mcpServers": {
     "echoes": {
       "command": "echoes-mcp-server",
+      "cwd": "/path/to/echoes-io/.github",
       "env": {
-        "ECHOES_RAG_PROVIDER": "e5-small",
-        "ECHOES_RAG_DB_PATH": "./rag.db"
+        "ECHOES_RAG_PROVIDER": "e5-small"
       }
     }
   }
@@ -46,7 +49,30 @@ Then configure:
 **Optional RAG Configuration:**
 - `ECHOES_RAG_PROVIDER`: Embedding provider (`e5-small`, `e5-large`, or `gemini`). Default: `e5-small`
 - `ECHOES_GEMINI_API_KEY`: Required if using `gemini` provider
-
+
+## Multi-Timeline Architecture
+
+The server automatically discovers and manages multiple timelines:
+
+```
+echoes-io/
+  .github/           # Server runs from here
+  timeline-eros/     # Private timeline repo
+    tracker.db       # Timeline-specific database
+    rag.db           # Timeline-specific RAG index
+    content/...
+  timeline-other/    # Another private timeline
+    tracker.db
+    rag.db
+    content/...
+```
+
+**Benefits:**
+- Each timeline has isolated databases in its own repository
+- Timeline repositories can be private while `.github` is public
+- No need to specify `contentPath` - auto-discovered from directory structure
+- Easy to manage access: just share/don't share specific timeline repos
+
 
 ## Available Tools
 
@@ -77,11 +103,12 @@ All tools require a `timeline` parameter to specify which timeline to operate on
 
 ### Timeline Operations
 - **`timeline-sync`** - Synchronize filesystem content with database
-  - Input: `
+  - Input: `timeline` (timeline name)
+  - Note: Content path is auto-discovered from timeline directory structure
 
 ### Statistics
 - **`stats`** - Get aggregate statistics with optional filters
-  - Input: optional: `arc`, `episode`, `pov`
+  - Input: `timeline`, optional: `arc`, `episode`, `pov`
   - Output: Total words/chapters, POV distribution, arc/episode breakdown, longest/shortest chapters
   - Examples:
     - No filters: Overall timeline statistics
@@ -91,24 +118,38 @@ All tools require a `timeline` parameter to specify which timeline to operate on
 
 ### RAG (Semantic Search)
 - **`rag-index`** - Index chapters into vector database for semantic search
-  - Input: `
+  - Input: `timeline`, optional: `arc`, `episode` (to index specific content)
   - Output: Number of chapters indexed
-  - Note:
+  - Note: Content path is auto-discovered from timeline directory structure
+  - Note: Automatically extracts character names using NER (Named Entity Recognition)
 
 - **`rag-search`** - Semantic search across timeline content
-  - Input: `query`, optional: `arc`, `pov`, `maxResults`
-  - Output: Relevant chapters with similarity scores and
+  - Input: `timeline`, `query`, optional: `arc`, `pov`, `maxResults`, `characters`, `allCharacters`
+  - Output: Relevant chapters with similarity scores, previews, and character names
+  - Character filtering:
+    - `characters`: Array of character names to filter by
+    - `allCharacters`: If true, all characters must be present (AND). If false, at least one (OR). Default: false
+  - Examples:
+    - `characters: ["Alice", "Bob"], allCharacters: true` - Find chapters where both Alice AND Bob appear
+    - `characters: ["Alice", "Bob"]` - Find chapters where Alice OR Bob appear
 
 - **`rag-context`** - Retrieve relevant context for AI interactions
-  - Input: `query`, optional: `arc`, `pov`, `maxChapters`
-  - Output: Full chapter content for AI context
+  - Input: `timeline`, `query`, optional: `arc`, `pov`, `maxChapters`, `characters`
+  - Output: Full chapter content for AI context with character names
+  - Supports character filtering like `rag-search`
+
+- **`rag-characters`** - Get all characters that appear in chapters with a specific character
+  - Input: `timeline`, `character` (character name)
+  - Output: List of co-occurring characters sorted alphabetically
+  - Use case: "Who does character X interact with?"
 
 ### Book Generation
 - **`book-generate`** - Generate PDF book from timeline content using LaTeX
-  - Input: `
+  - Input: `timeline`, `outputPath`, optional: `episodes`, `format`
   - Output: PDF book with Victoria Regia template
   - Formats: `a4` (default), `a5`
   - Requirements: pandoc, LaTeX distribution (pdflatex/xelatex/lualatex)
+  - Note: Content path is auto-discovered from timeline directory structure
 
 ## Development
 
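To make the new `timeline` parameter and the character filters described above concrete, here is a minimal client-side sketch. It assumes the MCP TypeScript SDK's stdio client; the paths, timeline name, query, and character names are illustrative placeholders, not values shipped with the package.

```typescript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';

// Spawn the server the same way the README configuration above does,
// from the .github directory of the Echoes project (placeholder path).
const transport = new StdioClientTransport({
  command: 'npx',
  args: ['-y', '@echoes-io/mcp-server'],
  cwd: '/path/to/echoes-io/.github',
});

const client = new Client({ name: 'example-client', version: '1.0.0' });
await client.connect(transport);

// Chapters of the "eros" timeline where both Alice AND Bob appear.
const result = await client.callTool({
  name: 'rag-search',
  arguments: {
    timeline: 'eros',
    query: 'their first confrontation',
    characters: ['Alice', 'Bob'],
    allCharacters: true,
    maxResults: 5,
  },
});

console.log(result.content);
```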
package/lib/server.d.ts
CHANGED

@@ -1,7 +1,12 @@
 import { RAGSystem } from '@echoes-io/rag';
 import { Tracker } from '@echoes-io/tracker';
 import { Server } from '@modelcontextprotocol/sdk/server/index.js';
-export declare function createServer(tracker: Tracker, rag: RAGSystem): Server<{
+interface TimelineContext {
+    tracker: Tracker;
+    rag: RAGSystem;
+    contentPath: string;
+}
+export declare function createServer(timelines: Map<string, TimelineContext>): Server<{
     method: string;
     params?: {
         [x: string]: unknown;
@@ -24,4 +29,10 @@ export declare function createServer(tracker: Tracker, rag: RAGSystem): Server<{
         [x: string]: unknown;
     } | undefined;
 }>;
+interface TimelineContext {
+    tracker: Tracker;
+    rag: RAGSystem;
+    contentPath: string;
+}
 export declare function runServer(): Promise<void>;
+export {};
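The declaration change above replaces the old `(tracker, rag)` pair with a map of per-timeline contexts. Below is a minimal sketch of building that map by hand, using the `Tracker` and `RAGSystem` constructors seen elsewhere in this diff; the deep import path and the file locations are hypothetical, and in normal use `runServer` assembles the map from the discovered `timeline-*` directories.

```typescript
import { RAGSystem } from '@echoes-io/rag';
import { Tracker } from '@echoes-io/tracker';
// Hypothetical deep import; adjust to however the package actually exposes createServer.
import { createServer } from '@echoes-io/mcp-server/lib/server.js';

// One TimelineContext per timeline, matching the interface declared above.
const tracker = new Tracker('/path/to/echoes-io/timeline-eros/tracker.db');
await tracker.init();

const rag = new RAGSystem({
  provider: 'e5-small',
  dbPath: '/path/to/echoes-io/timeline-eros/rag.db',
});

const timelines = new Map([
  ['eros', { tracker, rag, contentPath: '/path/to/echoes-io/timeline-eros/content' }],
]);

const server = createServer(timelines);
```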
package/lib/server.js
CHANGED

@@ -7,10 +7,10 @@ import { Server } from '@modelcontextprotocol/sdk/server/index.js';
 import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
 import { CallToolRequestSchema, ListToolsRequestSchema } from '@modelcontextprotocol/sdk/types.js';
 import { zodToJsonSchema } from 'zod-to-json-schema';
-import { bookGenerate, bookGenerateSchema, chapterDelete, chapterDeleteSchema, chapterInfo, chapterInfoSchema, chapterInsert, chapterInsertSchema, chapterRefresh, chapterRefreshSchema, episodeInfo, episodeInfoSchema, episodeUpdate, episodeUpdateSchema, ragContext, ragContextSchema, ragIndex, ragIndexSchema, ragSearch, ragSearchSchema, stats, statsSchema, timelineSync, timelineSyncSchema, wordsCount, wordsCountSchema, } from './tools/index.js';
+import { bookGenerate, bookGenerateSchema, chapterDelete, chapterDeleteSchema, chapterInfo, chapterInfoSchema, chapterInsert, chapterInsertSchema, chapterRefresh, chapterRefreshSchema, episodeInfo, episodeInfoSchema, episodeUpdate, episodeUpdateSchema, ragCharacters, ragCharactersSchema, ragContext, ragContextSchema, ragIndex, ragIndexSchema, ragSearch, ragSearchSchema, stats, statsSchema, timelineSync, timelineSyncSchema, wordsCount, wordsCountSchema, } from './tools/index.js';
 const __dirname = dirname(fileURLToPath(import.meta.url));
 const pkg = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf-8'));
-export function createServer(tracker, rag) {
+export function createServer(timelines) {
     const server = new Server({
         name: pkg.name,
         version: pkg.version,
@@ -82,6 +82,11 @@ export function createServer(tracker, rag) {
                 description: 'Retrieve relevant context for AI interactions',
                 inputSchema: zodToJsonSchema(ragContextSchema),
             },
+            {
+                name: 'rag-characters',
+                description: 'Get all characters that appear in chapters with a specific character',
+                inputSchema: zodToJsonSchema(ragCharactersSchema),
+            },
             {
                 name: 'book-generate',
                 description: 'Generate PDF book from timeline content using LaTeX',
@@ -92,33 +97,82 @@ export function createServer(tracker, rag) {
     });
     server.setRequestHandler(CallToolRequestSchema, async (request) => {
        const { name, arguments: args } = request.params;
+        // Helper to get timeline context
+        const getContext = (timeline) => {
+            const ctx = timelines.get(timeline);
+            if (!ctx) {
+                throw new Error(`Timeline "${timeline}" not found. Available: ${Array.from(timelines.keys()).join(', ')}`);
+            }
+            return ctx;
+        };
        switch (name) {
            case 'words-count':
                return await wordsCount(wordsCountSchema.parse(args));
-            case 'chapter-info':
-
-
-                return await
-
-
-
-
-
-
-            case '
-
-
-                return await
-
-
-
-
-
-
-            case '
-
-
-                return await
+            case 'chapter-info': {
+                const parsed = chapterInfoSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await chapterInfo(parsed, tracker);
+            }
+            case 'chapter-refresh': {
+                const parsed = chapterRefreshSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await chapterRefresh(parsed, tracker);
+            }
+            case 'chapter-delete': {
+                const parsed = chapterDeleteSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await chapterDelete(parsed, tracker);
+            }
+            case 'chapter-insert': {
+                const parsed = chapterInsertSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await chapterInsert(parsed, tracker);
+            }
+            case 'episode-info': {
+                const parsed = episodeInfoSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await episodeInfo(parsed, tracker);
+            }
+            case 'episode-update': {
+                const parsed = episodeUpdateSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await episodeUpdate(parsed, tracker);
+            }
+            case 'timeline-sync': {
+                const parsed = timelineSyncSchema.parse(args);
+                const { tracker, contentPath } = getContext(parsed.timeline);
+                return await timelineSync({ ...parsed, contentPath }, tracker);
+            }
+            case 'stats': {
+                const parsed = statsSchema.parse(args);
+                const { tracker } = getContext(parsed.timeline);
+                return await stats(parsed, tracker);
+            }
+            case 'rag-index': {
+                const parsed = ragIndexSchema.parse(args);
+                const { tracker, rag, contentPath } = getContext(parsed.timeline);
+                return await ragIndex({ ...parsed, contentPath }, tracker, rag);
+            }
+            case 'rag-search': {
+                const parsed = ragSearchSchema.parse(args);
+                const { rag } = getContext(parsed.timeline);
+                return await ragSearch(parsed, rag);
+            }
+            case 'rag-context': {
+                const parsed = ragContextSchema.parse(args);
+                const { rag } = getContext(parsed.timeline);
+                return await ragContext(parsed, rag);
+            }
+            case 'rag-characters': {
+                const parsed = ragCharactersSchema.parse(args);
+                const { rag } = getContext(parsed.timeline);
+                return await ragCharacters(parsed, rag);
+            }
+            case 'book-generate': {
+                const parsed = bookGenerateSchema.parse(args);
+                const { contentPath } = getContext(parsed.timeline);
+                return await bookGenerate({ ...parsed, contentPath });
+            }
            default:
                throw new Error(`Unknown tool: ${name}`);
        }
@@ -126,22 +180,59 @@ export function createServer(tracker, rag) {
     return server;
 }
 export async function runServer() {
-    //
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    // Validate we're running from .github directory
+    if (process.env.NODE_ENV !== 'test' && !process.cwd().endsWith('/.github')) {
+        throw new Error('Server must be run from .github directory');
+    }
+    const timelines = new Map();
+    if (process.env.NODE_ENV === 'test') {
+        // Test mode: single in-memory database
+        const tracker = new Tracker(':memory:');
+        await tracker.init();
+        const rag = new RAGSystem({
+            provider: 'e5-small',
+            dbPath: ':memory:',
+        });
+        timelines.set('test', { tracker, rag, contentPath: './test-content' });
+        console.error('Test mode: in-memory databases');
+    }
+    else {
+        // Production: discover timelines and create separate databases
+        const { readdirSync, existsSync, mkdirSync } = await import('node:fs');
+        const { join } = await import('node:path');
+        const parentDir = join(process.cwd(), '..');
+        const entries = readdirSync(parentDir, { withFileTypes: true });
+        for (const entry of entries) {
+            if (entry.isDirectory() && entry.name.startsWith('timeline-')) {
+                const timelineName = entry.name.replace('timeline-', '');
+                const timelinePath = join(parentDir, entry.name);
+                const contentPath = join(timelinePath, 'content');
+                if (!existsSync(contentPath)) {
+                    console.error(`Skipping ${entry.name}: no content directory`);
+                    continue;
+                }
+                // Initialize tracker
+                const trackerPath = join(timelinePath, 'tracker.db');
+                const tracker = new Tracker(trackerPath);
+                await tracker.init();
+                // Initialize RAG
+                const ragPath = join(timelinePath, 'rag.db');
+                const provider = (process.env.ECHOES_RAG_PROVIDER || 'e5-small');
+                const rag = new RAGSystem({
+                    provider,
+                    dbPath: ragPath,
+                    geminiApiKey: process.env.ECHOES_GEMINI_API_KEY,
+                });
+                timelines.set(timelineName, { tracker, rag, contentPath });
+                console.error(`Timeline "${timelineName}" initialized: ${trackerPath}`);
+            }
+        }
+        if (timelines.size === 0) {
+            throw new Error('No timelines found in parent directory');
+        }
+    }
+    const server = createServer(timelines);
     const transport = new StdioServerTransport();
     await server.connect(transport);
-    console.error(
+    console.error(`Echoes MCP Server running on stdio (${timelines.size} timelines)`);
 }
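To restate the discovery convention introduced in `runServer` above: the process must be started from `.github`, and every sibling directory named `timeline-<name>` that contains a `content/` folder is registered as timeline `<name>`, with `tracker.db` and `rag.db` created inside that directory. Here is a small standalone sketch (not part of the package) that mirrors just the naming rule:

```typescript
import { existsSync, readdirSync } from 'node:fs';
import { join } from 'node:path';

// Mirrors the filter in runServer(): sibling "timeline-*" directories
// with a content/ folder become timelines named after the suffix.
export function discoverTimelineNames(githubDir: string): string[] {
  const parentDir = join(githubDir, '..');
  return readdirSync(parentDir, { withFileTypes: true })
    .filter((e) => e.isDirectory() && e.name.startsWith('timeline-'))
    .filter((e) => existsSync(join(parentDir, e.name, 'content')))
    .map((e) => e.name.replace('timeline-', ''));
}

// e.g. discoverTimelineNames('/path/to/echoes-io/.github') -> ['eros', 'other']
```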
package/lib/tools/book-generate.d.ts
CHANGED

@@ -1,26 +1,27 @@
 import { z } from 'zod';
 export declare const bookGenerateSchema: z.ZodObject<{
     timeline: z.ZodString;
-    contentPath: z.ZodString;
     outputPath: z.ZodString;
     episodes: z.ZodOptional<z.ZodString>;
     format: z.ZodOptional<z.ZodEnum<["a4", "a5"]>>;
 }, "strip", z.ZodTypeAny, {
     timeline: string;
-    contentPath: string;
     outputPath: string;
     episodes?: string | undefined;
     format?: "a4" | "a5" | undefined;
 }, {
     timeline: string;
-    contentPath: string;
     outputPath: string;
     episodes?: string | undefined;
     format?: "a4" | "a5" | undefined;
 }>;
-
+type BookGenerateArgs = z.infer<typeof bookGenerateSchema> & {
+    contentPath: string;
+};
+export declare function bookGenerate(args: BookGenerateArgs): Promise<{
     content: {
         type: "text";
         text: string;
     }[];
 }>;
+export {};
package/lib/tools/book-generate.js
CHANGED

@@ -2,7 +2,6 @@ import { generateBook } from '@echoes-io/books-generator';
 import { z } from 'zod';
 export const bookGenerateSchema = z.object({
     timeline: z.string().describe('Timeline name'),
-    contentPath: z.string().describe('Path to timeline content folder'),
     outputPath: z.string().describe('Output PDF file path'),
     episodes: z.string().optional().describe('Comma-separated episode numbers (e.g., "1,2,3")'),
     format: z.enum(['a4', 'a5']).optional().describe('Page format (default: a4)'),
package/lib/tools/index.d.ts
CHANGED

@@ -5,6 +5,7 @@ export { chapterInsert, chapterInsertSchema } from './chapter-insert.js';
 export { chapterRefresh, chapterRefreshSchema } from './chapter-refresh.js';
 export { episodeInfo, episodeInfoSchema } from './episode-info.js';
 export { episodeUpdate, episodeUpdateSchema } from './episode-update.js';
+export { ragCharacters, ragCharactersSchema } from './rag-characters.js';
 export { ragContext, ragContextSchema } from './rag-context.js';
 export { ragIndex, ragIndexSchema } from './rag-index.js';
 export { ragSearch, ragSearchSchema } from './rag-search.js';
package/lib/tools/index.js
CHANGED

@@ -5,6 +5,7 @@ export { chapterInsert, chapterInsertSchema } from './chapter-insert.js';
 export { chapterRefresh, chapterRefreshSchema } from './chapter-refresh.js';
 export { episodeInfo, episodeInfoSchema } from './episode-info.js';
 export { episodeUpdate, episodeUpdateSchema } from './episode-update.js';
+export { ragCharacters, ragCharactersSchema } from './rag-characters.js';
 export { ragContext, ragContextSchema } from './rag-context.js';
 export { ragIndex, ragIndexSchema } from './rag-index.js';
 export { ragSearch, ragSearchSchema } from './rag-search.js';
package/lib/tools/rag-characters.d.ts
ADDED

@@ -0,0 +1,18 @@
+import type { RAGSystem } from '@echoes-io/rag';
+import { z } from 'zod';
+export declare const ragCharactersSchema: z.ZodObject<{
+    timeline: z.ZodString;
+    character: z.ZodString;
+}, "strip", z.ZodTypeAny, {
+    timeline: string;
+    character: string;
+}, {
+    timeline: string;
+    character: string;
+}>;
+export declare function ragCharacters(args: z.infer<typeof ragCharactersSchema>, rag: RAGSystem): Promise<{
+    content: {
+        type: "text";
+        text: string;
+    }[];
+}>;
package/lib/tools/rag-characters.js
ADDED

@@ -0,0 +1,26 @@
+import { z } from 'zod';
+export const ragCharactersSchema = z.object({
+    timeline: z.string().describe('Timeline name'),
+    character: z.string().describe('Character name to find co-occurrences for'),
+});
+export async function ragCharacters(args, rag) {
+    try {
+        const characters = await rag.getCharacterMentions(args.character);
+        return {
+            content: [
+                {
+                    type: 'text',
+                    text: JSON.stringify({
+                        character: args.character,
+                        timeline: args.timeline,
+                        coOccurringCharacters: characters.filter((c) => c !== args.character).sort(),
+                        total: characters.length - 1,
+                    }, null, 2),
+                },
+            ],
+        };
+    }
+    catch (error) {
+        throw new Error(`Failed to get character mentions: ${error instanceof Error ? error.message : 'Unknown error'}`);
+    }
+}
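The new tool serializes its result as JSON in `content[0].text`. Below is a sketch of the payload shape produced by `ragCharacters` above; the character names are illustrative.

```typescript
// Shape of the JSON text returned by the rag-characters tool, as built
// by the JSON.stringify(...) call in rag-characters.js above.
interface RagCharactersPayload {
  character: string;                // the character that was queried
  timeline: string;                 // echoed back from the arguments
  coOccurringCharacters: string[];  // sorted alphabetically, query character excluded
  total: number;                    // length of the raw mention list minus one
}

// Illustrative example, assuming rag.getCharacterMentions('Alice')
// returned ['Alice', 'Bob', 'Clara']:
const example: RagCharactersPayload = {
  character: 'Alice',
  timeline: 'eros',
  coOccurringCharacters: ['Bob', 'Clara'],
  total: 2,
};
```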
package/lib/tools/rag-context.d.ts
CHANGED

@@ -6,17 +6,20 @@ export declare const ragContextSchema: z.ZodObject<{
     arc: z.ZodOptional<z.ZodString>;
     pov: z.ZodOptional<z.ZodString>;
     maxChapters: z.ZodOptional<z.ZodNumber>;
+    characters: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
 }, "strip", z.ZodTypeAny, {
     timeline: string;
     query: string;
     arc?: string | undefined;
     pov?: string | undefined;
+    characters?: string[] | undefined;
     maxChapters?: number | undefined;
 }, {
     timeline: string;
     query: string;
     arc?: string | undefined;
     pov?: string | undefined;
+    characters?: string[] | undefined;
     maxChapters?: number | undefined;
 }>;
 export declare function ragContext(args: z.infer<typeof ragContextSchema>, rag: RAGSystem): Promise<{
package/lib/tools/rag-context.js
CHANGED

@@ -5,6 +5,10 @@ export const ragContextSchema = z.object({
     arc: z.string().optional().describe('Filter by arc name'),
     pov: z.string().optional().describe('Filter by POV character'),
     maxChapters: z.number().optional().describe('Maximum number of chapters (default: 5)'),
+    characters: z
+        .array(z.string())
+        .optional()
+        .describe('Filter by character names present in chapter'),
 });
 export async function ragContext(args, rag) {
     try {
@@ -14,6 +18,7 @@ export async function ragContext(args, rag) {
             arc: args.arc,
             pov: args.pov,
             maxChapters: args.maxChapters,
+            characters: args.characters,
         });
         return {
             content: [
@@ -33,6 +38,7 @@ export async function ragContext(args, rag) {
                         chapter: r.metadata.number,
                         pov: r.metadata.pov,
                         title: r.metadata.title,
+                        characters: r.metadata.characterNames || [],
                     },
                     similarity: r.similarity,
                     content: r.content,
package/lib/tools/rag-index.d.ts
CHANGED

@@ -3,23 +3,24 @@ import type { Tracker } from '@echoes-io/tracker';
 import { z } from 'zod';
 export declare const ragIndexSchema: z.ZodObject<{
     timeline: z.ZodString;
-    contentPath: z.ZodOptional<z.ZodString>;
     arc: z.ZodOptional<z.ZodString>;
     episode: z.ZodOptional<z.ZodNumber>;
 }, "strip", z.ZodTypeAny, {
     timeline: string;
-    contentPath?: string | undefined;
     arc?: string | undefined;
     episode?: number | undefined;
 }, {
     timeline: string;
-    contentPath?: string | undefined;
     arc?: string | undefined;
     episode?: number | undefined;
 }>;
-
+type RagIndexArgs = z.infer<typeof ragIndexSchema> & {
+    contentPath: string;
+};
+export declare function ragIndex(args: RagIndexArgs, tracker: Tracker, rag: RAGSystem): Promise<{
     content: {
         type: "text";
         text: string;
     }[];
 }>;
+export {};
package/lib/tools/rag-index.js
CHANGED

@@ -4,7 +4,6 @@ import { parseMarkdown } from '@echoes-io/utils';
 import { z } from 'zod';
 export const ragIndexSchema = z.object({
     timeline: z.string().describe('Timeline name'),
-    contentPath: z.string().optional().describe('Path to content directory (required for indexing)'),
     arc: z.string().optional().describe('Index specific arc only'),
     episode: z.number().optional().describe('Index specific episode only (requires arc)'),
 });
@@ -35,46 +34,38 @@ export async function ragIndex(args, tracker, rag) {
     // Convert to embedding format and add to RAG
     const embeddingChapters = chapters
         .map((ch) => {
-        //
-
-
-
-
-
-
-
-
-
-            return null;
-        }
-        // Find chapter file by episode and chapter number (filename-agnostic for title/pov)
-        const chapterPattern = `ep${String(ch.episodeNumber).padStart(2, '0')}-ch${String(ch.number).padStart(3, '0')}-`;
-        const chapterFiles = readdirSync(episodePath).filter((f) => f.startsWith(chapterPattern) && f.endsWith('.md'));
-        if (chapterFiles.length === 0) {
-            console.error(`Chapter file not found for ${ch.arcName}/ep${ch.episodeNumber}/ch${ch.number}`);
-            return null;
-        }
-        const filePath = join(episodePath, chapterFiles[0]);
-        const fileContent = readFileSync(filePath, 'utf-8');
-        const { content } = parseMarkdown(fileContent);
-        return {
-            id: `${ch.timelineName}-${ch.arcName}-${ch.episodeNumber}-${ch.number}`,
-            metadata: ch,
-            content,
-        };
+        // Read actual file content
+        try {
+            // Find episode directory
+            const episodeDir = `ep${String(ch.episodeNumber).padStart(2, '0')}`;
+            const arcPath = join(args.contentPath, ch.arcName);
+            const episodePath = readdirSync(arcPath, { withFileTypes: true })
+                .filter((e) => e.isDirectory() && e.name.startsWith(episodeDir))
+                .map((e) => join(arcPath, e.name))[0];
+            if (!episodePath) {
+                console.error(`Episode directory not found for ${ch.arcName}/ep${ch.episodeNumber}`);
+                return null;
             }
-
-
+            // Find chapter file by episode and chapter number (filename-agnostic for title/pov)
+            const chapterPattern = `ep${String(ch.episodeNumber).padStart(2, '0')}-ch${String(ch.number).padStart(3, '0')}-`;
+            const chapterFiles = readdirSync(episodePath).filter((f) => f.startsWith(chapterPattern) && f.endsWith('.md'));
+            if (chapterFiles.length === 0) {
+                console.error(`Chapter file not found for ${ch.arcName}/ep${ch.episodeNumber}/ch${ch.number}`);
                 return null;
             }
+            const filePath = join(episodePath, chapterFiles[0]);
+            const fileContent = readFileSync(filePath, 'utf-8');
+            const { content } = parseMarkdown(fileContent);
+            return {
+                id: `${ch.timelineName}-${ch.arcName}-${ch.episodeNumber}-${ch.number}`,
+                metadata: ch,
+                content,
+            };
+        }
+        catch (error) {
+            console.error(`Error reading chapter ${ch.arcName}/ep${ch.episodeNumber}/ch${ch.number}:`, error);
+            return null;
         }
-        // Fallback: no content (for tests or when contentPath not provided)
-        return {
-            id: `${ch.timelineName}-${ch.arcName}-${ch.episodeNumber}-${ch.number}`,
-            metadata: ch,
-            content: '',
-        };
     })
         .filter((ch) => ch !== null);
     await rag.addChapters(embeddingChapters);
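The path resolution added above implies a fixed naming convention for chapter files. Here is a short sketch of the prefixes `ragIndex` looks for, derived from the `episodeDir` and `chapterPattern` expressions in the new code; the arc and slug names are illustrative.

```typescript
// Mirrors the lookup in ragIndex(): an episode directory whose name starts
// with "epNN", containing chapter files named "epNN-chNNN-<anything>.md".
const episodeNumber = 1;
const chapterNumber = 12;

const episodeDirPrefix = `ep${String(episodeNumber).padStart(2, '0')}`; // "ep01"
const chapterFilePrefix = `${episodeDirPrefix}-ch${String(chapterNumber).padStart(3, '0')}-`; // "ep01-ch012-"

// So a chapter would be matched at, for example:
//   <contentPath>/<arcName>/ep01-opening/ep01-ch012-the-harbor.md
// where "opening" and "the-harbor" are illustrative slugs; only the
// "ep01" and "ep01-ch012-" prefixes matter to the indexer.
console.log(episodeDirPrefix, chapterFilePrefix);
```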
package/lib/tools/rag-search.d.ts
CHANGED

@@ -6,18 +6,24 @@ export declare const ragSearchSchema: z.ZodObject<{
     arc: z.ZodOptional<z.ZodString>;
     pov: z.ZodOptional<z.ZodString>;
     maxResults: z.ZodOptional<z.ZodNumber>;
+    characters: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
+    allCharacters: z.ZodOptional<z.ZodBoolean>;
 }, "strip", z.ZodTypeAny, {
     timeline: string;
     query: string;
     arc?: string | undefined;
     pov?: string | undefined;
+    characters?: string[] | undefined;
     maxResults?: number | undefined;
+    allCharacters?: boolean | undefined;
 }, {
     timeline: string;
     query: string;
     arc?: string | undefined;
     pov?: string | undefined;
+    characters?: string[] | undefined;
     maxResults?: number | undefined;
+    allCharacters?: boolean | undefined;
 }>;
 export declare function ragSearch(args: z.infer<typeof ragSearchSchema>, rag: RAGSystem): Promise<{
     content: {
package/lib/tools/rag-search.js
CHANGED

@@ -5,6 +5,14 @@ export const ragSearchSchema = z.object({
     arc: z.string().optional().describe('Filter by arc name'),
     pov: z.string().optional().describe('Filter by POV character'),
     maxResults: z.number().optional().describe('Maximum number of results (default: 10)'),
+    characters: z
+        .array(z.string())
+        .optional()
+        .describe('Filter by character names present in chapter'),
+    allCharacters: z
+        .boolean()
+        .optional()
+        .describe('If true, all characters must be present (AND). If false, at least one (OR). Default: false'),
 });
 export async function ragSearch(args, rag) {
     try {
@@ -13,6 +21,8 @@ export async function ragSearch(args, rag) {
             arc: args.arc,
             pov: args.pov,
             maxResults: args.maxResults,
+            characters: args.characters,
+            allCharacters: args.allCharacters,
         });
         return {
             content: [
@@ -32,6 +42,7 @@ export async function ragSearch(args, rag) {
                         chapter: r.metadata.number,
                         pov: r.metadata.pov,
                         title: r.metadata.title,
+                        characters: r.metadata.characterNames || [],
                     },
                     similarity: r.similarity,
                     preview: `${r.content.substring(0, 200)}...`,
package/lib/tools/timeline-sync.d.ts
CHANGED

@@ -2,17 +2,18 @@ import type { Tracker } from '@echoes-io/tracker';
 import { z } from 'zod';
 export declare const timelineSyncSchema: z.ZodObject<{
     timeline: z.ZodString;
-    contentPath: z.ZodString;
 }, "strip", z.ZodTypeAny, {
     timeline: string;
-    contentPath: string;
 }, {
     timeline: string;
-    contentPath: string;
 }>;
-
+type TimelineSyncArgs = z.infer<typeof timelineSyncSchema> & {
+    contentPath: string;
+};
+export declare function timelineSync(args: TimelineSyncArgs, tracker: Tracker): Promise<{
     content: {
         type: "text";
         text: string;
     }[];
 }>;
+export {};
package/lib/tools/timeline-sync.js
CHANGED

@@ -4,7 +4,6 @@ import { getTextStats, parseMarkdown } from '@echoes-io/utils';
 import { z } from 'zod';
 export const timelineSyncSchema = z.object({
     timeline: z.string().describe('Timeline name'),
-    contentPath: z.string().describe('Path to content directory'),
 });
 export async function timelineSync(args, tracker) {
     try {
package/package.json
CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "@echoes-io/mcp-server",
   "type": "module",
-  "version": "1.4.2",
+  "version": "1.6.0",
   "description": "Model Context Protocol server for AI integration with Echoes storytelling platform",
   "scripts": {
     "dev": "tsx cli/index.ts",
@@ -63,30 +63,30 @@
     ]
   },
   "devDependencies": {
-    "@biomejs/biome": "^2.3.
+    "@biomejs/biome": "^2.3.3",
     "@semantic-release/changelog": "^6.0.3",
     "@semantic-release/git": "^10.0.1",
     "@tsconfig/node22": "^22.0.2",
-    "@types/node": "^24.
-    "@vitest/coverage-v8": "^
+    "@types/node": "^24.10.0",
+    "@vitest/coverage-v8": "^4.0.6",
     "concurrently": "^9.2.1",
     "husky": "^9.1.7",
     "lint-staged": "^16.2.5",
     "lockfile-lint": "^4.14.1",
     "ls-engines": "^0.9.3",
     "publint": "^0.3.15",
-    "rimraf": "^6.0
+    "rimraf": "^6.1.0",
     "semantic-release": "^25.0.1",
     "tsx": "^4.19.2",
     "typescript": "^5.9.3",
-    "vitest": "^
+    "vitest": "^4.0.6"
   },
   "dependencies": {
     "@echoes-io/books-generator": "^1.0.1",
-    "@echoes-io/models": "^1.0.
-    "@echoes-io/rag": "^1.
+    "@echoes-io/models": "^1.0.3",
+    "@echoes-io/rag": "^1.2.0",
    "@echoes-io/tracker": "^1.0.1",
     "@echoes-io/utils": "^1.2.0",
-    "@modelcontextprotocol/sdk": "^1.0.
+    "@modelcontextprotocol/sdk": "^1.0.2"
   }
 }