@laskarks/mcp-rag-node 1.0.6 → 1.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -50,7 +50,7 @@ For OpenRouter, use the model ID format, e.g. `openai/text-embedding-3-small` or
50
50
 
51
51
  | Variable | Description | Default |
52
52
  | ---------------------- | ------------------------------------- | ------- |
53
- | `RAG_CHUNK_MAX_TOKENS` | Max tokens per chunk before embedding | `512` |
53
+ | `RAG_CHUNK_MAX_TOKENS` | Max tokens per chunk before embedding | `1536` |
54
54
  | `RAG_CHUNK_OVERLAP` | Overlap tokens between chunks | `50` |
55
55
 
56
56
  ## Usage
@@ -88,10 +88,11 @@ Install the package first: `npm i @laskarks/mcp-rag-node`
88
88
  "mcpServers": {
89
89
  "rag": {
90
90
  "command": "npx",
91
- "args": ["-y", "@laskarks/mcp-rag-node"],
91
+ "args": ["-y", "@laskarks/mcp-rag-node@latest"],
92
92
  "env": {
93
93
  "APIKEY": "sk-...",
94
94
  "EMBEDDING_MODEL": "text-embedding-3-small",
95
+ "RAG_CHUNK_MAX_TOKENS": "1536",
95
96
  "PINECONE_API_KEY": "...",
96
97
  "PINECONE_INDEX": "rag-index",
97
98
  "PROVIDER": "openai"
@@ -108,10 +109,11 @@ Install the package first: `npm i @laskarks/mcp-rag-node`
108
109
  "mcpServers": {
109
110
  "rag": {
110
111
  "command": "npx",
111
- "args": ["-y", "@laskarks/mcp-rag-node"],
112
+ "args": ["-y", "@laskarks/mcp-rag-node@latest"],
112
113
  "env": {
113
114
  "APIKEY": "sk-...",
114
115
  "EMBEDDING_MODEL": "text-embedding-3-small",
116
+ "RAG_CHUNK_MAX_TOKENS": "1536",
115
117
  "PINECONE_API_KEY": "...",
116
118
  "PINECONE_INDEX": "rag-index",
117
119
  "PROVIDER": "openai"
package/dist/ai.js CHANGED
@@ -52,7 +52,7 @@ class AI {
52
52
  chunkByToken(text, maxTokens, overlap) {
53
53
  const defaultMax = env.RAG_CHUNK_MAX_TOKENS
54
54
  ? Number(env.RAG_CHUNK_MAX_TOKENS)
55
- : 512;
55
+ : 1536;
56
56
  const defaultOverlap = 50;
57
57
  const limit = maxTokens ?? defaultMax;
58
58
  const overlapTokens = overlap ?? defaultOverlap;
@@ -129,13 +129,13 @@ class AI {
129
129
  topK: 3,
130
130
  includeMetadata: true,
131
131
  });
132
- return results;
133
- // const relevantChunks = results.matches.map((match) => ({
134
- // text: match.metadata?.text as string,
135
- // score: match.score,
136
- // }));
137
- // const context = relevantChunks.map((c) => c.text).join("\n\n");
138
- // return context;
132
+ // Get text from metadata
133
+ const relevantChunks = results.matches.map((match) => ({
134
+ text: match.metadata?.text,
135
+ score: match.score,
136
+ }));
137
+ const context = relevantChunks.map((c) => c.text).join("\n\n");
138
+ return context;
139
139
  }
140
140
  else {
141
141
  return response?.data[0] || "Unexpected error";
package/dist/index.js CHANGED
@@ -60,7 +60,7 @@ async function main() {
60
60
  }, async ({ keyword }) => {
61
61
  const response = await CallAI.search_documents(keyword);
62
62
  return {
63
- content: [{ type: "text", text: `${response}` }],
63
+ content: [{ type: "text", text: JSON.stringify(response, null, 2) }],
64
64
  };
65
65
  });
66
66
  const transport = new StdioServerTransport();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@laskarks/mcp-rag-node",
3
- "version": "1.0.6",
3
+ "version": "1.0.7",
4
4
  "description": "Simple MCP RAG server using @modelcontextprotocol/sdk",
5
5
  "main": "dist/index.js",
6
6
  "bin": {
package/src/ai.ts CHANGED
@@ -68,7 +68,7 @@ class AI {
68
68
  chunkByToken(text: string, maxTokens?: number, overlap?: number): string[] {
69
69
  const defaultMax = env.RAG_CHUNK_MAX_TOKENS
70
70
  ? Number(env.RAG_CHUNK_MAX_TOKENS)
71
- : 512;
71
+ : 1536;
72
72
  const defaultOverlap = 50;
73
73
  const limit = maxTokens ?? defaultMax;
74
74
  const overlapTokens = overlap ?? defaultOverlap;