@smara/mcp-server 1.0.0 → 2.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  # @smara/mcp-server
2
2
 
3
- MCP server for the [Smara Memory API](https://smara.io) — give any AI app persistent, decay-aware memory.
3
+ MCP server for the [Smara Memory API](https://smara.io) — give any AI app persistent memory with Temporal Memory Scoring™.
4
4
 
5
5
  ## Quick Start
6
6
 
@@ -27,18 +27,14 @@ Get a free API key at [smara.io](https://smara.io) (10,000 memories, no credit c
27
27
  | Tool | Description |
28
28
  |------|-------------|
29
29
  | `store_memory` | Store a fact about a user with importance scoring |
30
- | `search_memories` | Semantic search with Ebbinghaus decay-aware ranking |
30
+ | `search_memories` | Semantic search with Temporal Memory Scoring™ |
31
31
  | `get_user_context` | Pre-formatted context string for LLM system prompts |
32
32
  | `delete_memory` | Delete a specific memory |
33
33
  | `get_usage` | Check plan limits and memory count |
34
34
 
35
35
  ## How It Works
36
36
 
37
- Smara combines vector similarity search (Voyage AI embeddings) with Ebbinghaus forgetting curves. Memories decay over time recent, frequently-accessed memories rank higher, just like human recall.
38
-
39
- ```
40
- score = similarity × 0.7 + decay_score × 0.3
41
- ```
37
+ Smara uses **Temporal Memory Scoring™** — a proprietary ranking system that makes AI memory work like human recall. Memories naturally fade over time, modulated by importance and access patterns. Recent, critical memories surface first. Stale, trivial ones fade. Contradictions are auto-detected and resolved.
42
38
 
43
39
  ## Works With
44
40
 
@@ -1 +1,4 @@
1
+ export declare const DEFAULT_SOURCE: string;
2
+ export declare const DEFAULT_USER_ID: string;
3
+ export declare const DEFAULT_NAMESPACE: string;
1
4
  export declare function smaraFetch(path: string, options?: RequestInit): Promise<unknown>;
@@ -1,5 +1,8 @@
1
1
  const BASE_URL = process.env.SMARA_API_URL || "https://api.smara.io";
2
2
  const API_KEY = process.env.SMARA_API_KEY;
3
+ export const DEFAULT_SOURCE = process.env.SMARA_SOURCE || "mcp";
4
+ export const DEFAULT_USER_ID = process.env.SMARA_USER_ID || "";
5
+ export const DEFAULT_NAMESPACE = process.env.SMARA_NAMESPACE || "default";
3
6
  export async function smaraFetch(path, options = {}) {
4
7
  if (!API_KEY)
5
8
  throw new Error("SMARA_API_KEY environment variable is required. Get a free key at https://smara.io");
package/build/index.js CHANGED
@@ -2,10 +2,26 @@
2
2
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
3
3
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
4
  import { z } from "zod";
5
- import { smaraFetch } from "./api-client.js";
6
- const server = new McpServer({
7
- name: "smara",
8
- version: "1.0.0",
5
+ import { smaraFetch, DEFAULT_SOURCE, DEFAULT_USER_ID, DEFAULT_NAMESPACE } from "./api-client.js";
6
+ const userIdHint = DEFAULT_USER_ID
7
+ ? `The user_id for this session is "${DEFAULT_USER_ID}". Always use this value.`
8
+ : `Use a stable identifier for user_id — the project name, directory name, or username. Be consistent across sessions.`;
9
+ const server = new McpServer({ name: "smara", version: "2.0.0" }, {
10
+ instructions: `You have access to Smara, a persistent cross-platform memory system. Memories stored here persist across conversations and are shared across all AI tools the user has connected.
11
+
12
+ AUTOMATIC BEHAVIOR (do this without being asked):
13
+ 1. AT CONVERSATION START: Call get_user_context with the user's ID to load relevant memories. If the conversation has a clear topic, pass it as the query parameter to focus results.
14
+ 2. WHEN YOU LEARN NEW FACTS: If the user shares a preference, correction, important decision, project detail, or personal fact, call store_memory to save it. Use importance 0.7-1.0 for preferences and corrections, 0.5 for general facts, 0.1-0.3 for trivia.
15
+ 3. WHEN THE USER SAYS "remember this" or "don't forget": Always call store_memory with importance 0.9.
16
+ 4. WHEN THE USER SAYS "forget this" or "delete that memory": Call search_memories to find the relevant memory, then call delete_memory with its ID.
17
+
18
+ ${userIdHint}
19
+
20
+ RULES:
21
+ - Do not store transient conversational filler. Only store facts useful in a future conversation.
22
+ - Do not announce memory operations unless the user asks. Load and store silently.
23
+ - If get_user_context returns memories, incorporate them naturally into your understanding.
24
+ - Smara handles deduplication automatically — don't worry about storing something twice.`,
9
25
  });
10
26
  // ── Store a memory ──────────────────────────────────
11
27
  server.registerTool("store_memory", {
@@ -21,18 +37,28 @@ server.registerTool("store_memory", {
21
37
  .optional()
22
38
  .default(0.5)
23
39
  .describe("Importance score (0-1). Higher = slower decay."),
40
+ namespace: z
41
+ .string()
42
+ .optional()
43
+ .describe("Memory namespace for isolation (default: from env or 'default')"),
24
44
  },
25
- }, async ({ user_id, fact, importance }) => {
45
+ }, async ({ user_id, fact, importance, namespace }) => {
26
46
  const data = await smaraFetch("/v1/memories", {
27
47
  method: "POST",
28
- body: JSON.stringify({ user_id, fact, importance }),
48
+ body: JSON.stringify({
49
+ user_id,
50
+ fact,
51
+ importance,
52
+ source: DEFAULT_SOURCE,
53
+ namespace: namespace || DEFAULT_NAMESPACE,
54
+ }),
29
55
  });
30
56
  return { content: [{ type: "text", text: JSON.stringify(data, null, 2) }] };
31
57
  });
32
58
  // ── Search memories ─────────────────────────────────
33
59
  server.registerTool("search_memories", {
34
60
  title: "Search Memories",
35
- description: "Semantic search across stored memories for a user. Returns results ranked by a blend of vector similarity (70%) and Ebbinghaus decay score (30%). Recent, frequently-accessed memories rank higher.",
61
+ description: "Semantic search across stored memories for a user. Ranked by Temporal Memory Scoring balances semantic relevance with memory freshness and importance.",
36
62
  inputSchema: {
37
63
  user_id: z.string().describe("User to search memories for"),
38
64
  q: z.string().describe("Natural language search query"),
@@ -43,16 +69,25 @@ server.registerTool("search_memories", {
43
69
  .optional()
44
70
  .default(5)
45
71
  .describe("Max results to return"),
72
+ namespace: z
73
+ .string()
74
+ .optional()
75
+ .describe("Memory namespace (default: from env or 'default')"),
46
76
  },
47
- }, async ({ user_id, q, limit }) => {
48
- const params = new URLSearchParams({ user_id, q, limit: String(limit) });
77
+ }, async ({ user_id, q, limit, namespace }) => {
78
+ const params = new URLSearchParams({
79
+ user_id,
80
+ q,
81
+ limit: String(limit),
82
+ namespace: namespace || DEFAULT_NAMESPACE,
83
+ });
49
84
  const data = await smaraFetch(`/v1/memories/search?${params}`);
50
85
  return { content: [{ type: "text", text: JSON.stringify(data, null, 2) }] };
51
86
  });
52
87
  // ── Get user context ────────────────────────────────
53
88
  server.registerTool("get_user_context", {
54
89
  title: "Get User Context",
55
- description: "Retrieve a pre-formatted context string for a user, ready to inject into an LLM system prompt. Returns the most relevant memories ranked by decay-aware scoring.",
90
+ description: "Retrieve a pre-formatted context string for a user, ready to inject into an LLM system prompt. Ranked by Temporal Memory Scoring. Can be called without a query to get the most important recent memories.",
56
91
  inputSchema: {
57
92
  user_id: z.string().describe("User to get context for"),
58
93
  q: z
@@ -66,9 +101,16 @@ server.registerTool("get_user_context", {
66
101
  .optional()
67
102
  .default(5)
68
103
  .describe("Number of top memories to include"),
104
+ namespace: z
105
+ .string()
106
+ .optional()
107
+ .describe("Memory namespace (default: from env or 'default')"),
69
108
  },
70
- }, async ({ user_id, q, top_n }) => {
71
- const params = new URLSearchParams({ top_n: String(top_n) });
109
+ }, async ({ user_id, q, top_n, namespace }) => {
110
+ const params = new URLSearchParams({
111
+ top_n: String(top_n),
112
+ namespace: namespace || DEFAULT_NAMESPACE,
113
+ });
72
114
  if (q)
73
115
  params.set("q", q);
74
116
  const data = await smaraFetch(`/v1/users/${encodeURIComponent(user_id)}/context?${params}`);
@@ -98,7 +140,7 @@ server.registerTool("get_usage", {
98
140
  async function main() {
99
141
  const transport = new StdioServerTransport();
100
142
  await server.connect(transport);
101
- console.error("Smara MCP Server running on stdio");
143
+ console.error("Smara MCP Server v2.0.0 running on stdio");
102
144
  }
103
145
  main().catch((error) => {
104
146
  console.error("Fatal error:", error);
package/package.json CHANGED
@@ -1,10 +1,10 @@
1
1
  {
2
2
  "name": "@smara/mcp-server",
3
- "version": "1.0.0",
3
+ "version": "2.0.1",
4
4
  "description": "MCP server for Smara Memory API — persistent memory for AI agents",
5
5
  "type": "module",
6
6
  "bin": {
7
- "smara-mcp": "./build/index.js"
7
+ "smara-mcp": "build/index.js"
8
8
  },
9
9
  "scripts": {
10
10
  "build": "tsc && chmod 755 build/index.js",
@@ -24,7 +24,7 @@
24
24
  "license": "MIT",
25
25
  "repository": {
26
26
  "type": "git",
27
- "url": "https://github.com/smara-io/mcp-server"
27
+ "url": "git+https://github.com/smara-io/mcp-server.git"
28
28
  },
29
29
  "homepage": "https://smara.io",
30
30
  "dependencies": {