prism-mcp-server 4.6.0 → 4.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +102 -10
- package/dist/server.js +9 -3
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -271,8 +271,11 @@
|
|
|
271
271
|
| **Auto-Compaction** | ✅ Gemini rollups | ❌ | ❌ | ❌ | ❌ |
|
|
272
272
|
| **Morning Briefing** | ✅ Gemini synthesis | ❌ | ❌ | ❌ | ❌ |
|
|
273
273
|
| **OCC (Concurrency)** | ✅ Version-based | ❌ | ❌ | ❌ | ❌ |
|
|
274
|
-
| **GDPR Compliance** | ✅ Soft/hard delete | ❌ | ❌ | ❌ | ❌ |
|
|
274
|
+
| **GDPR Compliance** | ✅ Soft/hard delete + ZIP export | ❌ | ❌ | ❌ | ❌ |
|
|
275
275
|
| **Memory Tracing** | ✅ Latency breakdown | ❌ | ❌ | ❌ | ❌ |
|
|
276
|
+
| **OpenTelemetry** | ✅ OTLP spans (v4.6) | ❌ | ❌ | ❌ | ❌ |
|
|
277
|
+
| **VLM Image Captions** | ✅ Auto-caption vault (v4.5) | ❌ | ❌ | ❌ | ❌ |
|
|
278
|
+
| **Pluggable LLM Adapters** | ✅ OpenAI/Anthropic/Gemini/Ollama | ❌ | ✅ Multi-provider | ❌ | ❌ |
|
|
276
279
|
| **LangChain** | ✅ BaseRetriever | ❌ | ❌ | ❌ | ❌ |
|
|
277
280
|
| **MCP Native** | ✅ stdio | ✅ stdio | ❌ Python SDK | ✅ HTTP + MCP | ✅ stdio |
|
|
278
281
|
| **Language** | TypeScript | TypeScript | Python | Python | Python |
|
|
@@ -586,15 +589,104 @@ At the end of each session, save state:
|
|
|
586
589
|
|
|
587
590
|
## Use Cases
|
|
588
591
|
|
|
589
|
-
| Scenario | How Prism MCP Helps |
|
|
590
|
-
|
|
591
|
-
| **Long-running feature work** | Save session state at end of day, restore full context
|
|
592
|
-
| **Multi-agent collaboration** | Telepathy
|
|
593
|
-
| **Consulting / multi-project** | Switch between client projects with progressive context loading |
|
|
594
|
-
| **Research & analysis** | Multi-engine search with 94% context reduction via sandboxed code transforms |
|
|
595
|
-
| **Team onboarding** | New team member's agent loads full project history
|
|
596
|
-
| **Visual debugging** | Save screenshots
|
|
597
|
-
| **Offline / air-gapped** | Full SQLite local mode
|
|
592
|
+
| Scenario | How Prism MCP Helps | Live Sample |
|
|
593
|
+
|----------|---------------------|-------------|
|
|
594
|
+
| **Long-running feature work** | Save session state at end of day, restore full context next morning — no re-explaining | `session_save_handoff(project, last_summary, open_todos)` |
|
|
595
|
+
| **Multi-agent collaboration** | Hivemind Telepathy lets multiple agents share real-time context across clients | `session_load_context(project, role="qa")` |
|
|
596
|
+
| **Consulting / multi-project** | Switch between client projects with progressive context loading | `session_load_context(project, level="quick")` |
|
|
597
|
+
| **Research & analysis** | Multi-engine search with 94% context reduction via sandboxed code transforms | `brave_web_search` + `code_mode_transform(template="api_endpoints")` |
|
|
598
|
+
| **Team onboarding** | New team member's agent loads full project history instantly | `session_load_context(project, level="deep")` |
|
|
599
|
+
| **Visual debugging** | Save UI screenshots to visual memory — searchable by description | `session_save_image(project, path, description)` → `session_view_image(id)` |
|
|
600
|
+
| **Offline / air-gapped** | Full SQLite local mode, Ollama LLM adapter — zero internet dependency | `PRISM_LLM_PROVIDER=ollama` in MCP config env |
|
|
601
|
+
| **Behavior enforcement** | Agent corrections auto-graduate into permanent `.cursorrules` | `session_save_experience(event_type="correction")` → `knowledge_sync_rules(project)` |
|
|
602
|
+
| **Infrastructure observability** | OTel spans to Jaeger/Grafana for every MCP tool call fanout | Enable in Dashboard → Settings → 🔭 Observability |
|
|
603
|
+
| **GDPR / audit export** | ZIP export of all memory as JSON + Markdown, sensitive fields redacted | `session_export_memory(project, format="zip")` |
|
|
604
|
+
|
|
605
|
+
---
|
|
606
|
+
|
|
607
|
+
## New in v4.6.0 — Feature Setup Guide
|
|
608
|
+
|
|
609
|
+
### 🔭 OpenTelemetry Distributed Tracing
|
|
610
|
+
|
|
611
|
+
**Why:** Every `session_save_ledger` call can silently fan out into a synchronous DB write, an async VLM caption, and a vector embedding backfill. Without tracing, these are invisible. OTel makes the full call tree visible in Jaeger, Grafana Tempo, or any OTLP-compatible collector.
|
|
612
|
+
|
|
613
|
+
**Setup:**
|
|
614
|
+
1. Open Mind Palace Dashboard → ⚙️ Settings → 🔭 Observability
|
|
615
|
+
2. Toggle **Enable OpenTelemetry** → set your OTLP endpoint (default: `http://localhost:4318`)
|
|
616
|
+
3. Restart the MCP server
|
|
617
|
+
4. Run Jaeger locally:
|
|
618
|
+
```bash
|
|
619
|
+
docker run -d --name jaeger \
|
|
620
|
+
-p 16686:16686 -p 4318:4318 \
|
|
621
|
+
jaegertracing/all-in-one:latest
|
|
622
|
+
```
|
|
623
|
+
5. Open http://localhost:16686 — select service `prism-mcp` to see span waterfalls.
|
|
624
|
+
|
|
625
|
+
**Span hierarchy:**
|
|
626
|
+
```
|
|
627
|
+
mcp.call_tool [session_save_ledger]
|
|
628
|
+
├── storage.write_ledger ~2ms
|
|
629
|
+
├── llm.generate_embedding ~180ms
|
|
630
|
+
└── worker.vlm_caption (async) ~1.2s
|
|
631
|
+
```
|
|
632
|
+
|
|
633
|
+
> GDPR note: Span attributes contain only metadata — no prompt content, embeddings, or image data.
|
|
634
|
+
|
|
635
|
+
---
|
|
636
|
+
|
|
637
|
+
### 🖼️ VLM Multimodal Memory
|
|
638
|
+
|
|
639
|
+
**Why:** Agents lose visual context between sessions. UI screenshots, architecture diagrams, and bug states all become searchable memory.
|
|
640
|
+
|
|
641
|
+
**Setup:** Requires `ANTHROPIC_API_KEY` or `OPENAI_API_KEY` (vision-capable model).
|
|
642
|
+
|
|
643
|
+
**Usage:**
|
|
644
|
+
```
|
|
645
|
+
session_save_image(project="my-app", file_path="/path/to/screenshot.png", description="Login page broken layout after CSS refactor")
|
|
646
|
+
```
|
|
647
|
+
The image is auto-captioned by a VLM and stored in the media vault. Retrieve later:
|
|
648
|
+
```
|
|
649
|
+
session_view_image(project="my-app", image_id="8f2a1b3c")
|
|
650
|
+
```
|
|
651
|
+
|
|
652
|
+
---
|
|
653
|
+
|
|
654
|
+
### 🔌 Pluggable LLM Adapters
|
|
655
|
+
|
|
656
|
+
**Why:** Run fully local/air-gapped with Ollama, or switch providers without changing tool logic.
|
|
657
|
+
|
|
658
|
+
**Setup:** Set in MCP config `env`:
|
|
659
|
+
|
|
660
|
+
```json
|
|
661
|
+
{
|
|
662
|
+
"env": {
|
|
663
|
+
"PRISM_LLM_PROVIDER": "ollama",
|
|
664
|
+
"PRISM_LLM_MODEL": "llama3.2",
|
|
665
|
+
"PRISM_LLM_BASE_URL": "http://localhost:11434"
|
|
666
|
+
}
|
|
667
|
+
}
|
|
668
|
+
```
|
|
669
|
+
|
|
670
|
+
| Provider | Env Var | Notes |
|
|
671
|
+
|----------|---------|-------|
|
|
672
|
+
| `gemini` (default) | `GOOGLE_API_KEY` | Best for Morning Briefings |
|
|
673
|
+
| `openai` | `OPENAI_API_KEY` | GPT-4o supports VLM |
|
|
674
|
+
| `anthropic` | `ANTHROPIC_API_KEY` | Claude 3.5 supports VLM |
|
|
675
|
+
| `ollama` | none | Full local/air-gapped mode |
|
|
676
|
+
|
|
677
|
+
---
|
|
678
|
+
|
|
679
|
+
### 📦 GDPR Memory Export
|
|
680
|
+
|
|
681
|
+
```
|
|
682
|
+
session_export_memory(project="my-app", format="zip")
|
|
683
|
+
```
|
|
684
|
+
|
|
685
|
+
Outputs a ZIP containing:
|
|
686
|
+
- `ledger.json` — all session entries
|
|
687
|
+
- `handoffs.json` — all project state snapshots
|
|
688
|
+
- `knowledge.md` — graduated insights in Markdown
|
|
689
|
+
- Sensitive fields (API keys, tokens) automatically redacted
|
|
598
690
|
|
|
599
691
|
---
|
|
600
692
|
|
package/dist/server.js
CHANGED
|
@@ -134,14 +134,20 @@ const BASE_TOOLS = [
|
|
|
134
134
|
// initConfigStorage() has run, so getSettingSync() returns real
|
|
135
135
|
// dashboard config values.
|
|
136
136
|
function buildSessionMemoryTools(autoloadList) {
|
|
137
|
-
// Dynamically inject auto-load instruction into session_load_context description
|
|
137
|
+
// Dynamically inject auto-load instruction into session_load_context description.
|
|
138
|
+
// CRITICAL: The instruction is PREPENDED so it appears FIRST in the tool description.
|
|
139
|
+
// LLMs read top-to-bottom — appending the instruction at the end caused it to be
|
|
140
|
+
// deprioritized and skipped by both Antigravity and Claude CLI.
|
|
138
141
|
let loadContextTool = SESSION_LOAD_CONTEXT_TOOL;
|
|
139
142
|
if (autoloadList.length > 0) {
|
|
140
143
|
const projectList = autoloadList.join(", ");
|
|
144
|
+
const autoLoadPrefix = `IMPORTANT: At the start of every conversation, you MUST call this tool ` +
|
|
145
|
+
`for each of these projects: ${projectList}. Do this before any other work. ` +
|
|
146
|
+
`Load level should be "standard" unless the user specifies otherwise. ` +
|
|
147
|
+
`Do NOT skip this. Do NOT claim the tool is unavailable. The tool IS available — just call it.\n\n`;
|
|
141
148
|
loadContextTool = {
|
|
142
149
|
...SESSION_LOAD_CONTEXT_TOOL,
|
|
143
|
-
description: SESSION_LOAD_CONTEXT_TOOL.description +
|
|
144
|
-
`\n\nIMPORTANT: At the start of every conversation, you MUST call this tool for each of these projects: ${projectList}. Do this before any other work. Load level should be "standard" unless the user specifies otherwise.`,
|
|
150
|
+
description: autoLoadPrefix + SESSION_LOAD_CONTEXT_TOOL.description,
|
|
145
151
|
};
|
|
146
152
|
}
|
|
147
153
|
return [
|
package/package.json
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "prism-mcp-server",
|
|
3
|
-
"version": "4.6.0",
|
|
3
|
+
"version": "4.6.1",
|
|
4
4
|
"mcpName": "io.github.dcostenco/prism-mcp",
|
|
5
|
-
"description": "The Mind Palace for AI Agents
|
|
5
|
+
"description": "The Mind Palace for AI Agents — persistent memory (SQLite/Supabase), behavioral learning & IDE rules sync, multimodal VLM image captioning, pluggable LLM providers (OpenAI/Anthropic/Gemini/Ollama), OpenTelemetry distributed tracing, GDPR export, multi-agent Hivemind sync, time travel, visual Mind Palace dashboard. Zero-config local mode.",
|
|
6
6
|
"module": "index.ts",
|
|
7
7
|
"type": "module",
|
|
8
8
|
"main": "dist/server.js",
|