@mcpjam/inspector 1.0.14 → 1.0.16

@@ -5,8 +5,8 @@
     <link rel="icon" type="image/svg+xml" href="/mcp_jam.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>MCPJam Inspector</title>
-    <script type="module" crossorigin src="/assets/index-BwfV9koe.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/index-DHyLrKBZ.css">
+    <script type="module" crossorigin src="/assets/index-CpKDh0m1.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/index-Ci2Mjec1.css">
   </head>
   <body>
     <div id="root"></div>
Binary file
@@ -1461,7 +1461,7 @@ resources.post("/read", async (c) => {
 resources.post("/widget/store", async (c) => {
   try {
     const body = await c.req.json();
-    const { serverId, uri, toolInput, toolOutput, toolId } = body;
+    const { serverId, uri, toolInput, toolOutput, toolResponseMetadata, toolId } = body;
     if (!serverId || !uri || !toolId) {
       return c.json({ success: false, error: "Missing required fields" }, 400);
     }
@@ -1470,6 +1470,7 @@ resources.post("/widget/store", async (c) => {
       uri,
       toolInput,
       toolOutput,
+      toolResponseMetadata: toolResponseMetadata ?? null,
       toolId,
       timestamp: Date.now()
     });
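With both /widget/store hunks applied, the endpoint accepts an optional toolResponseMetadata field and persists it as null when omitted. A minimal sketch of a client call against the new shape; the host, port, and mount path of the resources router are assumptions, as are all payload values:

```js
// Hypothetical call against the updated /widget/store endpoint.
// The URL prefix and all payload values are illustrative, not taken
// from the package.
const res = await fetch("http://localhost:3001/api/mcp/resources/widget/store", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    serverId: "my-server",          // required
    uri: "ui://widget/chart.html",  // required
    toolId: "call_abc123",          // required
    toolInput: { symbol: "AAPL" },
    toolOutput: { price: 189.5 },
    toolResponseMetadata: { cached: false }, // new in 1.0.16; stored as null when omitted
  }),
});
const { success } = await res.json();
```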
@@ -1531,7 +1532,7 @@ resources.get("/widget-content/:toolId", async (c) => {
         404
       );
     }
-    const { serverId, uri, toolInput, toolOutput } = widgetData;
+    const { serverId, uri, toolInput, toolOutput, toolResponseMetadata } = widgetData;
     const mcpClientManager2 = c.mcpClientManager;
     const availableServers = mcpClientManager2.listServers().filter((id) => Boolean(mcpClientManager2.getClient(id)));
     let actualServerId = serverId;
@@ -1588,6 +1589,7 @@ resources.get("/widget-content/:toolId", async (c) => {
 const openaiAPI = {
   toolInput: ${JSON.stringify(toolInput)},
   toolOutput: ${JSON.stringify(toolOutput)},
+  toolResponseMetadata: ${JSON.stringify(toolResponseMetadata ?? null)},
   displayMode: 'inline',
   maxHeight: 600,
   theme: 'dark',
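The injected openaiAPI object now carries toolResponseMetadata alongside toolInput and toolOutput. A sketch of widget-side code consuming it, assuming the host exposes the object as window.openai (that assignment is not shown in this diff) and assuming a hypothetical lastUpdated field in the metadata:

```js
// Hypothetical widget-side code; assumes the injected openaiAPI object
// is reachable as window.openai, which this diff does not confirm.
const { toolInput, toolOutput, toolResponseMetadata } = window.openai;

// toolResponseMetadata is null when the tool call carried no metadata,
// so guard before reading from it. lastUpdated is a made-up example field.
if (toolResponseMetadata?.lastUpdated) {
  document.getElementById("updated-at").textContent =
    new Date(toolResponseMetadata.lastUpdated).toLocaleString();
}
```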
@@ -1661,6 +1663,19 @@ resources.get("/widget-content/:toolId", async (c) => {
   async sendFollowUpMessage(args) {
     const prompt = typeof args === 'string' ? args : (args?.prompt || '');
     return this.sendFollowupTurn(prompt);
+  },
+
+  async openExternal(options) {
+    const href = typeof options === 'string' ? options : options?.href;
+    if (!href) {
+      throw new Error('href is required for openExternal');
+    }
+    window.parent.postMessage({
+      type: 'openai:openExternal',
+      href
+    }, '*');
+    // Also open in new tab as fallback
+    window.open(href, '_blank', 'noopener,noreferrer');
   }
 };
 
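The new openExternal method signals the embedding page via postMessage with targetOrigin '*', then opens the link itself as a fallback. A host page that wants to intercept those messages might listen as sketched below; the protocol check and URL validation are illustrative hardening, not part of the package:

```js
// Hypothetical host-side listener for the message emitted by openExternal.
window.addEventListener("message", (event) => {
  const data = event.data;
  if (!data || data.type !== "openai:openExternal") return;

  // The widget posts with targetOrigin "*", so validate before acting.
  let url;
  try {
    url = new URL(data.href);
  } catch {
    return; // not a parseable URL
  }
  if (url.protocol === "https:" || url.protocol === "http:") {
    window.open(url.href, "_blank", "noopener,noreferrer");
  }
});
```

Note that the widget already calls window.open itself as a fallback, so a host that also opens the link will open it twice; any deduplication is left to the host.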
@@ -1866,7 +1881,7 @@ import { createDeepSeek } from "@ai-sdk/deepseek";
 import { createGoogleGenerativeAI } from "@ai-sdk/google";
 import { createOpenAI } from "@ai-sdk/openai";
 import { createOllama } from "ollama-ai-provider-v2";
-var createLlmModel = (modelDefinition, apiKey, ollamaBaseUrl) => {
+var createLlmModel = (modelDefinition, apiKey, ollamaBaseUrl, litellmBaseUrl) => {
   if (!modelDefinition?.id || !modelDefinition?.provider) {
     throw new Error(
       `Invalid model definition: ${JSON.stringify(modelDefinition)}`
@@ -1886,6 +1901,15 @@ var createLlmModel = (modelDefinition, apiKey, ollamaBaseUrl) => {
       const normalized = /\/api\/?$/.test(raw) ? raw : `${raw.replace(/\/+$/, "")}/api`;
       return createOllama({ baseURL: normalized })(modelDefinition.id);
     }
+    case "litellm": {
+      const baseURL = litellmBaseUrl || "http://localhost:4000";
+      const openai = createOpenAI({
+        apiKey: apiKey || "dummy-key",
+        // LiteLLM may not require API key depending on setup
+        baseURL
+      });
+      return openai.chat(modelDefinition.id);
+    }
     default:
       throw new Error(
         `Unsupported provider: ${modelDefinition.provider} for model: ${modelDefinition.id}`
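The litellm branch reuses the OpenAI-compatible client, pointing it at a configurable proxy URL that defaults to LiteLLM's conventional local port 4000. A sketch of calling the updated factory; the model id and environment variable name are examples, not values shipped in the package:

```js
// Hypothetical call into the updated factory.
const model = createLlmModel(
  { id: "gpt-4o-mini", provider: "litellm" }, // any model the proxy routes
  process.env.LITELLM_API_KEY || "",          // falls back to "dummy-key" internally
  undefined,                                   // ollamaBaseUrl (unused here)
  "http://localhost:4000"                      // litellmBaseUrl; also the default
);
```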
@@ -2538,6 +2562,7 @@ chat.post("/", async (c) => {
     temperature,
     messages,
     ollamaBaseUrl: _ollama_unused,
+    litellmBaseUrl: _litellm_unused,
     action,
     requestId,
     response
@@ -2659,7 +2684,8 @@ chat.post("/", async (c) => {
     const llmModel = createLlmModel(
       model,
       apiKey || "",
-      _ollama_unused
+      _ollama_unused,
+      _litellm_unused
     );
     await createStreamingResponse(
       llmModel,
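End to end, the chat endpoint now destructures litellmBaseUrl from the request body and threads it into createLlmModel. A sketch of a request exercising the new provider; the route path and port are assumptions inferred from the router name, not confirmed by this diff:

```js
// Hypothetical request to the chat endpoint; the /api/mcp/chat path and
// port are guesses, and the body fields follow the destructuring above.
await fetch("http://localhost:3001/api/mcp/chat", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    model: { id: "gpt-4o-mini", provider: "litellm" },
    apiKey: "",                               // many LiteLLM setups accept any key
    litellmBaseUrl: "http://localhost:4000",  // new in 1.0.16
    temperature: 0.7,
    messages: [{ role: "user", content: "Hello" }],
  }),
});
```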