@mcpjam/inspector 1.0.20 → 1.0.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -39,7 +39,7 @@ import * as Sentry2 from "@sentry/node";
  import { serve } from "@hono/node-server";
  import dotenv from "dotenv";
  import fixPath from "fix-path";
- import { Hono as Hono13 } from "hono";
+ import { Hono as Hono15 } from "hono";
  import { HTTPException } from "hono/http-exception";
  import { cors } from "hono/cors";
  import { logger } from "hono/logger";
@@ -885,7 +885,7 @@ var MCPClientManager = class {
  };

  // routes/mcp/index.ts
- import { Hono as Hono12 } from "hono";
+ import { Hono as Hono14 } from "hono";

  // routes/mcp/connect.ts
  import { Hono } from "hono";
@@ -1472,7 +1472,8 @@ resources.post("/widget/store", async (c) => {
  toolInput,
  toolOutput,
  toolResponseMetadata,
- toolId
+ toolId,
+ theme
  } = body;
  if (!serverId || !uri || !toolId) {
  return c.json({ success: false, error: "Missing required fields" }, 400);
@@ -1484,6 +1485,7 @@ resources.post("/widget/store", async (c) => {
  toolOutput,
  toolResponseMetadata: toolResponseMetadata ?? null,
  toolId,
+ theme: theme ?? "dark",
  timestamp: Date.now()
  });
  return c.json({ success: true });
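The `/widget/store` handler above now persists an optional `theme` alongside the stored tool payload, falling back to "dark" when it is omitted. A minimal sketch of a caller passing it; the host, serverId, uri, and toolId values are illustrative placeholders, not taken from this diff:

```ts
// Hypothetical call to the widget store endpoint shown above.
// Host and identifiers are placeholders; only the body shape comes from the diff.
await fetch("http://localhost:3001/mcp/resources/widget/store", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    serverId: "my-server",   // required
    uri: "ui://widget/demo", // required
    toolId: "call_123",      // required
    toolInput: {},
    toolOutput: {},
    toolResponseMetadata: null,
    theme: "light"           // new optional field; defaults to "dark"
  })
});
```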
@@ -1544,7 +1546,14 @@ resources.get("/widget-content/:toolId", async (c) => {
  404
  );
  }
- const { serverId, uri, toolInput, toolOutput, toolResponseMetadata } = widgetData;
+ const {
+ serverId,
+ uri,
+ toolInput,
+ toolOutput,
+ toolResponseMetadata,
+ theme
+ } = widgetData;
  const mcpClientManager2 = c.mcpClientManager;
  const availableServers = mcpClientManager2.listServers().filter((id) => Boolean(mcpClientManager2.getClient(id)));
  let actualServerId = serverId;
@@ -1604,10 +1613,13 @@ resources.get("/widget-content/:toolId", async (c) => {
  toolResponseMetadata: ${JSON.stringify(toolResponseMetadata ?? null)},
  displayMode: 'inline',
  maxHeight: 600,
- theme: 'dark',
+ theme: ${JSON.stringify(theme ?? "dark")},
  locale: 'en-US',
  safeArea: { insets: { top: 0, bottom: 0, left: 0, right: 0 } },
- userAgent: {},
+ userAgent: {
+ device: { type: 'desktop' },
+ capabilities: { hover: true, touch: false }
+ },
  widgetState: null,

  async setWidgetState(state) {
@@ -1731,6 +1743,28 @@ resources.get("/widget-content/:toolId", async (c) => {
  }
  } catch (err) {}
  }, 0);
+
+ // Listen for theme changes from parent
+ window.addEventListener('message', (event) => {
+ if (event.data.type === 'webplus:set_globals') {
+ const { globals } = event.data;
+
+ // Update theme if provided
+ if (globals?.theme && window.openai) {
+ window.openai.theme = globals.theme;
+
+ // Dispatch event for widgets that use useTheme() hook
+ try {
+ const globalsEvent = new CustomEvent('webplus:set_globals', {
+ detail: { globals: { theme: globals.theme } }
+ });
+ window.dispatchEvent(globalsEvent);
+ } catch (err) {
+ console.error('[OpenAI Widget] Failed to dispatch theme change:', err);
+ }
+ }
+ }
+ });
  })();
  </script>
  `;
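The injected widget script above now listens for `webplus:set_globals` messages, so the embedding page can switch the widget theme after it has loaded. A hedged parent-side sketch; the iframe element id is a placeholder, only the message shape matches the listener in the diff:

```ts
// Hypothetical parent-side snippet: push a theme change into the widget iframe.
const frame = document.getElementById("widget-frame") as HTMLIFrameElement;
frame.contentWindow?.postMessage(
  { type: "webplus:set_globals", globals: { theme: "light" } },
  "*"
);
```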
@@ -1875,224 +1909,6 @@ var MCPJAM_PROVIDED_MODEL_IDS = [
  var isMCPJamProvidedModel = (modelId) => {
  return MCPJAM_PROVIDED_MODEL_IDS.includes(modelId);
  };
- var Model = /* @__PURE__ */ ((Model2) => {
- Model2["CLAUDE_OPUS_4_0"] = "claude-opus-4-0";
- Model2["CLAUDE_SONNET_4_0"] = "claude-sonnet-4-0";
- Model2["CLAUDE_3_7_SONNET_LATEST"] = "claude-3-7-sonnet-latest";
- Model2["CLAUDE_3_5_SONNET_LATEST"] = "claude-3-5-sonnet-latest";
- Model2["CLAUDE_3_5_HAIKU_LATEST"] = "claude-3-5-haiku-latest";
- Model2["GPT_4_1"] = "gpt-4.1";
- Model2["GPT_4_1_MINI"] = "gpt-4.1-mini";
- Model2["GPT_4_1_NANO"] = "gpt-4.1-nano";
- Model2["GPT_4O"] = "gpt-4o";
- Model2["GPT_4O_MINI"] = "gpt-4o-mini";
- Model2["GPT_4_TURBO"] = "gpt-4-turbo";
- Model2["GPT_4"] = "gpt-4";
- Model2["GPT_5"] = "gpt-5";
- Model2["GPT_5_MINI"] = "gpt-5-mini";
- Model2["GPT_5_NANO"] = "gpt-5-nano";
- Model2["GPT_5_MAIN"] = "openai/gpt-5";
- Model2["GPT_5_PRO"] = "gpt-5-pro";
- Model2["GPT_5_CODEX"] = "gpt-5-codex";
- Model2["GPT_3_5_TURBO"] = "gpt-3.5-turbo";
- Model2["DEEPSEEK_CHAT"] = "deepseek-chat";
- Model2["DEEPSEEK_REASONER"] = "deepseek-reasoner";
- Model2["GEMINI_2_5_PRO"] = "gemini-2.5-pro";
- Model2["GEMINI_2_5_FLASH"] = "gemini-2.5-flash";
- Model2["GEMINI_2_5_FLASH_LITE"] = "gemini-2.5-flash-lite";
- Model2["GEMINI_2_0_FLASH_EXP"] = "gemini-2.0-flash-exp";
- Model2["GEMINI_1_5_PRO"] = "gemini-1.5-pro";
- Model2["GEMINI_1_5_PRO_002"] = "gemini-1.5-pro-002";
- Model2["GEMINI_1_5_FLASH"] = "gemini-1.5-flash";
- Model2["GEMINI_1_5_FLASH_002"] = "gemini-1.5-flash-002";
- Model2["GEMINI_1_5_FLASH_8B"] = "gemini-1.5-flash-8b";
- Model2["GEMINI_1_5_FLASH_8B_001"] = "gemini-1.5-flash-8b-001";
- Model2["GEMMA_3_2B"] = "gemma-3-2b";
- Model2["GEMMA_3_9B"] = "gemma-3-9b";
- Model2["GEMMA_3_27B"] = "gemma-3-27b";
- Model2["GEMMA_2_2B"] = "gemma-2-2b";
- Model2["GEMMA_2_9B"] = "gemma-2-9b";
- Model2["GEMMA_2_27B"] = "gemma-2-27b";
- Model2["CODE_GEMMA_2B"] = "codegemma-2b";
- Model2["CODE_GEMMA_7B"] = "codegemma-7b";
- Model2["MISTRAL_LARGE_LATEST"] = "mistral-large-latest";
- Model2["MISTRAL_SMALL_LATEST"] = "mistral-small-latest";
- Model2["CODESTRAL_LATEST"] = "codestral-latest";
- Model2["MINISTRAL_8B_LATEST"] = "ministral-8b-latest";
- Model2["MINISTRAL_3B_LATEST"] = "ministral-3b-latest";
- return Model2;
- })(Model || {});
- var SUPPORTED_MODELS = [
- {
- id: "claude-opus-4-0" /* CLAUDE_OPUS_4_0 */,
- name: "Claude Opus 4",
- provider: "anthropic"
- },
- {
- id: "claude-sonnet-4-0" /* CLAUDE_SONNET_4_0 */,
- name: "Claude Sonnet 4",
- provider: "anthropic"
- },
- {
- id: "claude-3-7-sonnet-latest" /* CLAUDE_3_7_SONNET_LATEST */,
- name: "Claude Sonnet 3.7",
- provider: "anthropic"
- },
- {
- id: "claude-3-5-sonnet-latest" /* CLAUDE_3_5_SONNET_LATEST */,
- name: "Claude Sonnet 3.5",
- provider: "anthropic"
- },
- {
- id: "claude-3-5-haiku-latest" /* CLAUDE_3_5_HAIKU_LATEST */,
- name: "Claude Haiku 3.5",
- provider: "anthropic"
- },
- { id: "gpt-5" /* GPT_5 */, name: "GPT-5", provider: "openai" },
- { id: "gpt-5-mini" /* GPT_5_MINI */, name: "GPT-5 Mini", provider: "openai" },
- { id: "gpt-5-nano" /* GPT_5_NANO */, name: "GPT-5 Nano", provider: "openai" },
- { id: Model.GPT_5_CHAT_LATEST, name: "GPT-5 Chat", provider: "openai" },
- { id: "gpt-5-pro" /* GPT_5_PRO */, name: "GPT-5 Pro", provider: "openai" },
- { id: "gpt-5-codex" /* GPT_5_CODEX */, name: "GPT-5 Codex", provider: "openai" },
- { id: "gpt-4.1" /* GPT_4_1 */, name: "GPT-4.1", provider: "openai" },
- { id: "gpt-4.1-mini" /* GPT_4_1_MINI */, name: "GPT-4.1 Mini", provider: "openai" },
- { id: "gpt-4.1-nano" /* GPT_4_1_NANO */, name: "GPT-4.1 Nano", provider: "openai" },
- { id: "gpt-4o" /* GPT_4O */, name: "GPT-4o", provider: "openai" },
- { id: "gpt-4o-mini" /* GPT_4O_MINI */, name: "GPT-4o Mini", provider: "openai" },
- { id: "deepseek-chat" /* DEEPSEEK_CHAT */, name: "DeepSeek Chat", provider: "deepseek" },
- {
- id: "deepseek-reasoner" /* DEEPSEEK_REASONER */,
- name: "DeepSeek Reasoner",
- provider: "deepseek"
- },
- // Google Gemini models (latest first)
- {
- id: "gemini-2.5-pro" /* GEMINI_2_5_PRO */,
- name: "Gemini 2.5 Pro",
- provider: "google"
- },
- {
- id: "gemini-2.5-flash" /* GEMINI_2_5_FLASH */,
- name: "Gemini 2.5 Flash",
- provider: "google"
- },
- {
- id: "gemini-2.0-flash-exp" /* GEMINI_2_0_FLASH_EXP */,
- name: "Gemini 2.0 Flash Experimental",
- provider: "google"
- },
- {
- id: "gemini-1.5-pro-002" /* GEMINI_1_5_PRO_002 */,
- name: "Gemini 1.5 Pro 002",
- provider: "google"
- },
- {
- id: "gemini-1.5-pro" /* GEMINI_1_5_PRO */,
- name: "Gemini 1.5 Pro",
- provider: "google"
- },
- {
- id: "gemini-1.5-flash-002" /* GEMINI_1_5_FLASH_002 */,
- name: "Gemini 1.5 Flash 002",
- provider: "google"
- },
- {
- id: "gemini-1.5-flash" /* GEMINI_1_5_FLASH */,
- name: "Gemini 1.5 Flash",
- provider: "google"
- },
- {
- id: "meta-llama/llama-3.3-70b-instruct",
- name: "Llama 3.3 70B (Free)",
- provider: "meta"
- },
- {
- id: "openai/gpt-oss-120b",
- name: "GPT-OSS 120B (Free)",
- provider: "openai"
- },
- {
- id: "x-ai/grok-4-fast",
- name: "Grok 4 Fast (Free)",
- provider: "x-ai"
- },
- {
- id: "openai/gpt-5-nano",
- name: "GPT-5 Nano (Free)",
- provider: "openai"
- },
- {
- id: "anthropic/claude-sonnet-4.5",
- name: "Claude Sonnet 4.5 (Free)",
- provider: "anthropic"
- },
- {
- id: "anthropic/claude-haiku-4.5",
- name: "Claude Haiku 4.5 (Free)",
- provider: "anthropic"
- },
- {
- id: "openai/gpt-5-codex",
- name: "GPT-5 Codex (Free)",
- provider: "openai"
- },
- {
- id: "openai/gpt-5",
- name: "GPT-5 (Free)",
- provider: "openai"
- },
- {
- id: "openai/gpt-5-mini",
- name: "GPT-5 Mini (Free)",
- provider: "openai"
- },
- {
- id: "google/gemini-2.5-flash-preview-09-2025",
- name: "Gemini 2.5 Flash Preview (Free)",
- provider: "google"
- },
- {
- id: "moonshotai/kimi-k2-0905",
- name: "Kimi K2 (Free)",
- provider: "moonshotai"
- },
- {
- id: "google/gemini-2.5-flash",
- name: "Gemini 2.5 Flash (Free)",
- provider: "google"
- },
- {
- id: "z-ai/glm-4.6",
- name: "GLM 4.6 (Free)",
- provider: "z-ai"
- },
- // Mistral models
- {
- id: "mistral-large-latest" /* MISTRAL_LARGE_LATEST */,
- name: "Mistral Large",
- provider: "mistral"
- },
- {
- id: "mistral-small-latest" /* MISTRAL_SMALL_LATEST */,
- name: "Mistral Small",
- provider: "mistral"
- },
- {
- id: "codestral-latest" /* CODESTRAL_LATEST */,
- name: "Codestral",
- provider: "mistral"
- },
- {
- id: "ministral-8b-latest" /* MINISTRAL_8B_LATEST */,
- name: "Ministral 8B",
- provider: "mistral"
- },
- {
- id: "ministral-3b-latest" /* MINISTRAL_3B_LATEST */,
- name: "Ministral 3B",
- provider: "mistral"
- }
- ];

  // routes/mcp/chat.ts
  import { TextEncoder as TextEncoder2 } from "util";
@@ -3045,9 +2861,382 @@ chat.post("/", async (c) => {
  });
  var chat_default = chat;

- // routes/mcp/oauth.ts
+ // routes/mcp/chat-v2.ts
  import { Hono as Hono7 } from "hono";
- var oauth = new Hono7();
+ import {
+ convertToModelMessages,
+ streamText as streamText2,
+ stepCountIs,
+ createUIMessageStream,
+ createUIMessageStreamResponse
+ } from "ai";
+ import zodToJsonSchema2 from "zod-to-json-schema";
+ var DEFAULT_TEMPERATURE = 0.7;
+ var chatV2 = new Hono7();
+ chatV2.post("/", async (c) => {
+ try {
+ const body = await c.req.json();
+ const mcpClientManager2 = c.mcpClientManager;
+ const { messages, apiKey, model } = body;
+ if (!Array.isArray(messages) || messages.length === 0) {
+ return c.json({ error: "messages are required" }, 400);
+ }
+ const modelDefinition = model;
+ if (!modelDefinition) {
+ return c.json({ error: "model is not supported" }, 400);
+ }
+ const mcpTools = await mcpClientManager2.getToolsForAiSdk();
+ if (modelDefinition.id && isMCPJamProvidedModel(modelDefinition.id)) {
+ if (!process.env.CONVEX_HTTP_URL) {
+ return c.json(
+ { error: "Server missing CONVEX_HTTP_URL configuration" },
+ 500
+ );
+ }
+ const flattenedTools = mcpTools;
+ const toolDefs = [];
+ for (const [name, tool2] of Object.entries(flattenedTools)) {
+ if (!tool2) continue;
+ let serializedSchema;
+ const schema = tool2.inputSchema;
+ if (schema) {
+ if (typeof schema === "object" && schema !== null && "jsonSchema" in schema) {
+ serializedSchema = schema.jsonSchema;
+ } else {
+ try {
+ serializedSchema = zodToJsonSchema2(schema);
+ } catch {
+ serializedSchema = {
+ type: "object",
+ properties: {},
+ additionalProperties: false
+ };
+ }
+ }
+ }
+ toolDefs.push({
+ name,
+ description: tool2.description,
+ inputSchema: serializedSchema ?? {
+ type: "object",
+ properties: {},
+ additionalProperties: false
+ }
+ });
+ }
+ const authHeader = c.req.header("authorization") || void 0;
+ let messageHistory = convertToModelMessages(messages);
+ let steps = 0;
+ const MAX_STEPS2 = 20;
+ const stream = createUIMessageStream({
+ execute: async ({ writer }) => {
+ const msgId = `asst_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+ while (steps < MAX_STEPS2) {
+ const res = await fetch(`${process.env.CONVEX_HTTP_URL}/stream`, {
+ method: "POST",
+ headers: {
+ "content-type": "application/json",
+ ...authHeader ? { authorization: authHeader } : {}
+ },
+ body: JSON.stringify({
+ mode: "step",
+ messages: JSON.stringify(messageHistory),
+ model: String(modelDefinition.id),
+ temperature: body.temperature ?? DEFAULT_TEMPERATURE,
+ tools: toolDefs
+ })
+ });
+ if (!res.ok) {
+ const errorText = await res.text().catch(() => "step failed");
+ writer.write({ type: "error", errorText });
+ break;
+ }
+ const json = await res.json();
+ if (!json?.ok || !Array.isArray(json.messages)) {
+ break;
+ }
+ for (const m of json.messages) {
+ if (m?.role === "assistant" && Array.isArray(m.content)) {
+ for (const item of m.content) {
+ if (item?.type === "text" && typeof item.text === "string") {
+ writer.write({ type: "text-start", id: msgId });
+ writer.write({
+ type: "text-delta",
+ id: msgId,
+ delta: item.text
+ });
+ writer.write({ type: "text-end", id: msgId });
+ } else if (item?.type === "tool-call") {
+ if (item.input == null)
+ item.input = item.parameters ?? item.args ?? {};
+ if (!item.toolCallId)
+ item.toolCallId = `tc_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+ writer.write({
+ type: "tool-input-available",
+ toolCallId: item.toolCallId,
+ toolName: item.toolName ?? item.name,
+ input: item.input
+ });
+ }
+ }
+ }
+ messageHistory.push(m);
+ }
+ const beforeLen = messageHistory.length;
+ if (hasUnresolvedToolCalls(messageHistory)) {
+ await executeToolCallsFromMessages(messageHistory, {
+ clientManager: mcpClientManager2
+ });
+ }
+ const newMessages = messageHistory.slice(beforeLen);
+ for (const msg of newMessages) {
+ if (msg?.role === "tool" && Array.isArray(msg.content)) {
+ for (const item of msg.content) {
+ if (item?.type === "tool-result") {
+ writer.write({
+ type: "tool-output-available",
+ toolCallId: item.toolCallId,
+ output: item.output ?? item.result ?? item.value
+ });
+ }
+ }
+ }
+ }
+ steps++;
+ const finishReason = json.finishReason;
+ if (finishReason && finishReason !== "tool-calls") {
+ break;
+ }
+ }
+ }
+ });
+ return createUIMessageStreamResponse({ stream });
+ }
+ const llmModel = createLlmModel(
+ modelDefinition,
+ apiKey ?? "",
+ body.ollamaBaseUrl,
+ body.litellmBaseUrl
+ );
+ const result = streamText2({
+ model: llmModel,
+ messages: convertToModelMessages(messages),
+ temperature: body.temperature ?? DEFAULT_TEMPERATURE,
+ tools: mcpTools,
+ stopWhen: stepCountIs(20)
+ });
+ return result.toUIMessageStreamResponse();
+ } catch (error) {
+ console.error("[mcp/chat-v2] failed to process chat request", error);
+ return c.json({ error: "Unexpected error" }, 500);
+ }
+ });
+ var chat_v2_default = chatV2;
+
+ // routes/mcp/oauth.ts
+ import { Hono as Hono8 } from "hono";
+ var oauth = new Hono8();
+ oauth.post("/debug/proxy", async (c) => {
+ try {
+ const {
+ url,
+ method = "GET",
+ body,
+ headers: customHeaders
+ } = await c.req.json();
+ if (!url) {
+ return c.json({ error: "Missing url parameter" }, 400);
+ }
+ let targetUrl;
+ try {
+ targetUrl = new URL(url);
+ if (targetUrl.protocol !== "https:" && targetUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
+ }
+ } catch (error) {
+ return c.json({ error: "Invalid URL format" }, 400);
+ }
+ const requestHeaders = {
+ "User-Agent": "MCP-Inspector/1.0",
+ ...customHeaders
+ };
+ console.log("[OAuth Debug Proxy]");
+ console.log(" URL:", url);
+ console.log(" Method:", method);
+ console.log(" Headers:", requestHeaders);
+ if (body) {
+ console.log(" Body:", JSON.stringify(body, null, 2));
+ }
+ const contentType = customHeaders?.["Content-Type"] || customHeaders?.["content-type"];
+ const isFormUrlEncoded = contentType?.includes(
+ "application/x-www-form-urlencoded"
+ );
+ if (method === "POST" && body && !contentType) {
+ requestHeaders["Content-Type"] = "application/json";
+ }
+ const fetchOptions = {
+ method,
+ headers: requestHeaders
+ };
+ if (method === "POST" && body) {
+ if (isFormUrlEncoded && typeof body === "object") {
+ const params = new URLSearchParams();
+ for (const [key, value] of Object.entries(body)) {
+ params.append(key, String(value));
+ }
+ fetchOptions.body = params.toString();
+ } else {
+ fetchOptions.body = JSON.stringify(body);
+ }
+ }
+ const response = await fetch(targetUrl.toString(), fetchOptions);
+ const headers = {};
+ response.headers.forEach((value, key) => {
+ headers[key] = value;
+ });
+ let responseBody = null;
+ const contentTypeHeader = headers["content-type"] || "";
+ if (contentTypeHeader.includes("text/event-stream")) {
+ try {
+ const text = await response.text();
+ const events = [];
+ const lines = text.split("\n");
+ let currentEvent = {};
+ for (const line of lines) {
+ if (line.startsWith("event:")) {
+ currentEvent.event = line.substring(6).trim();
+ } else if (line.startsWith("data:")) {
+ const data = line.substring(5).trim();
+ try {
+ currentEvent.data = JSON.parse(data);
+ } catch {
+ currentEvent.data = data;
+ }
+ } else if (line.startsWith("id:")) {
+ currentEvent.id = line.substring(3).trim();
+ } else if (line === "") {
+ if (Object.keys(currentEvent).length > 0) {
+ events.push(currentEvent);
+ currentEvent = {};
+ }
+ }
+ }
+ responseBody = events.length > 0 ? {
+ events,
+ // If there's a "message" event with MCP response, extract it
+ mcpResponse: events.find((e) => e.event === "message" || !e.event)?.data || null
+ } : { raw: text };
+ } catch (error) {
+ console.error("Failed to parse SSE response:", error);
+ responseBody = { error: "Failed to parse SSE stream" };
+ }
+ } else {
+ try {
+ responseBody = await response.json();
+ } catch {
+ try {
+ responseBody = await response.text();
+ } catch {
+ responseBody = null;
+ }
+ }
+ }
+ console.log(" Response:", response.status, response.statusText);
+ return c.json({
+ status: response.status,
+ statusText: response.statusText,
+ headers,
+ body: responseBody
+ });
+ } catch (error) {
+ console.error("[OAuth Debug Proxy] Error:", error);
+ return c.json(
+ {
+ error: error instanceof Error ? error.message : "Unknown error occurred"
+ },
+ 500
+ );
+ }
+ });
+ oauth.post("/proxy", async (c) => {
+ try {
+ const {
+ url,
+ method = "GET",
+ body,
+ headers: customHeaders
+ } = await c.req.json();
+ if (!url) {
+ return c.json({ error: "Missing url parameter" }, 400);
+ }
+ let targetUrl;
+ try {
+ targetUrl = new URL(url);
+ if (targetUrl.protocol !== "https:" && targetUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
+ }
+ } catch (error) {
+ return c.json({ error: "Invalid URL format" }, 400);
+ }
+ const requestHeaders = {
+ "User-Agent": "MCP-Inspector/1.0",
+ ...customHeaders
+ };
+ const contentType = customHeaders?.["Content-Type"] || customHeaders?.["content-type"];
+ const isFormUrlEncoded = contentType?.includes(
+ "application/x-www-form-urlencoded"
+ );
+ if (method === "POST" && body && !contentType) {
+ requestHeaders["Content-Type"] = "application/json";
+ }
+ const fetchOptions = {
+ method,
+ headers: requestHeaders
+ };
+ if (method === "POST" && body) {
+ if (isFormUrlEncoded && typeof body === "object") {
+ const params = new URLSearchParams();
+ for (const [key, value] of Object.entries(body)) {
+ params.append(key, String(value));
+ }
+ fetchOptions.body = params.toString();
+ } else if (typeof body === "string") {
+ fetchOptions.body = body;
+ } else {
+ fetchOptions.body = JSON.stringify(body);
+ }
+ }
+ const response = await fetch(targetUrl.toString(), fetchOptions);
+ const headers = {};
+ response.headers.forEach((value, key) => {
+ headers[key] = value;
+ });
+ let responseBody = null;
+ try {
+ responseBody = await response.json();
+ } catch {
+ try {
+ responseBody = await response.text();
+ } catch {
+ responseBody = null;
+ }
+ }
+ return c.json({
+ status: response.status,
+ statusText: response.statusText,
+ headers,
+ body: responseBody
+ });
+ } catch (error) {
+ console.error("OAuth proxy error:", error);
+ return c.json(
+ {
+ error: error instanceof Error ? error.message : "Unknown error occurred"
+ },
+ 500
+ );
+ }
+ });
  oauth.get("/metadata", async (c) => {
  try {
  const url = c.req.query("url");
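The hunk above adds a new chat-v2 route: for MCPJam-provided models it loops step requests against CONVEX_HTTP_URL and executes MCP tool calls locally between steps, otherwise it falls back to the AI SDK's streamText, and either way it returns a UI message stream. A hedged sketch of a client request; the host and the exact message shape are assumptions, and the /mcp/chat-v2 prefix comes from the route registration in a later hunk:

```ts
// Hypothetical client call to the new chat-v2 route (mounted at /mcp/chat-v2 further down).
// The handler requires a non-empty `messages` array and a `model` object,
// and reads optional `apiKey` and `temperature` from the body.
const res = await fetch("http://localhost:3001/mcp/chat-v2", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    model: { id: "gpt-4o", provider: "openai" },
    apiKey: process.env.OPENAI_API_KEY,
    temperature: 0.7,
    messages: [
      { role: "user", parts: [{ type: "text", text: "List the available tools" }] },
    ],
  }),
});
// The response body is a UI message stream produced by createUIMessageStreamResponse
// or toUIMessageStreamResponse, suitable for an AI SDK chat client.
```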
@@ -3057,8 +3246,8 @@ oauth.get("/metadata", async (c) => {
  let metadataUrl;
  try {
  metadataUrl = new URL(url);
- if (metadataUrl.protocol !== "https:") {
- return c.json({ error: "Only HTTPS URLs are allowed" }, 400);
+ if (metadataUrl.protocol !== "https:" && metadataUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
  }
  } catch (error) {
  return c.json({ error: "Invalid URL format" }, 400);
@@ -3093,8 +3282,8 @@ oauth.get("/metadata", async (c) => {
  var oauth_default = oauth;

  // routes/mcp/export.ts
- import { Hono as Hono8 } from "hono";
- var exporter = new Hono8();
+ import { Hono as Hono9 } from "hono";
+ var exporter = new Hono9();
  exporter.post("/server", async (c) => {
  try {
  const { serverId } = await c.req.json();
@@ -3150,20 +3339,20 @@ exporter.post("/server", async (c) => {
  var export_default = exporter;

  // routes/mcp/interceptor.ts
- import { Hono as Hono9 } from "hono";
- var interceptor = new Hono9();
+ import { Hono as Hono10 } from "hono";
+ var interceptor = new Hono10();
  var interceptor_default = interceptor;

  // routes/mcp/evals.ts
- import { Hono as Hono10 } from "hono";
+ import { Hono as Hono11 } from "hono";
  import { z as z3 } from "zod";

  // ../evals-cli/src/evals/runner.ts
  import { MCPClient } from "@mastra/mcp";
- import { streamText as streamText2 } from "ai";
+ import { streamText as streamText3 } from "ai";

  // ../node_modules/convex/dist/esm/index.js
- var version = "1.28.0";
+ var version = "1.27.3";

  // ../node_modules/convex/dist/esm/values/base64.js
  var lookup = [];
@@ -3794,7 +3983,7 @@ function createApi(pathParts = []) {
  }
  var anyApi = createApi();

- // ../node_modules/convex/dist/esm/vendor/long.js
+ // ../node_modules/convex/dist/esm/browser/long.js
  var __defProp4 = Object.defineProperty;
  var __defNormalProp3 = (obj, key, value) => key in obj ? __defProp4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
  var __publicField3 = (obj, key, value) => __defNormalProp3(obj, typeof key !== "symbol" ? key + "" : key, value);
@@ -3871,7 +4060,7 @@ var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL;
  var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL;
  var MAX_UNSIGNED_VALUE = new Long(4294967295 | 0, 4294967295 | 0);

- // ../node_modules/convex/dist/esm/vendor/jwt-decode/index.js
+ // ../node_modules/jwt-decode/build/esm/index.js
  var InvalidTokenError = class extends Error {
  };
  InvalidTokenError.prototype.name = "InvalidTokenError";
@@ -8689,7 +8878,7 @@ import { z as z2 } from "zod";

  // ../shared/tools.ts
  import { z } from "zod";
- import { zodToJsonSchema as zodToJsonSchema2 } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
  import { tool } from "ai";
  var fallbackInputSchema = z.object({}).passthrough();
  var UNREPRESENTABLE_JSON_SCHEMA_MESSAGES = [
@@ -8724,7 +8913,7 @@ function canConvertToJSONSchema(schema) {
  }
  }
  try {
- zodToJsonSchema2(schema);
+ zodToJsonSchema3(schema);
  return true;
  } catch (error) {
  if (isUnrepresentableSchemaError(error)) {
@@ -9237,7 +9426,7 @@ var runIteration = async ({
  let assistantStreaming = false;
  let streamResult;
  try {
- streamResult = await streamText2({
+ streamResult = await streamText3({
  model: createLlmModel2(provider, model, llms),
  system,
  temperature,
@@ -9695,7 +9884,7 @@ async function collectToolsForServers(clientManager, serverIds) {
  );
  return perServerTools.flat();
  }
- var evals = new Hono10();
+ var evals = new Hono11();
  var RunEvalsRequestSchema = z3.object({
  tests: z3.array(
  z3.object({
@@ -9835,11 +10024,11 @@ evals.post("/generate-tests", async (c) => {
  var evals_default = evals;

  // routes/mcp/http-adapters.ts
- import { Hono as Hono11 } from "hono";
+ import { Hono as Hono12 } from "hono";

  // services/mcp-http-bridge.ts
  import { z as z4 } from "zod";
- import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema4 } from "zod-to-json-schema";
  function buildInitializeResult(serverId, mode) {
  if (mode === "adapter") {
  return {
@@ -9871,7 +10060,7 @@ function toJsonSchemaMaybe(schema) {
  try {
  if (schema && typeof schema === "object") {
  if (schema instanceof z4.ZodType || "_def" in schema && "parse" in schema) {
- return zodToJsonSchema3(schema);
+ return zodToJsonSchema4(schema);
  }
  }
  } catch {
@@ -10042,7 +10231,7 @@ async function handleJsonRpc(serverId, body, clientManager, mode) {
  var sessions = /* @__PURE__ */ new Map();
  var latestSessionByServer = /* @__PURE__ */ new Map();
  function createHttpHandler(mode, routePrefix) {
- const router = new Hono11();
+ const router = new Hono12();
  router.options(
  "/:serverId",
  (c) => c.body(null, 204, {
@@ -10261,8 +10450,115 @@ function createHttpHandler(mode, routePrefix) {
  var adapterHttp = createHttpHandler("adapter", "adapter-http");
  var managerHttp = createHttpHandler("manager", "manager-http");

+ // routes/mcp/elicitation.ts
+ import { Hono as Hono13 } from "hono";
+ var elicitation = new Hono13();
+ var elicitationSubscribers = /* @__PURE__ */ new Set();
+ function broadcastElicitation(event) {
+ for (const sub of Array.from(elicitationSubscribers)) {
+ try {
+ sub.send(event);
+ } catch {
+ try {
+ sub.close();
+ } catch {
+ }
+ elicitationSubscribers.delete(sub);
+ }
+ }
+ }
+ var isCallbackRegistered = false;
+ elicitation.use("*", async (c, next) => {
+ if (!isCallbackRegistered) {
+ const manager = c.mcpClientManager;
+ manager.setElicitationCallback(({ requestId, message, schema }) => {
+ return new Promise((resolve2, reject) => {
+ try {
+ manager.getPendingElicitations().set(requestId, { resolve: resolve2, reject });
+ } catch {
+ }
+ broadcastElicitation({
+ type: "elicitation_request",
+ requestId,
+ message,
+ schema,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ });
+ });
+ });
+ isCallbackRegistered = true;
+ }
+ await next();
+ });
+ elicitation.get("/stream", async (c) => {
+ const encoder = new TextEncoder();
+ const stream = new ReadableStream({
+ start(controller) {
+ const send = (event) => {
+ const payload = `data: ${JSON.stringify(event)}
+
+ `;
+ controller.enqueue(encoder.encode(payload));
+ };
+ const keepAlive = setInterval(() => {
+ try {
+ controller.enqueue(encoder.encode(`: keep-alive
+
+ `));
+ } catch {
+ }
+ }, 25e3);
+ const close = () => {
+ clearInterval(keepAlive);
+ try {
+ controller.close();
+ } catch {
+ }
+ };
+ controller.enqueue(encoder.encode(`retry: 1500
+
+ `));
+ const subscriber = { send, close };
+ elicitationSubscribers.add(subscriber);
+ c.req.raw.signal?.addEventListener?.("abort", () => {
+ elicitationSubscribers.delete(subscriber);
+ close();
+ });
+ }
+ });
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache, no-transform",
+ Connection: "keep-alive",
+ "X-Accel-Buffering": "no",
+ "Access-Control-Allow-Origin": "*"
+ }
+ });
+ });
+ elicitation.post("/respond", async (c) => {
+ try {
+ const body = await c.req.json();
+ const { requestId, action, content } = body;
+ if (!requestId || !action) {
+ return c.json({ error: "Missing requestId or action" }, 400);
+ }
+ const response = action === "accept" ? { action: "accept", content: content ?? {} } : { action };
+ const ok = c.mcpClientManager.respondToElicitation(requestId, response);
+ if (!ok) {
+ return c.json({ error: "Unknown or expired requestId" }, 404);
+ }
+ broadcastElicitation({ type: "elicitation_complete", requestId });
+ return c.json({ ok: true });
+ } catch (e) {
+ return c.json({ error: e?.message || "Failed to respond" }, 400);
+ }
+ });
+ var elicitation_default = elicitation;
+
  // routes/mcp/index.ts
- var mcp = new Hono12();
+ var mcp = new Hono14();
  mcp.get("/health", (c) => {
  return c.json({
  service: "MCP API",
@@ -10271,6 +10567,8 @@ mcp.get("/health", (c) => {
  });
  });
  mcp.route("/chat", chat_default);
+ mcp.route("/chat-v2", chat_v2_default);
+ mcp.route("/elicitation", elicitation_default);
  mcp.route("/connect", connect_default);
  mcp.route("/servers", servers_default);
  mcp.route("/tools", tools_default);
@@ -10486,7 +10784,7 @@ try {
  fixPath();
  } catch {
  }
- var app = new Hono13().onError((err, c) => {
+ var app = new Hono15().onError((err, c) => {
  console.error("Unhandled error:", err);
  Sentry2.captureException(err);
  if (err instanceof HTTPException) {