@mcpjam/inspector 1.0.20 → 1.0.22

This diff shows the changes between two publicly released versions of the package, exactly as they appear in their public registry, and is provided for informational purposes only.
@@ -39,7 +39,7 @@ import * as Sentry2 from "@sentry/node";
  import { serve } from "@hono/node-server";
  import dotenv from "dotenv";
  import fixPath from "fix-path";
- import { Hono as Hono13 } from "hono";
+ import { Hono as Hono15 } from "hono";
  import { HTTPException } from "hono/http-exception";
  import { cors } from "hono/cors";
  import { logger } from "hono/logger";
@@ -885,7 +885,7 @@ var MCPClientManager = class {
  };

  // routes/mcp/index.ts
- import { Hono as Hono12 } from "hono";
+ import { Hono as Hono14 } from "hono";

  // routes/mcp/connect.ts
  import { Hono } from "hono";
@@ -1472,7 +1472,8 @@ resources.post("/widget/store", async (c) => {
  toolInput,
  toolOutput,
  toolResponseMetadata,
- toolId
+ toolId,
+ theme
  } = body;
  if (!serverId || !uri || !toolId) {
  return c.json({ success: false, error: "Missing required fields" }, 400);
@@ -1484,6 +1485,7 @@ resources.post("/widget/store", async (c) => {
  toolOutput,
  toolResponseMetadata: toolResponseMetadata ?? null,
  toolId,
+ theme: theme ?? "dark",
  timestamp: Date.now()
  });
  return c.json({ success: true });
@@ -1544,7 +1546,14 @@ resources.get("/widget-content/:toolId", async (c) => {
  404
  );
  }
- const { serverId, uri, toolInput, toolOutput, toolResponseMetadata } = widgetData;
+ const {
+ serverId,
+ uri,
+ toolInput,
+ toolOutput,
+ toolResponseMetadata,
+ theme
+ } = widgetData;
  const mcpClientManager2 = c.mcpClientManager;
  const availableServers = mcpClientManager2.listServers().filter((id) => Boolean(mcpClientManager2.getClient(id)));
  let actualServerId = serverId;
@@ -1604,10 +1613,13 @@ resources.get("/widget-content/:toolId", async (c) => {
  toolResponseMetadata: ${JSON.stringify(toolResponseMetadata ?? null)},
  displayMode: 'inline',
  maxHeight: 600,
- theme: 'dark',
+ theme: ${JSON.stringify(theme ?? "dark")},
  locale: 'en-US',
  safeArea: { insets: { top: 0, bottom: 0, left: 0, right: 0 } },
- userAgent: {},
+ userAgent: {
+ device: { type: 'desktop' },
+ capabilities: { hover: true, touch: false }
+ },
  widgetState: null,

  async setWidgetState(state) {
@@ -1731,6 +1743,28 @@ resources.get("/widget-content/:toolId", async (c) => {
  }
  } catch (err) {}
  }, 0);
+
+ // Listen for theme changes from parent
+ window.addEventListener('message', (event) => {
+ if (event.data.type === 'webplus:set_globals') {
+ const { globals } = event.data;
+
+ // Update theme if provided
+ if (globals?.theme && window.openai) {
+ window.openai.theme = globals.theme;
+
+ // Dispatch event for widgets that use useTheme() hook
+ try {
+ const globalsEvent = new CustomEvent('webplus:set_globals', {
+ detail: { globals: { theme: globals.theme } }
+ });
+ window.dispatchEvent(globalsEvent);
+ } catch (err) {
+ console.error('[OpenAI Widget] Failed to dispatch theme change:', err);
+ }
+ }
+ }
+ });
  })();
  </script>
  `;
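For reference, a widget running inside this iframe could pick up those theme updates roughly as follows. This is a minimal sketch based only on the bridge script above: the `webplus:set_globals` CustomEvent and the `window.openai.theme` global appear in the diff, while the `applyTheme` helper and the CSS-class approach are illustrative.

// Sketch: consume the theme updates re-dispatched by the bridge script above.
function applyTheme(theme) {
  // Illustrative only: toggle a class on the document root; real widgets may use a hook instead.
  document.documentElement.classList.toggle("dark", theme === "dark");
}

// Initial value as set by the bridge (the diff defaults to "dark").
applyTheme(window.openai?.theme ?? "dark");

// Subsequent host theme changes arrive via the re-dispatched CustomEvent.
window.addEventListener("webplus:set_globals", (event) => {
  const nextTheme = event.detail?.globals?.theme;
  if (nextTheme) applyTheme(nextTheme);
});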
@@ -1875,224 +1909,6 @@ var MCPJAM_PROVIDED_MODEL_IDS = [
  var isMCPJamProvidedModel = (modelId) => {
  return MCPJAM_PROVIDED_MODEL_IDS.includes(modelId);
  };
- var Model = /* @__PURE__ */ ((Model2) => {
- Model2["CLAUDE_OPUS_4_0"] = "claude-opus-4-0";
- Model2["CLAUDE_SONNET_4_0"] = "claude-sonnet-4-0";
- Model2["CLAUDE_3_7_SONNET_LATEST"] = "claude-3-7-sonnet-latest";
- Model2["CLAUDE_3_5_SONNET_LATEST"] = "claude-3-5-sonnet-latest";
- Model2["CLAUDE_3_5_HAIKU_LATEST"] = "claude-3-5-haiku-latest";
- Model2["GPT_4_1"] = "gpt-4.1";
- Model2["GPT_4_1_MINI"] = "gpt-4.1-mini";
- Model2["GPT_4_1_NANO"] = "gpt-4.1-nano";
- Model2["GPT_4O"] = "gpt-4o";
- Model2["GPT_4O_MINI"] = "gpt-4o-mini";
- Model2["GPT_4_TURBO"] = "gpt-4-turbo";
- Model2["GPT_4"] = "gpt-4";
- Model2["GPT_5"] = "gpt-5";
- Model2["GPT_5_MINI"] = "gpt-5-mini";
- Model2["GPT_5_NANO"] = "gpt-5-nano";
- Model2["GPT_5_MAIN"] = "openai/gpt-5";
- Model2["GPT_5_PRO"] = "gpt-5-pro";
- Model2["GPT_5_CODEX"] = "gpt-5-codex";
- Model2["GPT_3_5_TURBO"] = "gpt-3.5-turbo";
- Model2["DEEPSEEK_CHAT"] = "deepseek-chat";
- Model2["DEEPSEEK_REASONER"] = "deepseek-reasoner";
- Model2["GEMINI_2_5_PRO"] = "gemini-2.5-pro";
- Model2["GEMINI_2_5_FLASH"] = "gemini-2.5-flash";
- Model2["GEMINI_2_5_FLASH_LITE"] = "gemini-2.5-flash-lite";
- Model2["GEMINI_2_0_FLASH_EXP"] = "gemini-2.0-flash-exp";
- Model2["GEMINI_1_5_PRO"] = "gemini-1.5-pro";
- Model2["GEMINI_1_5_PRO_002"] = "gemini-1.5-pro-002";
- Model2["GEMINI_1_5_FLASH"] = "gemini-1.5-flash";
- Model2["GEMINI_1_5_FLASH_002"] = "gemini-1.5-flash-002";
- Model2["GEMINI_1_5_FLASH_8B"] = "gemini-1.5-flash-8b";
- Model2["GEMINI_1_5_FLASH_8B_001"] = "gemini-1.5-flash-8b-001";
- Model2["GEMMA_3_2B"] = "gemma-3-2b";
- Model2["GEMMA_3_9B"] = "gemma-3-9b";
- Model2["GEMMA_3_27B"] = "gemma-3-27b";
- Model2["GEMMA_2_2B"] = "gemma-2-2b";
- Model2["GEMMA_2_9B"] = "gemma-2-9b";
- Model2["GEMMA_2_27B"] = "gemma-2-27b";
- Model2["CODE_GEMMA_2B"] = "codegemma-2b";
- Model2["CODE_GEMMA_7B"] = "codegemma-7b";
- Model2["MISTRAL_LARGE_LATEST"] = "mistral-large-latest";
- Model2["MISTRAL_SMALL_LATEST"] = "mistral-small-latest";
- Model2["CODESTRAL_LATEST"] = "codestral-latest";
- Model2["MINISTRAL_8B_LATEST"] = "ministral-8b-latest";
- Model2["MINISTRAL_3B_LATEST"] = "ministral-3b-latest";
- return Model2;
- })(Model || {});
- var SUPPORTED_MODELS = [
- {
- id: "claude-opus-4-0" /* CLAUDE_OPUS_4_0 */,
- name: "Claude Opus 4",
- provider: "anthropic"
- },
- {
- id: "claude-sonnet-4-0" /* CLAUDE_SONNET_4_0 */,
- name: "Claude Sonnet 4",
- provider: "anthropic"
- },
- {
- id: "claude-3-7-sonnet-latest" /* CLAUDE_3_7_SONNET_LATEST */,
- name: "Claude Sonnet 3.7",
- provider: "anthropic"
- },
- {
- id: "claude-3-5-sonnet-latest" /* CLAUDE_3_5_SONNET_LATEST */,
- name: "Claude Sonnet 3.5",
- provider: "anthropic"
- },
- {
- id: "claude-3-5-haiku-latest" /* CLAUDE_3_5_HAIKU_LATEST */,
- name: "Claude Haiku 3.5",
- provider: "anthropic"
- },
- { id: "gpt-5" /* GPT_5 */, name: "GPT-5", provider: "openai" },
- { id: "gpt-5-mini" /* GPT_5_MINI */, name: "GPT-5 Mini", provider: "openai" },
- { id: "gpt-5-nano" /* GPT_5_NANO */, name: "GPT-5 Nano", provider: "openai" },
- { id: Model.GPT_5_CHAT_LATEST, name: "GPT-5 Chat", provider: "openai" },
- { id: "gpt-5-pro" /* GPT_5_PRO */, name: "GPT-5 Pro", provider: "openai" },
- { id: "gpt-5-codex" /* GPT_5_CODEX */, name: "GPT-5 Codex", provider: "openai" },
- { id: "gpt-4.1" /* GPT_4_1 */, name: "GPT-4.1", provider: "openai" },
- { id: "gpt-4.1-mini" /* GPT_4_1_MINI */, name: "GPT-4.1 Mini", provider: "openai" },
- { id: "gpt-4.1-nano" /* GPT_4_1_NANO */, name: "GPT-4.1 Nano", provider: "openai" },
- { id: "gpt-4o" /* GPT_4O */, name: "GPT-4o", provider: "openai" },
- { id: "gpt-4o-mini" /* GPT_4O_MINI */, name: "GPT-4o Mini", provider: "openai" },
- { id: "deepseek-chat" /* DEEPSEEK_CHAT */, name: "DeepSeek Chat", provider: "deepseek" },
- {
- id: "deepseek-reasoner" /* DEEPSEEK_REASONER */,
- name: "DeepSeek Reasoner",
- provider: "deepseek"
- },
- // Google Gemini models (latest first)
- {
- id: "gemini-2.5-pro" /* GEMINI_2_5_PRO */,
- name: "Gemini 2.5 Pro",
- provider: "google"
- },
- {
- id: "gemini-2.5-flash" /* GEMINI_2_5_FLASH */,
- name: "Gemini 2.5 Flash",
- provider: "google"
- },
- {
- id: "gemini-2.0-flash-exp" /* GEMINI_2_0_FLASH_EXP */,
- name: "Gemini 2.0 Flash Experimental",
- provider: "google"
- },
- {
- id: "gemini-1.5-pro-002" /* GEMINI_1_5_PRO_002 */,
- name: "Gemini 1.5 Pro 002",
- provider: "google"
- },
- {
- id: "gemini-1.5-pro" /* GEMINI_1_5_PRO */,
- name: "Gemini 1.5 Pro",
- provider: "google"
- },
- {
- id: "gemini-1.5-flash-002" /* GEMINI_1_5_FLASH_002 */,
- name: "Gemini 1.5 Flash 002",
- provider: "google"
- },
- {
- id: "gemini-1.5-flash" /* GEMINI_1_5_FLASH */,
- name: "Gemini 1.5 Flash",
- provider: "google"
- },
- {
- id: "meta-llama/llama-3.3-70b-instruct",
- name: "Llama 3.3 70B (Free)",
- provider: "meta"
- },
- {
- id: "openai/gpt-oss-120b",
- name: "GPT-OSS 120B (Free)",
- provider: "openai"
- },
- {
- id: "x-ai/grok-4-fast",
- name: "Grok 4 Fast (Free)",
- provider: "x-ai"
- },
- {
- id: "openai/gpt-5-nano",
- name: "GPT-5 Nano (Free)",
- provider: "openai"
- },
- {
- id: "anthropic/claude-sonnet-4.5",
- name: "Claude Sonnet 4.5 (Free)",
- provider: "anthropic"
- },
- {
- id: "anthropic/claude-haiku-4.5",
- name: "Claude Haiku 4.5 (Free)",
- provider: "anthropic"
- },
- {
- id: "openai/gpt-5-codex",
- name: "GPT-5 Codex (Free)",
- provider: "openai"
- },
- {
- id: "openai/gpt-5",
- name: "GPT-5 (Free)",
- provider: "openai"
- },
- {
- id: "openai/gpt-5-mini",
- name: "GPT-5 Mini (Free)",
- provider: "openai"
- },
- {
- id: "google/gemini-2.5-flash-preview-09-2025",
- name: "Gemini 2.5 Flash Preview (Free)",
- provider: "google"
- },
- {
- id: "moonshotai/kimi-k2-0905",
- name: "Kimi K2 (Free)",
- provider: "moonshotai"
- },
- {
- id: "google/gemini-2.5-flash",
- name: "Gemini 2.5 Flash (Free)",
- provider: "google"
- },
- {
- id: "z-ai/glm-4.6",
- name: "GLM 4.6 (Free)",
- provider: "z-ai"
- },
- // Mistral models
- {
- id: "mistral-large-latest" /* MISTRAL_LARGE_LATEST */,
- name: "Mistral Large",
- provider: "mistral"
- },
- {
- id: "mistral-small-latest" /* MISTRAL_SMALL_LATEST */,
- name: "Mistral Small",
- provider: "mistral"
- },
- {
- id: "codestral-latest" /* CODESTRAL_LATEST */,
- name: "Codestral",
- provider: "mistral"
- },
- {
- id: "ministral-8b-latest" /* MINISTRAL_8B_LATEST */,
- name: "Ministral 8B",
- provider: "mistral"
- },
- {
- id: "ministral-3b-latest" /* MINISTRAL_3B_LATEST */,
- name: "Ministral 3B",
- provider: "mistral"
- }
- ];

  // routes/mcp/chat.ts
  import { TextEncoder as TextEncoder2 } from "util";
@@ -3045,9 +2861,392 @@ chat.post("/", async (c) => {
  });
  var chat_default = chat;

- // routes/mcp/oauth.ts
+ // routes/mcp/chat-v2.ts
  import { Hono as Hono7 } from "hono";
- var oauth = new Hono7();
+ import {
+ convertToModelMessages,
+ streamText as streamText2,
+ stepCountIs,
+ createUIMessageStream,
+ createUIMessageStreamResponse
+ } from "ai";
+ import zodToJsonSchema2 from "zod-to-json-schema";
+ var DEFAULT_TEMPERATURE = 0.7;
+ var chatV2 = new Hono7();
+ chatV2.post("/", async (c) => {
+ try {
+ const body = await c.req.json();
+ const mcpClientManager2 = c.mcpClientManager;
+ const {
+ messages,
+ apiKey,
+ model,
+ systemPrompt,
+ temperature,
+ selectedServers
+ } = body;
+ if (!Array.isArray(messages) || messages.length === 0) {
+ return c.json({ error: "messages are required" }, 400);
+ }
+ const modelDefinition = model;
+ if (!modelDefinition) {
+ return c.json({ error: "model is not supported" }, 400);
+ }
+ console.log("selectedServers", selectedServers);
+ const mcpTools = await mcpClientManager2.getToolsForAiSdk(selectedServers);
+ if (modelDefinition.id && isMCPJamProvidedModel(modelDefinition.id)) {
+ if (!process.env.CONVEX_HTTP_URL) {
+ return c.json(
+ { error: "Server missing CONVEX_HTTP_URL configuration" },
+ 500
+ );
+ }
+ const flattenedTools = mcpTools;
+ const toolDefs = [];
+ for (const [name, tool2] of Object.entries(flattenedTools)) {
+ if (!tool2) continue;
+ let serializedSchema;
+ const schema = tool2.inputSchema;
+ if (schema) {
+ if (typeof schema === "object" && schema !== null && "jsonSchema" in schema) {
+ serializedSchema = schema.jsonSchema;
+ } else {
+ try {
+ serializedSchema = zodToJsonSchema2(schema);
+ } catch {
+ serializedSchema = {
+ type: "object",
+ properties: {},
+ additionalProperties: false
+ };
+ }
+ }
+ }
+ toolDefs.push({
+ name,
+ description: tool2.description,
+ inputSchema: serializedSchema ?? {
+ type: "object",
+ properties: {},
+ additionalProperties: false
+ }
+ });
+ }
+ const authHeader = c.req.header("authorization") || void 0;
+ let messageHistory = convertToModelMessages(messages);
+ let steps = 0;
+ const MAX_STEPS2 = 20;
+ const stream = createUIMessageStream({
+ execute: async ({ writer }) => {
+ const msgId = `asst_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+ while (steps < MAX_STEPS2) {
+ const res = await fetch(`${process.env.CONVEX_HTTP_URL}/stream`, {
+ method: "POST",
+ headers: {
+ "content-type": "application/json",
+ ...authHeader ? { authorization: authHeader } : {}
+ },
+ body: JSON.stringify({
+ mode: "step",
+ messages: JSON.stringify(messageHistory),
+ model: String(modelDefinition.id),
+ systemPrompt,
+ temperature: temperature ?? DEFAULT_TEMPERATURE,
+ tools: toolDefs
+ })
+ });
+ if (!res.ok) {
+ const errorText = await res.text().catch(() => "step failed");
+ writer.write({ type: "error", errorText });
+ break;
+ }
+ const json = await res.json();
+ if (!json?.ok || !Array.isArray(json.messages)) {
+ break;
+ }
+ for (const m of json.messages) {
+ if (m?.role === "assistant" && Array.isArray(m.content)) {
+ for (const item of m.content) {
+ if (item?.type === "text" && typeof item.text === "string") {
+ writer.write({ type: "text-start", id: msgId });
+ writer.write({
+ type: "text-delta",
+ id: msgId,
+ delta: item.text
+ });
+ writer.write({ type: "text-end", id: msgId });
+ } else if (item?.type === "tool-call") {
+ if (item.input == null)
+ item.input = item.parameters ?? item.args ?? {};
+ if (!item.toolCallId)
+ item.toolCallId = `tc_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
+ writer.write({
+ type: "tool-input-available",
+ toolCallId: item.toolCallId,
+ toolName: item.toolName ?? item.name,
+ input: item.input
+ });
+ }
+ }
+ }
+ messageHistory.push(m);
+ }
+ const beforeLen = messageHistory.length;
+ if (hasUnresolvedToolCalls(messageHistory)) {
+ await executeToolCallsFromMessages(messageHistory, {
+ clientManager: mcpClientManager2
+ });
+ }
+ const newMessages = messageHistory.slice(beforeLen);
+ for (const msg of newMessages) {
+ if (msg?.role === "tool" && Array.isArray(msg.content)) {
+ for (const item of msg.content) {
+ if (item?.type === "tool-result") {
+ writer.write({
+ type: "tool-output-available",
+ toolCallId: item.toolCallId,
+ output: item.output ?? item.result ?? item.value
+ });
+ }
+ }
+ }
+ }
+ steps++;
+ const finishReason = json.finishReason;
+ if (finishReason && finishReason !== "tool-calls") {
+ break;
+ }
+ }
+ }
+ });
+ return createUIMessageStreamResponse({ stream });
+ }
+ const llmModel = createLlmModel(
+ modelDefinition,
+ apiKey ?? "",
+ body.ollamaBaseUrl,
+ body.litellmBaseUrl
+ );
+ const result = streamText2({
+ model: llmModel,
+ messages: convertToModelMessages(messages),
+ temperature: temperature ?? DEFAULT_TEMPERATURE,
+ system: systemPrompt,
+ tools: mcpTools,
+ stopWhen: stepCountIs(20)
+ });
+ return result.toUIMessageStreamResponse();
+ } catch (error) {
+ console.error("[mcp/chat-v2] failed to process chat request", error);
+ return c.json({ error: "Unexpected error" }, 500);
+ }
+ });
+ var chat_v2_default = chatV2;
+
+ // routes/mcp/oauth.ts
+ import { Hono as Hono8 } from "hono";
+ var oauth = new Hono8();
+ oauth.post("/debug/proxy", async (c) => {
+ try {
+ const {
+ url,
+ method = "GET",
+ body,
+ headers: customHeaders
+ } = await c.req.json();
+ if (!url) {
+ return c.json({ error: "Missing url parameter" }, 400);
+ }
+ let targetUrl;
+ try {
+ targetUrl = new URL(url);
+ if (targetUrl.protocol !== "https:" && targetUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
+ }
+ } catch (error) {
+ return c.json({ error: "Invalid URL format" }, 400);
+ }
+ const requestHeaders = {
+ "User-Agent": "MCP-Inspector/1.0",
+ ...customHeaders
+ };
+ console.log("[OAuth Debug Proxy]");
+ console.log(" URL:", url);
+ console.log(" Method:", method);
+ console.log(" Headers:", requestHeaders);
+ if (body) {
+ console.log(" Body:", JSON.stringify(body, null, 2));
+ }
+ const contentType = customHeaders?.["Content-Type"] || customHeaders?.["content-type"];
+ const isFormUrlEncoded = contentType?.includes(
+ "application/x-www-form-urlencoded"
+ );
+ if (method === "POST" && body && !contentType) {
+ requestHeaders["Content-Type"] = "application/json";
+ }
+ const fetchOptions = {
+ method,
+ headers: requestHeaders
+ };
+ if (method === "POST" && body) {
+ if (isFormUrlEncoded && typeof body === "object") {
+ const params = new URLSearchParams();
+ for (const [key, value] of Object.entries(body)) {
+ params.append(key, String(value));
+ }
+ fetchOptions.body = params.toString();
+ } else {
+ fetchOptions.body = JSON.stringify(body);
+ }
+ }
+ const response = await fetch(targetUrl.toString(), fetchOptions);
+ const headers = {};
+ response.headers.forEach((value, key) => {
+ headers[key] = value;
+ });
+ let responseBody = null;
+ const contentTypeHeader = headers["content-type"] || "";
+ if (contentTypeHeader.includes("text/event-stream")) {
+ try {
+ const text = await response.text();
+ const events = [];
+ const lines = text.split("\n");
+ let currentEvent = {};
+ for (const line of lines) {
+ if (line.startsWith("event:")) {
+ currentEvent.event = line.substring(6).trim();
+ } else if (line.startsWith("data:")) {
+ const data = line.substring(5).trim();
+ try {
+ currentEvent.data = JSON.parse(data);
+ } catch {
+ currentEvent.data = data;
+ }
+ } else if (line.startsWith("id:")) {
+ currentEvent.id = line.substring(3).trim();
+ } else if (line === "") {
+ if (Object.keys(currentEvent).length > 0) {
+ events.push(currentEvent);
+ currentEvent = {};
+ }
+ }
+ }
+ responseBody = events.length > 0 ? {
+ events,
+ // If there's a "message" event with MCP response, extract it
+ mcpResponse: events.find((e) => e.event === "message" || !e.event)?.data || null
+ } : { raw: text };
+ } catch (error) {
+ console.error("Failed to parse SSE response:", error);
+ responseBody = { error: "Failed to parse SSE stream" };
+ }
+ } else {
+ try {
+ responseBody = await response.json();
+ } catch {
+ try {
+ responseBody = await response.text();
+ } catch {
+ responseBody = null;
+ }
+ }
+ }
+ console.log(" Response:", response.status, response.statusText);
+ return c.json({
+ status: response.status,
+ statusText: response.statusText,
+ headers,
+ body: responseBody
+ });
+ } catch (error) {
+ console.error("[OAuth Debug Proxy] Error:", error);
+ return c.json(
+ {
+ error: error instanceof Error ? error.message : "Unknown error occurred"
+ },
+ 500
+ );
+ }
+ });
+ oauth.post("/proxy", async (c) => {
+ try {
+ const {
+ url,
+ method = "GET",
+ body,
+ headers: customHeaders
+ } = await c.req.json();
+ if (!url) {
+ return c.json({ error: "Missing url parameter" }, 400);
+ }
+ let targetUrl;
+ try {
+ targetUrl = new URL(url);
+ if (targetUrl.protocol !== "https:" && targetUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
+ }
+ } catch (error) {
+ return c.json({ error: "Invalid URL format" }, 400);
+ }
+ const requestHeaders = {
+ "User-Agent": "MCP-Inspector/1.0",
+ ...customHeaders
+ };
+ const contentType = customHeaders?.["Content-Type"] || customHeaders?.["content-type"];
+ const isFormUrlEncoded = contentType?.includes(
+ "application/x-www-form-urlencoded"
+ );
+ if (method === "POST" && body && !contentType) {
+ requestHeaders["Content-Type"] = "application/json";
+ }
+ const fetchOptions = {
+ method,
+ headers: requestHeaders
+ };
+ if (method === "POST" && body) {
+ if (isFormUrlEncoded && typeof body === "object") {
+ const params = new URLSearchParams();
+ for (const [key, value] of Object.entries(body)) {
+ params.append(key, String(value));
+ }
+ fetchOptions.body = params.toString();
+ } else if (typeof body === "string") {
+ fetchOptions.body = body;
+ } else {
+ fetchOptions.body = JSON.stringify(body);
+ }
+ }
+ const response = await fetch(targetUrl.toString(), fetchOptions);
+ const headers = {};
+ response.headers.forEach((value, key) => {
+ headers[key] = value;
+ });
+ let responseBody = null;
+ try {
+ responseBody = await response.json();
+ } catch {
+ try {
+ responseBody = await response.text();
+ } catch {
+ responseBody = null;
+ }
+ }
+ return c.json({
+ status: response.status,
+ statusText: response.statusText,
+ headers,
+ body: responseBody
+ });
+ } catch (error) {
+ console.error("OAuth proxy error:", error);
+ return c.json(
+ {
+ error: error instanceof Error ? error.message : "Unknown error occurred"
+ },
+ 500
+ );
+ }
+ });
  oauth.get("/metadata", async (c) => {
  try {
  const url = c.req.query("url");
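As a rough illustration of the chatV2 handler added above (mounted as /mcp/chat-v2 later in this diff), a client request might look like the sketch below. The field names match the destructuring in the handler; the host and port, the shape of the model object, and the UI-message format are assumptions.

// Sketch: call the new chat-v2 route and stream the UI-message response (Node 18+).
const res = await fetch("http://localhost:3001/mcp/chat-v2", { // host/port assumed
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    messages: [
      { role: "user", parts: [{ type: "text", text: "List the tools on my server" }] } // assumed UIMessage shape
    ],
    model: { id: "claude-3-5-haiku-latest", provider: "anthropic" }, // assumed modelDefinition shape
    apiKey: process.env.ANTHROPIC_API_KEY,
    systemPrompt: "You are a helpful assistant.",
    temperature: 0.7,
    selectedServers: ["my-server"]
  })
});
// The handler streams UI message chunks; pipe them to stdout for inspection.
for await (const chunk of res.body) {
  process.stdout.write(chunk);
}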
@@ -3057,8 +3256,8 @@ oauth.get("/metadata", async (c) => {
  let metadataUrl;
  try {
  metadataUrl = new URL(url);
- if (metadataUrl.protocol !== "https:") {
- return c.json({ error: "Only HTTPS URLs are allowed" }, 400);
+ if (metadataUrl.protocol !== "https:" && metadataUrl.protocol !== "http:") {
+ return c.json({ error: "Invalid protocol" }, 400);
  }
  } catch (error) {
  return c.json({ error: "Invalid URL format" }, 400);
@@ -3093,8 +3292,8 @@ oauth.get("/metadata", async (c) => {
  var oauth_default = oauth;

  // routes/mcp/export.ts
- import { Hono as Hono8 } from "hono";
- var exporter = new Hono8();
+ import { Hono as Hono9 } from "hono";
+ var exporter = new Hono9();
  exporter.post("/server", async (c) => {
  try {
  const { serverId } = await c.req.json();
@@ -3150,20 +3349,20 @@ exporter.post("/server", async (c) => {
  var export_default = exporter;

  // routes/mcp/interceptor.ts
- import { Hono as Hono9 } from "hono";
- var interceptor = new Hono9();
+ import { Hono as Hono10 } from "hono";
+ var interceptor = new Hono10();
  var interceptor_default = interceptor;

  // routes/mcp/evals.ts
- import { Hono as Hono10 } from "hono";
+ import { Hono as Hono11 } from "hono";
  import { z as z3 } from "zod";

  // ../evals-cli/src/evals/runner.ts
  import { MCPClient } from "@mastra/mcp";
- import { streamText as streamText2 } from "ai";
+ import { streamText as streamText3 } from "ai";

  // ../node_modules/convex/dist/esm/index.js
- var version = "1.28.0";
+ var version = "1.27.3";

  // ../node_modules/convex/dist/esm/values/base64.js
  var lookup = [];
@@ -3794,7 +3993,7 @@ function createApi(pathParts = []) {
  }
  var anyApi = createApi();

- // ../node_modules/convex/dist/esm/vendor/long.js
+ // ../node_modules/convex/dist/esm/browser/long.js
  var __defProp4 = Object.defineProperty;
  var __defNormalProp3 = (obj, key, value) => key in obj ? __defProp4(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
  var __publicField3 = (obj, key, value) => __defNormalProp3(obj, typeof key !== "symbol" ? key + "" : key, value);
@@ -3871,7 +4070,7 @@ var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL;
  var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL;
  var MAX_UNSIGNED_VALUE = new Long(4294967295 | 0, 4294967295 | 0);

- // ../node_modules/convex/dist/esm/vendor/jwt-decode/index.js
+ // ../node_modules/jwt-decode/build/esm/index.js
  var InvalidTokenError = class extends Error {
  };
  InvalidTokenError.prototype.name = "InvalidTokenError";
@@ -8689,7 +8888,7 @@ import { z as z2 } from "zod";

  // ../shared/tools.ts
  import { z } from "zod";
- import { zodToJsonSchema as zodToJsonSchema2 } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
  import { tool } from "ai";
  var fallbackInputSchema = z.object({}).passthrough();
  var UNREPRESENTABLE_JSON_SCHEMA_MESSAGES = [
@@ -8724,7 +8923,7 @@ function canConvertToJSONSchema(schema) {
  }
  }
  try {
- zodToJsonSchema2(schema);
+ zodToJsonSchema3(schema);
  return true;
  } catch (error) {
  if (isUnrepresentableSchemaError(error)) {
@@ -9237,7 +9436,7 @@ var runIteration = async ({
  let assistantStreaming = false;
  let streamResult;
  try {
- streamResult = await streamText2({
+ streamResult = await streamText3({
  model: createLlmModel2(provider, model, llms),
  system,
  temperature,
@@ -9695,7 +9894,7 @@ async function collectToolsForServers(clientManager, serverIds) {
  );
  return perServerTools.flat();
  }
- var evals = new Hono10();
+ var evals = new Hono11();
  var RunEvalsRequestSchema = z3.object({
  tests: z3.array(
  z3.object({
@@ -9835,11 +10034,11 @@ evals.post("/generate-tests", async (c) => {
  var evals_default = evals;

  // routes/mcp/http-adapters.ts
- import { Hono as Hono11 } from "hono";
+ import { Hono as Hono12 } from "hono";

  // services/mcp-http-bridge.ts
  import { z as z4 } from "zod";
- import { zodToJsonSchema as zodToJsonSchema3 } from "zod-to-json-schema";
+ import { zodToJsonSchema as zodToJsonSchema4 } from "zod-to-json-schema";
  function buildInitializeResult(serverId, mode) {
  if (mode === "adapter") {
  return {
@@ -9871,7 +10070,7 @@ function toJsonSchemaMaybe(schema) {
  try {
  if (schema && typeof schema === "object") {
  if (schema instanceof z4.ZodType || "_def" in schema && "parse" in schema) {
- return zodToJsonSchema3(schema);
+ return zodToJsonSchema4(schema);
  }
  }
  } catch {
@@ -10042,7 +10241,7 @@ async function handleJsonRpc(serverId, body, clientManager, mode) {
  var sessions = /* @__PURE__ */ new Map();
  var latestSessionByServer = /* @__PURE__ */ new Map();
  function createHttpHandler(mode, routePrefix) {
- const router = new Hono11();
+ const router = new Hono12();
  router.options(
  "/:serverId",
  (c) => c.body(null, 204, {
@@ -10261,8 +10460,115 @@ function createHttpHandler(mode, routePrefix) {
  var adapterHttp = createHttpHandler("adapter", "adapter-http");
  var managerHttp = createHttpHandler("manager", "manager-http");

+ // routes/mcp/elicitation.ts
+ import { Hono as Hono13 } from "hono";
+ var elicitation = new Hono13();
+ var elicitationSubscribers = /* @__PURE__ */ new Set();
+ function broadcastElicitation(event) {
+ for (const sub of Array.from(elicitationSubscribers)) {
+ try {
+ sub.send(event);
+ } catch {
+ try {
+ sub.close();
+ } catch {
+ }
+ elicitationSubscribers.delete(sub);
+ }
+ }
+ }
+ var isCallbackRegistered = false;
+ elicitation.use("*", async (c, next) => {
+ if (!isCallbackRegistered) {
+ const manager = c.mcpClientManager;
+ manager.setElicitationCallback(({ requestId, message, schema }) => {
+ return new Promise((resolve2, reject) => {
+ try {
+ manager.getPendingElicitations().set(requestId, { resolve: resolve2, reject });
+ } catch {
+ }
+ broadcastElicitation({
+ type: "elicitation_request",
+ requestId,
+ message,
+ schema,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString()
+ });
+ });
+ });
+ isCallbackRegistered = true;
+ }
+ await next();
+ });
+ elicitation.get("/stream", async (c) => {
+ const encoder = new TextEncoder();
+ const stream = new ReadableStream({
+ start(controller) {
+ const send = (event) => {
+ const payload = `data: ${JSON.stringify(event)}
+
+ `;
+ controller.enqueue(encoder.encode(payload));
+ };
+ const keepAlive = setInterval(() => {
+ try {
+ controller.enqueue(encoder.encode(`: keep-alive
+
+ `));
+ } catch {
+ }
+ }, 25e3);
+ const close = () => {
+ clearInterval(keepAlive);
+ try {
+ controller.close();
+ } catch {
+ }
+ };
+ controller.enqueue(encoder.encode(`retry: 1500
+
+ `));
+ const subscriber = { send, close };
+ elicitationSubscribers.add(subscriber);
+ c.req.raw.signal?.addEventListener?.("abort", () => {
+ elicitationSubscribers.delete(subscriber);
+ close();
+ });
+ }
+ });
+ return new Response(stream, {
+ status: 200,
+ headers: {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache, no-transform",
+ Connection: "keep-alive",
+ "X-Accel-Buffering": "no",
+ "Access-Control-Allow-Origin": "*"
+ }
+ });
+ });
+ elicitation.post("/respond", async (c) => {
+ try {
+ const body = await c.req.json();
+ const { requestId, action, content } = body;
+ if (!requestId || !action) {
+ return c.json({ error: "Missing requestId or action" }, 400);
+ }
+ const response = action === "accept" ? { action: "accept", content: content ?? {} } : { action };
+ const ok = c.mcpClientManager.respondToElicitation(requestId, response);
+ if (!ok) {
+ return c.json({ error: "Unknown or expired requestId" }, 404);
+ }
+ broadcastElicitation({ type: "elicitation_complete", requestId });
+ return c.json({ ok: true });
+ } catch (e) {
+ return c.json({ error: e?.message || "Failed to respond" }, 400);
+ }
+ });
+ var elicitation_default = elicitation;
+
  // routes/mcp/index.ts
- var mcp = new Hono12();
+ var mcp = new Hono14();
  mcp.get("/health", (c) => {
  return c.json({
  service: "MCP API",
@@ -10271,6 +10577,8 @@ mcp.get("/health", (c) => {
  });
  });
  mcp.route("/chat", chat_default);
+ mcp.route("/chat-v2", chat_v2_default);
+ mcp.route("/elicitation", elicitation_default);
  mcp.route("/connect", connect_default);
  mcp.route("/servers", servers_default);
  mcp.route("/tools", tools_default);
@@ -10486,7 +10794,7 @@ try {
  fixPath();
  } catch {
  }
- var app = new Hono13().onError((err, c) => {
+ var app = new Hono15().onError((err, c) => {
  console.error("Unhandled error:", err);
  Sentry2.captureException(err);
  if (err instanceof HTTPException) {