@mcpjam/inspector 1.0.19 → 1.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -5,8 +5,8 @@
     <link rel="icon" type="image/svg+xml" href="/mcp_jam.svg" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>MCPJam Inspector</title>
-    <script type="module" crossorigin src="/assets/index-Bf9F3wwu.js"></script>
-    <link rel="stylesheet" crossorigin href="/assets/index-DkqfjygH.css">
+    <script type="module" crossorigin src="/assets/index-CqSEe2h2.js"></script>
+    <link rel="stylesheet" crossorigin href="/assets/index-BzedM9rr.css">
   </head>
   <body>
     <div id="root"></div>

Binary file
@@ -2123,6 +2123,7 @@ import { createDeepSeek } from "@ai-sdk/deepseek";
 import { createGoogleGenerativeAI } from "@ai-sdk/google";
 import { createMistral } from "@ai-sdk/mistral";
 import { createOpenAI } from "@ai-sdk/openai";
+import { createOpenRouter } from "@openrouter/ai-sdk-provider";
 import { createOllama } from "ollama-ai-provider-v2";
 var createLlmModel = (modelDefinition, apiKey, ollamaBaseUrl, litellmBaseUrl) => {
   if (!modelDefinition?.id || !modelDefinition?.provider) {
@@ -2155,6 +2156,8 @@ var createLlmModel = (modelDefinition, apiKey, ollamaBaseUrl, litellmBaseUrl) =>
       });
       return openai.chat(modelDefinition.id);
     }
+    case "openrouter":
+      return createOpenRouter({ apiKey })(modelDefinition.id);
     default:
       throw new Error(
         `Unsupported provider: ${modelDefinition.provider} for model: ${modelDefinition.id}`
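
The substantive change in 1.0.20 is this new "openrouter" branch in createLlmModel. A minimal sketch of what that branch does, outside the bundle; the environment variable and model id below are illustrative, not taken from the package:

// Sketch only: createOpenRouter({ apiKey }) returns a provider function that
// maps an OpenRouter model id to an AI SDK language model, just as the added
// case above does with modelDefinition.id.
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({ apiKey: process.env.OPENROUTER_API_KEY });
const model = openrouter("openai/gpt-4o-mini");
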
@@ -8961,7 +8964,7 @@ var isValidLlmApiKey = (key) => {
 };

 // ../evals-cli/src/utils/helpers.ts
-import { createOpenRouter } from "@openrouter/ai-sdk-provider";
+import { createOpenRouter as createOpenRouter2 } from "@openrouter/ai-sdk-provider";
 import { createOpenAI as createOpenAI2 } from "@ai-sdk/openai";
 import { createAnthropic as createAnthropic2 } from "@ai-sdk/anthropic";
 var createLlmModel2 = (provider, model, llmsConfig) => {
@@ -8974,7 +8977,7 @@ var createLlmModel2 = (provider, model, llmsConfig) => {
     case "openai":
       return createOpenAI2({ apiKey: llmsConfig.openai })(model);
     case "openrouter":
-      return createOpenRouter({ apiKey: llmsConfig.openrouter })(model);
+      return createOpenRouter2({ apiKey: llmsConfig.openrouter })(model);
     default:
       Logger.errorWithExit(`Unsupported provider: ${provider}`);
   }
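
The final two hunks do not change behavior. The evals-cli helper already supported OpenRouter; now that the bundle's top-level scope also binds createOpenRouter (the import added at 2126), the bundler appears to dedupe the name by aliasing the helper's import and updating its one call site, matching the existing createOpenAI2 / createAnthropic2 pattern. The aliased form is equivalent to the original; a sketch:

// Only the local binding name changed; the same factory is called either way.
import { createOpenRouter as createOpenRouter2 } from "@openrouter/ai-sdk-provider";

const makeOpenRouterModel = (id, apiKey) => createOpenRouter2({ apiKey })(id);
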