unified-ai-router 3.3.14 → 3.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +0 -2
- package/docs/.vitepress/dist/404.html +1 -1
- package/docs/.vitepress/dist/assets/{configuration.md.DCGQZB7w.js → configuration.md.CeBGysiY.js} +4 -4
- package/docs/.vitepress/dist/assets/configuration.md.CeBGysiY.lean.js +1 -0
- package/docs/.vitepress/dist/assets/index.md.D-8hZ8ti.js +1 -0
- package/docs/.vitepress/dist/assets/index.md.D-8hZ8ti.lean.js +1 -0
- package/docs/.vitepress/dist/configuration.html +4 -4
- package/docs/.vitepress/dist/hashmap.json +1 -1
- package/docs/.vitepress/dist/index.html +3 -3
- package/docs/.vitepress/dist/quickstart.html +1 -1
- package/docs/configuration.md +13 -1
- package/docs/index.md +2 -0
- package/main.js +60 -9
- package/package.json +2 -1
- package/provider.js +30 -18
- package/readme.md +1 -0
- package/docs/.vitepress/dist/assets/configuration.md.DCGQZB7w.lean.js +0 -1
- package/docs/.vitepress/dist/assets/index.md.D1fgr_Mo.js +0 -1
- package/docs/.vitepress/dist/assets/index.md.D1fgr_Mo.lean.js +0 -1
package/.env.example
CHANGED
package/docs/.vitepress/dist/404.html
CHANGED
@@ -19,7 +19,7 @@
</head>
<body>
<div id="app"></div>
-
<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"
+
<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"CeBGysiY\",\"index.md\":\"D-8hZ8ti\",\"quickstart.md\":\"mGNzdQVa\"}");window.__VP_SITE_DATA__=JSON.parse("{\"lang\":\"en-US\",\"dir\":\"ltr\",\"title\":\"Unified AI Router\",\"description\":\"OpenAI-compatible router with multi-provider fallback.\",\"base\":\"/Unified-AI-Router/\",\"head\":[],\"router\":{\"prefetchLinks\":true},\"appearance\":true,\"themeConfig\":{\"nav\":[{\"text\":\"Home\",\"link\":\"/\"},{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}],\"sidebar\":[{\"text\":\"Guide\",\"items\":[{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}]}],\"socialLinks\":[{\"icon\":\"github\",\"link\":\"https://github.com/mlibre/Unified-AI-Router\"},{\"icon\":\"npm\",\"link\":\"https://www.npmjs.com/package/unified-ai-router\"}]},\"locales\":{},\"scrollOffset\":134,\"cleanUrls\":false,\"additionalConfig\":{}}");</script>
|
</body>
</html>
package/docs/.vitepress/dist/assets/{configuration.md.DCGQZB7w.js → configuration.md.CeBGysiY.js}
RENAMED
@@ -1,10 +1,10 @@
-
import{_ as s,c as e,o as a,af as
+
import{_ as s,c as e,o as a,af as t}from"./chunks/framework.DLCvNBTH.js";const c=JSON.parse('{"title":"Configuration","description":"","frontmatter":{},"headers":[],"relativePath":"configuration.md","filePath":"configuration.md"}'),n={name:"configuration.md"};function l(o,i,r,p,h,d){return a(),e("div",null,[...i[0]||(i[0]=[t(`<h1 id="configuration" tabindex="-1">Configuration <a class="header-anchor" href="#configuration" aria-label="Permalink to “Configuration”"></a></h1><p>This page focuses on how to configure the router for local development and production deployments: setting environment variables (<code>.env</code>) and customizing <code>provider.js</code>.</p><h2 id="goals" tabindex="-1">Goals <a class="header-anchor" href="#goals" aria-label="Permalink to “Goals”"></a></h2><ul><li>Explain which environment variables the project expects and best practices for storing them.</li><li>Show how to author <code>provider.js</code> entries: enabling/disabling providers, ordering (fallback), and useful fields.</li><li>Provide examples for local, staging, and cloud deployments (Render.com, ...).</li><li>Troubleshooting tips when providers fail or models are not found.</li></ul><h2 id="env-environment-variables" tabindex="-1">.env (environment variables) <a class="header-anchor" href="#env-environment-variables" aria-label="Permalink to “.env (environment variables)”"></a></h2><p>The repository includes a <code>.env.example</code> file with common keys. Copy it to <code>.env</code> and fill the keys for the providers you plan to use:</p><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">cp</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> .env.example</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> .env</span></span>
<span class="line"><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;"># edit .env and paste your API keys</span></span></code></pre></div><h3 id="typical-keys" tabindex="-1">Typical keys <a class="header-anchor" href="#typical-keys" aria-label="Permalink to “Typical keys”"></a></h3><ul><li><code>OPENAI_API_KEY</code> — OpenAI API key</li><li><code>GEMINI_API_KEY</code> — Google Gemini API key</li><li><code>OPENROUTER_API_KEY</code> — OpenRouter key</li><li><code>COHERE_API_KEY</code>, <code>CEREBRAS_API_KEY</code>, <code>ZAI_API_KEY</code>, <code>GROK_API_KEY</code>, <code>QROQ_API_KEY</code>, <code>LLM7_API_KEY</code> — other providers</li><li><code>PORT</code> — optional, default 3000</li></ul><blockquote><p>Tip: use descriptive names and, when you need multiple keys for the same provider (e.g. multiple OpenRouter accounts), use suffixes like <code>OPENROUTER_API_KEY</code>, <code>OPENROUTER_API_KEY_2</code> and reference them from <code>provider.js</code>.</p></blockquote><h3 id="security-deployment" tabindex="-1">Security & deployment <a class="header-anchor" href="#security-deployment" aria-label="Permalink to “Security & deployment”"></a></h3><ul><li>Do <strong>not</strong> commit <code>.env</code> to Git. It is in <code>.gitignore</code> by default.</li><li>For cloud deployments, set the same variables in your provider’s environment configuration (Render, etc.).</li><li>Rotate keys regularly and use least-privileged keys where provider supports them.</li></ul><h2 id="provider-js-—-how-it-works" tabindex="-1"><code>provider.js</code> — how it works <a class="header-anchor" href="#provider-js-—-how-it-works" aria-label="Permalink to “provider.js — how it works”"></a></h2><p><code>provider.js</code> exports an <strong>ordered array</strong> of provider configuration objects. The router will attempt each provider in array order and fall back automatically if one fails.</p><p>Each provider object supports (at minimum) these fields:</p><div class="language-js"><button title="Copy Code" class="copy"></button><span class="lang">js</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">{</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> name</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"openai"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// simple identifier for logs/debug</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiKey</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: process.env.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">OPENAI_API_KEY</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> model</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"gpt-4"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// model id to request from this provider</span></span>
-
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://api.openai.com/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// base URL for provider-compatible OpenAI endpoints
-
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h3 id="important-notes" tabindex="-1">Important notes <a class="header-anchor" href="#important-notes" aria-label="Permalink to “Important notes”"></a></h3><ul><li><code>apiKey</code> should reference the environment variable (use <code>process.env.X</code>). If the env var is missing the router will skip that provider and log a warning.</li><li><code>model</code> should match the provider’s model name exactly. If a provider uses a different naming scheme, use the exact ID that the provider’s API expects.</li><li><code>apiUrl</code> is used to create the OpenAI-compatible client; if a provider exposes a compatibility endpoint (like OpenRouter), set it accordingly.</li><li>Duplicate <code>name</code> values are allowed but can make logs confusing; prefer unique names like <code>openai</code>, <code>openai-alt</code>.</li></ul><h3 id="example-provider-js-snippet" tabindex="-1">Example <code>provider.js</code> snippet <a class="header-anchor" href="#example-provider-js-snippet" aria-label="Permalink to “Example provider.js snippet”"></a></h3><div class="language-js"><button title="Copy Code" class="copy"></button><span class="lang">js</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">exports</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583;"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> [</span></span>
+
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://api.openai.com/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// base URL for provider-compatible OpenAI endpoints,</span></span>
+
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h3 id="circuit-breaker-configuration" tabindex="-1">Circuit Breaker Configuration <a class="header-anchor" href="#circuit-breaker-configuration" aria-label="Permalink to “Circuit Breaker Configuration”"></a></h3><p>The router includes built-in circuit breaker protection for each provider using the "opossum" library. This provides fault tolerance by automatically stopping requests to a provider that's experiencing issues and preventing cascading failures.</p><p>Default circuit breaker options:</p><ul><li><strong>timeout</strong>: 300000ms (5 minutes) - time before action is considered failed</li><li><strong>errorThresholdPercentage</strong>: 50% - percentage of failures before opening the circuit</li><li><strong>resetTimeout</strong>: 9000000ms (15 minutes) - time to wait before trying the provider again</li></ul><p>You can override these options per provider by passing <code>circuitOptions</code>.</p><h3 id="important-notes" tabindex="-1">Important notes <a class="header-anchor" href="#important-notes" aria-label="Permalink to “Important notes”"></a></h3><ul><li><code>apiKey</code> should reference the environment variable (use <code>process.env.X</code>). If the env var is missing the router will skip that provider and log a warning.</li><li><code>model</code> should match the provider’s model name exactly. If a provider uses a different naming scheme, use the exact ID that the provider’s API expects.</li><li><code>apiUrl</code> is used to create the OpenAI-compatible client; if a provider exposes a compatibility endpoint (like OpenRouter), set it accordingly.</li><li>Duplicate <code>name</code> values are allowed but can make logs confusing; prefer unique names like <code>openai</code>, <code>openai-alt</code>.</li></ul><h3 id="example-provider-js-snippet" tabindex="-1">Example <code>provider.js</code> snippet <a class="header-anchor" href="#example-provider-js-snippet" aria-label="Permalink to “Example provider.js snippet”"></a></h3><div class="language-js"><button title="Copy Code" class="copy"></button><span class="lang">js</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">exports</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583;"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> [</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> {</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> name: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"openai"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> apiKey: process.env.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">OPENAI_API_KEY</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
@@ -35,4 +35,4 @@ import{_ as s,c as e,o as a,af as n}from"./chunks/framework.DLCvNBTH.js";const c
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiKey</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: process.env.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">OPENROUTER_API_KEY_2</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// fallback / alternative account</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> model</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"z-ai/glm-4.5-air:free"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://openrouter.ai/api/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
-
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h2 id="model-selection-and-compatibility" tabindex="-1">Model selection and compatibility <a class="header-anchor" href="#model-selection-and-compatibility" aria-label="Permalink to “Model selection and compatibility”"></a></h2><ul><li>Choose a <code>model</code> that the provider actually exposes. The router attempts to list models via the provider client using <code>client.models.list()</code> — if the model is not found it will warn in logs.</li><li>Some providers require different model name formats (e.g. <code>models/gpt-4</code> vs <code>gpt-4</code>). If in doubt, query the provider’s models endpoint or check their docs.</li></ul><h2 id="tool-calling-and-streaming" tabindex="-1">Tool-calling and streaming <a class="header-anchor" href="#tool-calling-and-streaming" aria-label="Permalink to “Tool-calling and streaming”"></a></h2><ul><li>If you plan to use <strong>tools</strong> (the project supports OpenAI-style tool metadata), pass <code>tools</code> into <code>chatCompletion</code> calls and make sure the chosen provider supports tool-calling. Not all providers do.</li><li>Streaming is enabled by passing <code>stream: true</code> to the endpoint or API call. Ensure the provider supports SSE/streaming and model supports streaming.</li></ul><h2 id="local-testing-examples" tabindex="-1">Local testing & examples <a class="header-anchor" href="#local-testing-examples" aria-label="Permalink to “Local testing & examples”"></a></h2><ul><li>Non-streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-non-stream.js</span></span></code></pre></div><ul><li>Streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-stream.js</span></span></code></pre></div><ul><li>Library-level test (direct router):</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/chat.js</span></span></code></pre></div><h2 id="deployment-tips" tabindex="-1">Deployment tips <a class="header-anchor" href="#deployment-tips" aria-label="Permalink to “Deployment tips”"></a></h2><ul><li><strong>Render</strong>: Add the same env variables to service settings. 
Use <code>npm start</code> as the start command (project <code>package.json</code> already sets this).</li><li>If you change <code>.env</code> or <code>provider.js</code>, restart the Node process.</li></ul><h2 id="troubleshooting" tabindex="-1">Troubleshooting <a class="header-anchor" href="#troubleshooting" aria-label="Permalink to “Troubleshooting”"></a></h2><ul><li><code>Skipping provider ... due to missing API key</code> — check <code>.env</code> and deployment env configuration.</li><li><code>Model <name> not found</code> — ensure the <code>model</code> matches what the provider exposes or remove that provider from <code>provider.js</code> until you pick the right model.</li><li><code>All providers failed</code> — examine provider-specific error logs (the router logs each provider failure) and verify network access / API quotas.</li></ul>`,
+
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h2 id="model-selection-and-compatibility" tabindex="-1">Model selection and compatibility <a class="header-anchor" href="#model-selection-and-compatibility" aria-label="Permalink to “Model selection and compatibility”"></a></h2><ul><li>Choose a <code>model</code> that the provider actually exposes. The router attempts to list models via the provider client using <code>client.models.list()</code> — if the model is not found it will warn in logs.</li><li>Some providers require different model name formats (e.g. <code>models/gpt-4</code> vs <code>gpt-4</code>). If in doubt, query the provider’s models endpoint or check their docs.</li></ul><h2 id="tool-calling-and-streaming" tabindex="-1">Tool-calling and streaming <a class="header-anchor" href="#tool-calling-and-streaming" aria-label="Permalink to “Tool-calling and streaming”"></a></h2><ul><li>If you plan to use <strong>tools</strong> (the project supports OpenAI-style tool metadata), pass <code>tools</code> into <code>chatCompletion</code> calls and make sure the chosen provider supports tool-calling. Not all providers do.</li><li>Streaming is enabled by passing <code>stream: true</code> to the endpoint or API call. Ensure the provider supports SSE/streaming and model supports streaming.</li></ul><h2 id="local-testing-examples" tabindex="-1">Local testing & examples <a class="header-anchor" href="#local-testing-examples" aria-label="Permalink to “Local testing & examples”"></a></h2><ul><li>Non-streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-non-stream.js</span></span></code></pre></div><ul><li>Streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-stream.js</span></span></code></pre></div><ul><li>Library-level test (direct router):</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/chat.js</span></span></code></pre></div><h2 id="deployment-tips" tabindex="-1">Deployment tips <a class="header-anchor" href="#deployment-tips" aria-label="Permalink to “Deployment tips”"></a></h2><ul><li><strong>Render</strong>: Add the same env variables to service settings. 
Use <code>npm start</code> as the start command (project <code>package.json</code> already sets this).</li><li>If you change <code>.env</code> or <code>provider.js</code>, restart the Node process.</li></ul><h2 id="troubleshooting" tabindex="-1">Troubleshooting <a class="header-anchor" href="#troubleshooting" aria-label="Permalink to “Troubleshooting”"></a></h2><ul><li><code>Skipping provider ... due to missing API key</code> — check <code>.env</code> and deployment env configuration.</li><li><code>Model <name> not found</code> — ensure the <code>model</code> matches what the provider exposes or remove that provider from <code>provider.js</code> until you pick the right model.</li><li><code>All providers failed</code> — examine provider-specific error logs (the router logs each provider failure) and verify network access / API quotas.</li></ul>`,42)])])}const E=s(n,[["render",l]]);export{c as __pageData,E as default};
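The hunk above adds documentation for the new per-provider circuit breaker (built on the opossum library) and its circuitOptions override. A minimal provider.js sketch of such an override, assuming only what the added docs state (the circuitOptions field name and opossum's standard option names); the values are illustrative overrides, not the package defaults:

// Illustrative provider.js entry; circuitOptions is the per-provider override
// named in the docs above, using standard opossum option names.
module.exports = [
  {
    name: "openrouter",
    apiKey: process.env.OPENROUTER_API_KEY,
    model: "z-ai/glm-4.5-air:free",
    apiUrl: "https://openrouter.ai/api/v1",
    circuitOptions: {
      timeout: 60000,                // consider a call failed after 60 s
      errorThresholdPercentage: 25,  // open the circuit sooner than the 50 % default
      resetTimeout: 120000           // try the provider again after 2 minutes
    }
  }
];

Whether the router merges circuitOptions with the defaults or replaces them entirely is not visible in this part of the diff.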
package/docs/.vitepress/dist/assets/configuration.md.CeBGysiY.lean.js
ADDED
@@ -0,0 +1 @@
+
import{_ as s,c as e,o as a,af as t}from"./chunks/framework.DLCvNBTH.js";const c=JSON.parse('{"title":"Configuration","description":"","frontmatter":{},"headers":[],"relativePath":"configuration.md","filePath":"configuration.md"}'),n={name:"configuration.md"};function l(o,i,r,p,h,d){return a(),e("div",null,[...i[0]||(i[0]=[t("",42)])])}const E=s(n,[["render",l]]);export{c as __pageData,E as default};
package/docs/.vitepress/dist/assets/index.md.D-8hZ8ti.js
ADDED
@@ -0,0 +1 @@
+
import{_ as t,c as e,o as a}from"./chunks/framework.DLCvNBTH.js";const m=JSON.parse('{"title":"","description":"","frontmatter":{"layout":"home","hero":{"name":"Unified AI Router","text":"A lightweight OpenAI-compatible server in Nodejs","tagline":"OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.","actions":[{"theme":"brand","text":"Quickstart","link":"/quickstart"},{"theme":"alt","text":"Configuration","link":"/configuration"}]},"features":[{"title":"Multi-provider fallback","details":"If one provider fails, requests automatically fall back to the next available provider."},{"title":"Circuit breaker protection","details":"Built-in fault tolerance with automatic circuit breaking for each provider to prevent cascading failures."},{"title":"OpenAI-compatible API","details":"Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming)."},{"title":"Tool calling & streaming","details":"Supports tool-calling metadata and SSE streaming to integrate with existing tooling."},{"title":"Easy to self-host","details":"Run locally or deploy to cloud host providers."}]},"headers":[],"relativePath":"index.md","filePath":"index.md"}'),i={name:"index.md"};function o(n,l,r,s,c,d){return a(),e("div")}const u=t(i,[["render",o]]);export{m as __pageData,u as default};
package/docs/.vitepress/dist/assets/index.md.D-8hZ8ti.lean.js
ADDED
@@ -0,0 +1 @@
+
import{_ as t,c as e,o as a}from"./chunks/framework.DLCvNBTH.js";const m=JSON.parse('{"title":"","description":"","frontmatter":{"layout":"home","hero":{"name":"Unified AI Router","text":"A lightweight OpenAI-compatible server in Nodejs","tagline":"OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.","actions":[{"theme":"brand","text":"Quickstart","link":"/quickstart"},{"theme":"alt","text":"Configuration","link":"/configuration"}]},"features":[{"title":"Multi-provider fallback","details":"If one provider fails, requests automatically fall back to the next available provider."},{"title":"Circuit breaker protection","details":"Built-in fault tolerance with automatic circuit breaking for each provider to prevent cascading failures."},{"title":"OpenAI-compatible API","details":"Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming)."},{"title":"Tool calling & streaming","details":"Supports tool-calling metadata and SSE streaming to integrate with existing tooling."},{"title":"Easy to self-host","details":"Run locally or deploy to cloud host providers."}]},"headers":[],"relativePath":"index.md","filePath":"index.md"}'),i={name:"index.md"};function o(n,l,r,s,c,d){return a(),e("div")}const u=t(i,[["render",o]]);export{m as __pageData,u as default};
package/docs/.vitepress/dist/configuration.html
CHANGED
@@ -13,7 +13,7 @@
<link rel="preload" href="/Unified-AI-Router/assets/inter-roman-latin.Di8DUHzh.woff2" as="font" type="font/woff2" crossorigin="">
<link rel="modulepreload" href="/Unified-AI-Router/assets/chunks/theme.h95WUA9L.js">
<link rel="modulepreload" href="/Unified-AI-Router/assets/chunks/framework.DLCvNBTH.js">
-
<link rel="modulepreload" href="/Unified-AI-Router/assets/configuration.md.
+
<link rel="modulepreload" href="/Unified-AI-Router/assets/configuration.md.CeBGysiY.lean.js">
<link rel="icon" href="favicon.png">
<link rel="icon" type="image/png" href="favicon.png">
<link rel="apple-touch-icon" href="favicon.png">
@@ -26,8 +26,8 @@
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> name</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"openai"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// simple identifier for logs/debug</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiKey</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: process.env.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">OPENAI_API_KEY</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> model</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"gpt-4"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// model id to request from this provider</span></span>
-
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://api.openai.com/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// base URL for provider-compatible OpenAI endpoints
-
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h3 id="important-notes" tabindex="-1">Important notes <a class="header-anchor" href="#important-notes" aria-label="Permalink to “Important notes”"></a></h3><ul><li><code>apiKey</code> should reference the environment variable (use <code>process.env.X</code>). If the env var is missing the router will skip that provider and log a warning.</li><li><code>model</code> should match the provider’s model name exactly. If a provider uses a different naming scheme, use the exact ID that the provider’s API expects.</li><li><code>apiUrl</code> is used to create the OpenAI-compatible client; if a provider exposes a compatibility endpoint (like OpenRouter), set it accordingly.</li><li>Duplicate <code>name</code> values are allowed but can make logs confusing; prefer unique names like <code>openai</code>, <code>openai-alt</code>.</li></ul><h3 id="example-provider-js-snippet" tabindex="-1">Example <code>provider.js</code> snippet <a class="header-anchor" href="#example-provider-js-snippet" aria-label="Permalink to “Example provider.js snippet”"></a></h3><div class="language-js"><button title="Copy Code" class="copy"></button><span class="lang">js</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">exports</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583;"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> [</span></span>
+
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://api.openai.com/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">, </span><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;">// base URL for provider-compatible OpenAI endpoints,</span></span>
+
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h3 id="circuit-breaker-configuration" tabindex="-1">Circuit Breaker Configuration <a class="header-anchor" href="#circuit-breaker-configuration" aria-label="Permalink to “Circuit Breaker Configuration”"></a></h3><p>The router includes built-in circuit breaker protection for each provider using the "opossum" library. This provides fault tolerance by automatically stopping requests to a provider that's experiencing issues and preventing cascading failures.</p><p>Default circuit breaker options:</p><ul><li><strong>timeout</strong>: 300000ms (5 minutes) - time before action is considered failed</li><li><strong>errorThresholdPercentage</strong>: 50% - percentage of failures before opening the circuit</li><li><strong>resetTimeout</strong>: 9000000ms (15 minutes) - time to wait before trying the provider again</li></ul><p>You can override these options per provider by passing <code>circuitOptions</code>.</p><h3 id="important-notes" tabindex="-1">Important notes <a class="header-anchor" href="#important-notes" aria-label="Permalink to “Important notes”"></a></h3><ul><li><code>apiKey</code> should reference the environment variable (use <code>process.env.X</code>). If the env var is missing the router will skip that provider and log a warning.</li><li><code>model</code> should match the provider’s model name exactly. If a provider uses a different naming scheme, use the exact ID that the provider’s API expects.</li><li><code>apiUrl</code> is used to create the OpenAI-compatible client; if a provider exposes a compatibility endpoint (like OpenRouter), set it accordingly.</li><li>Duplicate <code>name</code> values are allowed but can make logs confusing; prefer unique names like <code>openai</code>, <code>openai-alt</code>.</li></ul><h3 id="example-provider-js-snippet" tabindex="-1">Example <code>provider.js</code> snippet <a class="header-anchor" href="#example-provider-js-snippet" aria-label="Permalink to “Example provider.js snippet”"></a></h3><div class="language-js"><button title="Copy Code" class="copy"></button><span class="lang">js</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">module</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">exports</span><span style="--shiki-light:#D73A49;--shiki-dark:#F97583;"> =</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> [</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> {</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> name: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"openai"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;"> apiKey: process.env.</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;">OPENAI_API_KEY</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
@@ -59,7 +59,7 @@
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> model</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"z-ai/glm-4.5-air:free"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;"> apiUrl</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">: </span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;">"https://openrouter.ai/api/v1"</span><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">,</span></span>
<span class="line"><span style="--shiki-light:#24292E;--shiki-dark:#E1E4E8;">}</span></span></code></pre></div><h2 id="model-selection-and-compatibility" tabindex="-1">Model selection and compatibility <a class="header-anchor" href="#model-selection-and-compatibility" aria-label="Permalink to “Model selection and compatibility”"></a></h2><ul><li>Choose a <code>model</code> that the provider actually exposes. The router attempts to list models via the provider client using <code>client.models.list()</code> — if the model is not found it will warn in logs.</li><li>Some providers require different model name formats (e.g. <code>models/gpt-4</code> vs <code>gpt-4</code>). If in doubt, query the provider’s models endpoint or check their docs.</li></ul><h2 id="tool-calling-and-streaming" tabindex="-1">Tool-calling and streaming <a class="header-anchor" href="#tool-calling-and-streaming" aria-label="Permalink to “Tool-calling and streaming”"></a></h2><ul><li>If you plan to use <strong>tools</strong> (the project supports OpenAI-style tool metadata), pass <code>tools</code> into <code>chatCompletion</code> calls and make sure the chosen provider supports tool-calling. Not all providers do.</li><li>Streaming is enabled by passing <code>stream: true</code> to the endpoint or API call. Ensure the provider supports SSE/streaming and model supports streaming.</li></ul><h2 id="local-testing-examples" tabindex="-1">Local testing & examples <a class="header-anchor" href="#local-testing-examples" aria-label="Permalink to “Local testing & examples”"></a></h2><ul><li>Non-streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-non-stream.js</span></span></code></pre></div><ul><li>Streaming test:</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/openai-server-stream.js</span></span></code></pre></div><ul><li>Library-level test (direct router):</li></ul><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">node</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> tests/chat.js</span></span></code></pre></div><h2 id="deployment-tips" tabindex="-1">Deployment tips <a class="header-anchor" href="#deployment-tips" aria-label="Permalink to “Deployment tips”"></a></h2><ul><li><strong>Render</strong>: Add the same env variables to service settings. 
Use <code>npm start</code> as the start command (project <code>package.json</code> already sets this).</li><li>If you change <code>.env</code> or <code>provider.js</code>, restart the Node process.</li></ul><h2 id="troubleshooting" tabindex="-1">Troubleshooting <a class="header-anchor" href="#troubleshooting" aria-label="Permalink to “Troubleshooting”"></a></h2><ul><li><code>Skipping provider ... due to missing API key</code> — check <code>.env</code> and deployment env configuration.</li><li><code>Model <name> not found</code> — ensure the <code>model</code> matches what the provider exposes or remove that provider from <code>provider.js</code> until you pick the right model.</li><li><code>All providers failed</code> — examine provider-specific error logs (the router logs each provider failure) and verify network access / API quotas.</li></ul></div></div></main><footer class="VPDocFooter" data-v-7011f0d8 data-v-e257564d><!--[--><!--]--><!----><nav class="prev-next" aria-labelledby="doc-footer-aria-label" data-v-e257564d><span class="visually-hidden" id="doc-footer-aria-label" data-v-e257564d>Pager</span><div class="pager" data-v-e257564d><a class="VPLink link pager-link prev" href="/Unified-AI-Router/quickstart.html" data-v-e257564d><!--[--><span class="desc" data-v-e257564d>Previous page</span><span class="title" data-v-e257564d>Quickstart</span><!--]--></a></div><div class="pager" data-v-e257564d><!----></div></nav></footer><!--[--><!--]--></div></div></div><!--[--><!--]--></div></div><!----><!--[--><!--]--></div></div>
-
<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"
+
<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"CeBGysiY\",\"index.md\":\"D-8hZ8ti\",\"quickstart.md\":\"mGNzdQVa\"}");window.__VP_SITE_DATA__=JSON.parse("{\"lang\":\"en-US\",\"dir\":\"ltr\",\"title\":\"Unified AI Router\",\"description\":\"OpenAI-compatible router with multi-provider fallback.\",\"base\":\"/Unified-AI-Router/\",\"head\":[],\"router\":{\"prefetchLinks\":true},\"appearance\":true,\"themeConfig\":{\"nav\":[{\"text\":\"Home\",\"link\":\"/\"},{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}],\"sidebar\":[{\"text\":\"Guide\",\"items\":[{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}]}],\"socialLinks\":[{\"icon\":\"github\",\"link\":\"https://github.com/mlibre/Unified-AI-Router\"},{\"icon\":\"npm\",\"link\":\"https://www.npmjs.com/package/unified-ai-router\"}]},\"locales\":{},\"scrollOffset\":134,\"cleanUrls\":false,\"additionalConfig\":{}}");</script>
|
</body>
</html>
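The rendered configuration page above also notes that streaming is enabled by passing stream: true and delivered over SSE. A hedged sketch of consuming that stream through the documented POST /v1/chat/completions endpoint, assuming Node 18+ (global fetch), a router started with npm start on the default port 3000, and a model name that your provider.js actually exposes:

// Prints the raw SSE chunks; parsing the "data:" lines is left out.
async function streamChat() {
  const res = await fetch("http://localhost:3000/v1/chat/completions", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: "gpt-4",            // assumption: substitute a model from your provider.js
      stream: true,
      messages: [{ role: "user", content: "Say hi" }]
    })
  });
  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    process.stdout.write(decoder.decode(value, { stream: true }));
  }
}
streamChat().catch(console.error);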
package/docs/.vitepress/dist/hashmap.json
CHANGED
@@ -1 +1 @@
-
{"configuration.md":"
+
{"configuration.md":"CeBGysiY","index.md":"D-8hZ8ti","quickstart.md":"mGNzdQVa"}
package/docs/.vitepress/dist/index.html
CHANGED
@@ -13,7 +13,7 @@
<link rel="preload" href="/Unified-AI-Router/assets/inter-roman-latin.Di8DUHzh.woff2" as="font" type="font/woff2" crossorigin="">
<link rel="modulepreload" href="/Unified-AI-Router/assets/chunks/theme.h95WUA9L.js">
<link rel="modulepreload" href="/Unified-AI-Router/assets/chunks/framework.DLCvNBTH.js">
-
<link rel="modulepreload" href="/Unified-AI-Router/assets/index.md.
+
<link rel="modulepreload" href="/Unified-AI-Router/assets/index.md.D-8hZ8ti.lean.js">
<link rel="icon" href="favicon.png">
<link rel="icon" type="image/png" href="favicon.png">
<link rel="apple-touch-icon" href="favicon.png">
@@ -21,8 +21,8 @@
<script id="check-mac-os">document.documentElement.classList.toggle("mac",/Mac|iPhone|iPod|iPad/i.test(navigator.platform));</script>
</head>
<body>
-
<div id="app"><div class="Layout" data-v-1df9f90f><!--[--><!--]--><!--[--><span tabindex="-1" data-v-0b0ada53></span><a href="#VPContent" class="VPSkipLink visually-hidden" data-v-0b0ada53>Skip to content</a><!--]--><!----><header class="VPNav" data-v-1df9f90f data-v-9f75dce3><div class="VPNavBar" data-v-9f75dce3 data-v-2a96a3d0><div class="wrapper" data-v-2a96a3d0><div class="container" data-v-2a96a3d0><div class="title" data-v-2a96a3d0><div class="VPNavBarTitle" data-v-2a96a3d0 data-v-1e38c6bc><a class="title" href="/Unified-AI-Router/" data-v-1e38c6bc><!--[--><!--]--><!----><span data-v-1e38c6bc>Unified AI Router</span><!--[--><!--]--></a></div></div><div class="content" data-v-2a96a3d0><div class="content-body" data-v-2a96a3d0><!--[--><!--]--><div class="VPNavBarSearch search" data-v-2a96a3d0><!----></div><nav aria-labelledby="main-nav-aria-label" class="VPNavBarMenu menu" data-v-2a96a3d0 data-v-39714824><span id="main-nav-aria-label" class="visually-hidden" data-v-39714824> Main Navigation </span><!--[--><!--[--><a class="VPLink link VPNavBarMenuLink active" href="/Unified-AI-Router/" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Home</span><!--]--></a><!--]--><!--[--><a class="VPLink link VPNavBarMenuLink" href="/Unified-AI-Router/quickstart.html" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Quickstart</span><!--]--></a><!--]--><!--[--><a class="VPLink link VPNavBarMenuLink" href="/Unified-AI-Router/configuration.html" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Configuration</span><!--]--></a><!--]--><!--]--></nav><!----><div class="VPNavBarAppearance appearance" data-v-2a96a3d0 data-v-6c893767><button class="VPSwitch VPSwitchAppearance" type="button" role="switch" title aria-checked="false" data-v-6c893767 data-v-5337faa4 data-v-1d5665e3><span class="check" data-v-1d5665e3><span class="icon" data-v-1d5665e3><!--[--><span class="vpi-sun sun" data-v-5337faa4></span><span class="vpi-moon moon" data-v-5337faa4></span><!--]--></span></span></button></div><div class="VPSocialLinks VPNavBarSocialLinks social-links" data-v-2a96a3d0 data-v-0394ad82 data-v-d07f11e6><!--[--><a class="VPSocialLink no-icon" href="https://github.com/mlibre/Unified-AI-Router" aria-label="github" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-github"></span></a><a class="VPSocialLink no-icon" href="https://www.npmjs.com/package/unified-ai-router" aria-label="npm" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-npm"></span></a><!--]--></div><div class="VPFlyout VPNavBarExtra extra" data-v-2a96a3d0 data-v-bb2aa2f0 data-v-42cb505d><button type="button" class="button" aria-haspopup="true" aria-expanded="false" aria-label="extra navigation" data-v-42cb505d><span class="vpi-more-horizontal icon" data-v-42cb505d></span></button><div class="menu" data-v-42cb505d><div class="VPMenu" data-v-42cb505d data-v-25a6cce8><!----><!--[--><!--[--><!----><div class="group" data-v-bb2aa2f0><div class="item appearance" data-v-bb2aa2f0><p class="label" data-v-bb2aa2f0>Appearance</p><div class="appearance-action" data-v-bb2aa2f0><button class="VPSwitch VPSwitchAppearance" type="button" role="switch" title aria-checked="false" data-v-bb2aa2f0 data-v-5337faa4 data-v-1d5665e3><span class="check" data-v-1d5665e3><span class="icon" data-v-1d5665e3><!--[--><span class="vpi-sun sun" data-v-5337faa4></span><span class="vpi-moon moon" 
data-v-5337faa4></span><!--]--></span></span></button></div></div></div><div class="group" data-v-bb2aa2f0><div class="item social-links" data-v-bb2aa2f0><div class="VPSocialLinks social-links-list" data-v-bb2aa2f0 data-v-d07f11e6><!--[--><a class="VPSocialLink no-icon" href="https://github.com/mlibre/Unified-AI-Router" aria-label="github" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-github"></span></a><a class="VPSocialLink no-icon" href="https://www.npmjs.com/package/unified-ai-router" aria-label="npm" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-npm"></span></a><!--]--></div></div></div><!--]--><!--]--></div></div></div><!--[--><!--]--><button type="button" class="VPNavBarHamburger hamburger" aria-label="mobile navigation" aria-expanded="false" aria-controls="VPNavScreen" data-v-2a96a3d0 data-v-e5dd9c1c><span class="container" data-v-e5dd9c1c><span class="top" data-v-e5dd9c1c></span><span class="middle" data-v-e5dd9c1c></span><span class="bottom" data-v-e5dd9c1c></span></span></button></div></div></div></div><div class="divider" data-v-2a96a3d0><div class="divider-line" data-v-2a96a3d0></div></div></div><!----></header><!----><!----><div class="VPContent is-home" id="VPContent" data-v-1df9f90f data-v-aff0b8d7><div class="VPHome" data-v-aff0b8d7 data-v-8b561e3d><!--[--><!--]--><div class="VPHero VPHomeHero" data-v-8b561e3d data-v-1e96e9e8><div class="container" data-v-1e96e9e8><div class="main" data-v-1e96e9e8><!--[--><!--]--><!--[--><h1 class="heading" data-v-1e96e9e8><span class="name clip" data-v-1e96e9e8>Unified AI Router</span><span class="text" data-v-1e96e9e8>A lightweight OpenAI-compatible server in Nodejs</span></h1><p class="tagline" data-v-1e96e9e8>OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.</p><!--]--><!--[--><!--]--><div class="actions" data-v-1e96e9e8><!--[--><div class="action" data-v-1e96e9e8><a class="VPButton medium brand" href="/Unified-AI-Router/quickstart.html" data-v-1e96e9e8 data-v-01bff58b><!--[-->Quickstart<!--]--></a></div><div class="action" data-v-1e96e9e8><a class="VPButton medium alt" href="/Unified-AI-Router/configuration.html" data-v-1e96e9e8 data-v-01bff58b><!--[-->Configuration<!--]--></a></div><!--]--></div><!--[--><!--]--></div><!----></div></div><!--[--><!--]--><!--[--><!--]--><div class="VPFeatures VPHomeFeatures" data-v-8b561e3d data-v-a6181336><div class="container" data-v-a6181336><div class="items" data-v-a6181336><!--[--><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Multi-provider fallback</h2><p class="details" data-v-5219619b>If one provider fails, requests automatically fall back to the next available provider.</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>OpenAI-compatible API</h2><p class="details" data-v-5219619b>Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming).</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" 
data-v-5219619b>Tool calling & streaming</h2><p class="details" data-v-5219619b>Supports tool-calling metadata and SSE streaming to integrate with existing tooling.</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Easy to self-host</h2><p class="details" data-v-5219619b>Run locally or deploy to cloud host providers.</p><!----></article><!--]--></div></div><!--]--></div></div></div><!--[--><!--]--><div class="vp-doc container" style="" data-v-8b561e3d data-v-8e2d4988><!--[--><div style="position:relative;" data-v-8b561e3d><div></div></div><!--]--></div></div></div><!----><!--[--><!--]--></div></div>
-
<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"
+
<div id="app"><div class="Layout" data-v-1df9f90f><!--[--><!--]--><!--[--><span tabindex="-1" data-v-0b0ada53></span><a href="#VPContent" class="VPSkipLink visually-hidden" data-v-0b0ada53>Skip to content</a><!--]--><!----><header class="VPNav" data-v-1df9f90f data-v-9f75dce3><div class="VPNavBar" data-v-9f75dce3 data-v-2a96a3d0><div class="wrapper" data-v-2a96a3d0><div class="container" data-v-2a96a3d0><div class="title" data-v-2a96a3d0><div class="VPNavBarTitle" data-v-2a96a3d0 data-v-1e38c6bc><a class="title" href="/Unified-AI-Router/" data-v-1e38c6bc><!--[--><!--]--><!----><span data-v-1e38c6bc>Unified AI Router</span><!--[--><!--]--></a></div></div><div class="content" data-v-2a96a3d0><div class="content-body" data-v-2a96a3d0><!--[--><!--]--><div class="VPNavBarSearch search" data-v-2a96a3d0><!----></div><nav aria-labelledby="main-nav-aria-label" class="VPNavBarMenu menu" data-v-2a96a3d0 data-v-39714824><span id="main-nav-aria-label" class="visually-hidden" data-v-39714824> Main Navigation </span><!--[--><!--[--><a class="VPLink link VPNavBarMenuLink active" href="/Unified-AI-Router/" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Home</span><!--]--></a><!--]--><!--[--><a class="VPLink link VPNavBarMenuLink" href="/Unified-AI-Router/quickstart.html" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Quickstart</span><!--]--></a><!--]--><!--[--><a class="VPLink link VPNavBarMenuLink" href="/Unified-AI-Router/configuration.html" tabindex="0" data-v-39714824 data-v-e56f3d57><!--[--><span data-v-e56f3d57>Configuration</span><!--]--></a><!--]--><!--]--></nav><!----><div class="VPNavBarAppearance appearance" data-v-2a96a3d0 data-v-6c893767><button class="VPSwitch VPSwitchAppearance" type="button" role="switch" title aria-checked="false" data-v-6c893767 data-v-5337faa4 data-v-1d5665e3><span class="check" data-v-1d5665e3><span class="icon" data-v-1d5665e3><!--[--><span class="vpi-sun sun" data-v-5337faa4></span><span class="vpi-moon moon" data-v-5337faa4></span><!--]--></span></span></button></div><div class="VPSocialLinks VPNavBarSocialLinks social-links" data-v-2a96a3d0 data-v-0394ad82 data-v-d07f11e6><!--[--><a class="VPSocialLink no-icon" href="https://github.com/mlibre/Unified-AI-Router" aria-label="github" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-github"></span></a><a class="VPSocialLink no-icon" href="https://www.npmjs.com/package/unified-ai-router" aria-label="npm" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-npm"></span></a><!--]--></div><div class="VPFlyout VPNavBarExtra extra" data-v-2a96a3d0 data-v-bb2aa2f0 data-v-42cb505d><button type="button" class="button" aria-haspopup="true" aria-expanded="false" aria-label="extra navigation" data-v-42cb505d><span class="vpi-more-horizontal icon" data-v-42cb505d></span></button><div class="menu" data-v-42cb505d><div class="VPMenu" data-v-42cb505d data-v-25a6cce8><!----><!--[--><!--[--><!----><div class="group" data-v-bb2aa2f0><div class="item appearance" data-v-bb2aa2f0><p class="label" data-v-bb2aa2f0>Appearance</p><div class="appearance-action" data-v-bb2aa2f0><button class="VPSwitch VPSwitchAppearance" type="button" role="switch" title aria-checked="false" data-v-bb2aa2f0 data-v-5337faa4 data-v-1d5665e3><span class="check" data-v-1d5665e3><span class="icon" data-v-1d5665e3><!--[--><span class="vpi-sun sun" data-v-5337faa4></span><span class="vpi-moon moon" 
data-v-5337faa4></span><!--]--></span></span></button></div></div></div><div class="group" data-v-bb2aa2f0><div class="item social-links" data-v-bb2aa2f0><div class="VPSocialLinks social-links-list" data-v-bb2aa2f0 data-v-d07f11e6><!--[--><a class="VPSocialLink no-icon" href="https://github.com/mlibre/Unified-AI-Router" aria-label="github" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-github"></span></a><a class="VPSocialLink no-icon" href="https://www.npmjs.com/package/unified-ai-router" aria-label="npm" target="_blank" rel="me noopener" data-v-d07f11e6 data-v-591a6b30><span class="vpi-social-npm"></span></a><!--]--></div></div></div><!--]--><!--]--></div></div></div><!--[--><!--]--><button type="button" class="VPNavBarHamburger hamburger" aria-label="mobile navigation" aria-expanded="false" aria-controls="VPNavScreen" data-v-2a96a3d0 data-v-e5dd9c1c><span class="container" data-v-e5dd9c1c><span class="top" data-v-e5dd9c1c></span><span class="middle" data-v-e5dd9c1c></span><span class="bottom" data-v-e5dd9c1c></span></span></button></div></div></div></div><div class="divider" data-v-2a96a3d0><div class="divider-line" data-v-2a96a3d0></div></div></div><!----></header><!----><!----><div class="VPContent is-home" id="VPContent" data-v-1df9f90f data-v-aff0b8d7><div class="VPHome" data-v-aff0b8d7 data-v-8b561e3d><!--[--><!--]--><div class="VPHero VPHomeHero" data-v-8b561e3d data-v-1e96e9e8><div class="container" data-v-1e96e9e8><div class="main" data-v-1e96e9e8><!--[--><!--]--><!--[--><h1 class="heading" data-v-1e96e9e8><span class="name clip" data-v-1e96e9e8>Unified AI Router</span><span class="text" data-v-1e96e9e8>A lightweight OpenAI-compatible server in Nodejs</span></h1><p class="tagline" data-v-1e96e9e8>OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.</p><!--]--><!--[--><!--]--><div class="actions" data-v-1e96e9e8><!--[--><div class="action" data-v-1e96e9e8><a class="VPButton medium brand" href="/Unified-AI-Router/quickstart.html" data-v-1e96e9e8 data-v-01bff58b><!--[-->Quickstart<!--]--></a></div><div class="action" data-v-1e96e9e8><a class="VPButton medium alt" href="/Unified-AI-Router/configuration.html" data-v-1e96e9e8 data-v-01bff58b><!--[-->Configuration<!--]--></a></div><!--]--></div><!--[--><!--]--></div><!----></div></div><!--[--><!--]--><!--[--><!--]--><div class="VPFeatures VPHomeFeatures" data-v-8b561e3d data-v-a6181336><div class="container" data-v-a6181336><div class="items" data-v-a6181336><!--[--><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Multi-provider fallback</h2><p class="details" data-v-5219619b>If one provider fails, requests automatically fall back to the next available provider.</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Circuit breaker protection</h2><p class="details" data-v-5219619b>Built-in fault tolerance with automatic circuit breaking for each provider to prevent cascading failures.</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" 
data-v-5219619b>OpenAI-compatible API</h2><p class="details" data-v-5219619b>Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming).</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Tool calling & streaming</h2><p class="details" data-v-5219619b>Supports tool-calling metadata and SSE streaming to integrate with existing tooling.</p><!----></article><!--]--></div></div><div class="grid-4 item" data-v-a6181336><div class="VPLink no-icon VPFeature" data-v-a6181336 data-v-5219619b><!--[--><article class="box" data-v-5219619b><!----><h2 class="title" data-v-5219619b>Easy to self-host</h2><p class="details" data-v-5219619b>Run locally or deploy to cloud host providers.</p><!----></article><!--]--></div></div><!--]--></div></div></div><!--[--><!--]--><div class="vp-doc container" style="" data-v-8b561e3d data-v-8e2d4988><!--[--><div style="position:relative;" data-v-8b561e3d><div></div></div><!--]--></div></div></div><!----><!--[--><!--]--></div></div>
+<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"CeBGysiY\",\"index.md\":\"D-8hZ8ti\",\"quickstart.md\":\"mGNzdQVa\"}");window.__VP_SITE_DATA__=JSON.parse("{\"lang\":\"en-US\",\"dir\":\"ltr\",\"title\":\"Unified AI Router\",\"description\":\"OpenAI-compatible router with multi-provider fallback.\",\"base\":\"/Unified-AI-Router/\",\"head\":[],\"router\":{\"prefetchLinks\":true},\"appearance\":true,\"themeConfig\":{\"nav\":[{\"text\":\"Home\",\"link\":\"/\"},{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}],\"sidebar\":[{\"text\":\"Guide\",\"items\":[{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}]}],\"socialLinks\":[{\"icon\":\"github\",\"link\":\"https://github.com/mlibre/Unified-AI-Router\"},{\"icon\":\"npm\",\"link\":\"https://www.npmjs.com/package/unified-ai-router\"}]},\"locales\":{},\"scrollOffset\":134,\"cleanUrls\":false,\"additionalConfig\":{}}");</script>
</body>
</html>

package/docs/.vitepress/dist/quickstart.html
CHANGED

@@ -27,7 +27,7 @@
<span class="line"><span style="--shiki-light:#6A737D;--shiki-dark:#6A737D;"># edit .env and add keys (OPENAI_API_KEY, GEMINI_API_KEY, etc.)</span></span></code></pre></div><p>Edit <code>provider.js</code> to enable or reorder providers (the router tries them in array order).</p><h2 id="_3-start-the-server" tabindex="-1">3) Start the server <a class="header-anchor" href="#_3-start-the-server" aria-label="Permalink to “3) Start the server”"></a></h2><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">npm</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> start</span></span></code></pre></div><p>By default the server listens on <code>http://localhost:3000</code> and supports these OpenAI-compatible endpoints:</p><ul><li><code>POST /v1/chat/completions</code> — streaming & non-streaming chat completions</li><li><code>POST /chat/completions</code> — same as above (alternate path)</li><li><code>GET /v1/models</code> & <code>GET /models</code> — lists models available from providers</li><li><code>GET /health</code> — health check</li></ul><h2 id="_4-quick-test-non-streaming" tabindex="-1">4) Quick test (non-streaming) <a class="header-anchor" href="#_4-quick-test-non-streaming" aria-label="Permalink to “4) Quick test (non-streaming)”"></a></h2><p>Use <code>curl</code> or your HTTP client to test a simple chat completion:</p><div class="language-bash"><button title="Copy Code" class="copy"></button><span class="lang">bash</span><pre class="shiki shiki-themes github-light github-dark" style="--shiki-light:#24292e;--shiki-dark:#e1e4e8;--shiki-light-bg:#fff;--shiki-dark-bg:#24292e;" tabindex="0" dir="ltr"><code><span class="line"><span style="--shiki-light:#6F42C1;--shiki-dark:#B392F0;">curl</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> -s</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> -X</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> POST</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> http://localhost:3000/v1/chat/completions</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> \</span></span>
<span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> -H</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> "Content-Type: application/json"</span><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> \</span></span>
<span class="line"><span style="--shiki-light:#005CC5;--shiki-dark:#79B8FF;"> -d</span><span style="--shiki-light:#032F62;--shiki-dark:#9ECBFF;"> '{ "messages": [{"role":"system","content":"You are a helpful assistant."},{"role":"user","content":"Say hello." }], "model":"gpt-3.5-turbo" }'</span></span></code></pre></div><h2 id="_5-next-steps" tabindex="-1">5) Next steps <a class="header-anchor" href="#_5-next-steps" aria-label="Permalink to “5) Next steps”"></a></h2><ul><li>Configure additional providers in <code>provider.js</code> and set env keys.</li><li>Try streaming by setting <code>stream: true</code> when calling the endpoint.</li><li>See <code>tests/</code> for example scripts that exercise streaming, non-streaming, and tools.</li></ul></div></div></main><footer class="VPDocFooter" data-v-7011f0d8 data-v-e257564d><!--[--><!--]--><!----><nav class="prev-next" aria-labelledby="doc-footer-aria-label" data-v-e257564d><span class="visually-hidden" id="doc-footer-aria-label" data-v-e257564d>Pager</span><div class="pager" data-v-e257564d><!----></div><div class="pager" data-v-e257564d><a class="VPLink link pager-link next" href="/Unified-AI-Router/configuration.html" data-v-e257564d><!--[--><span class="desc" data-v-e257564d>Next page</span><span class="title" data-v-e257564d>Configuration</span><!--]--></a></div></nav></footer><!--[--><!--]--></div></div></div><!--[--><!--]--></div></div><!----><!--[--><!--]--></div></div>
-<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"
+<script>window.__VP_HASH_MAP__=JSON.parse("{\"configuration.md\":\"CeBGysiY\",\"index.md\":\"D-8hZ8ti\",\"quickstart.md\":\"mGNzdQVa\"}");window.__VP_SITE_DATA__=JSON.parse("{\"lang\":\"en-US\",\"dir\":\"ltr\",\"title\":\"Unified AI Router\",\"description\":\"OpenAI-compatible router with multi-provider fallback.\",\"base\":\"/Unified-AI-Router/\",\"head\":[],\"router\":{\"prefetchLinks\":true},\"appearance\":true,\"themeConfig\":{\"nav\":[{\"text\":\"Home\",\"link\":\"/\"},{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}],\"sidebar\":[{\"text\":\"Guide\",\"items\":[{\"text\":\"Quickstart\",\"link\":\"/quickstart\"},{\"text\":\"Configuration\",\"link\":\"/configuration\"}]}],\"socialLinks\":[{\"icon\":\"github\",\"link\":\"https://github.com/mlibre/Unified-AI-Router\"},{\"icon\":\"npm\",\"link\":\"https://www.npmjs.com/package/unified-ai-router\"}]},\"locales\":{},\"scrollOffset\":134,\"cleanUrls\":false,\"additionalConfig\":{}}");</script>
</body>
</html>
package/docs/configuration.md
CHANGED

@@ -45,10 +45,22 @@ Each provider object supports (at minimum) these fields:
  name: "openai", // simple identifier for logs/debug
  apiKey: process.env.OPENAI_API_KEY,
  model: "gpt-4", // model id to request from this provider
-  apiUrl: "https://api.openai.com/v1", // base URL for provider-compatible OpenAI endpoints
+  apiUrl: "https://api.openai.com/v1", // base URL for provider-compatible OpenAI endpoints,
}
```

+### Circuit Breaker Configuration
+
+The router includes built-in circuit breaker protection for each provider, using the "opossum" library. It provides fault tolerance by automatically stopping requests to a provider that is experiencing issues, which prevents cascading failures.
+
+Default circuit breaker options:
+
+* **timeout**: 300000 ms (5 minutes) - time before an action is considered failed
+* **errorThresholdPercentage**: 50% - percentage of failures before the circuit opens
+* **resetTimeout**: 9000000 ms (2.5 hours) - time to wait before trying the provider again
+
+You can override these options per provider by passing `circuitOptions`.
+
### Important notes

* `apiKey` should reference the environment variable (use `process.env.X`). If the env var is missing the router will skip that provider and log a warning.
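
Concretely, the `circuitOptions` override described above could look like this in `provider.js` (a minimal sketch: the fields follow the provider format documented earlier, the option values are illustrative only, and the defaults are merged underneath with `Object.assign`):

```js
// Hypothetical provider.js with a single entry; the circuitOptions values are examples, not recommendations.
module.exports = [
	{
		name: "openai",
		apiKey: process.env.OPENAI_API_KEY,
		model: "gpt-4",
		apiUrl: "https://api.openai.com/v1",
		circuitOptions: {
			timeout: 60000,               // fail a request after 60 s instead of the 5-minute default
			errorThresholdPercentage: 25, // open the circuit after 25% failed requests
			resetTimeout: 900000          // probe the provider again after 15 minutes
		}
	}
];
```
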
package/docs/index.md
CHANGED

@@ -16,6 +16,8 @@ hero:
features:
  - title: Multi-provider fallback
    details: If one provider fails, requests automatically fall back to the next available provider.
+  - title: Circuit breaker protection
+    details: Built-in fault tolerance with automatic circuit breaking for each provider to prevent cascading failures.
  - title: OpenAI-compatible API
    details: Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming).
  - title: Tool calling & streaming
package/main.js
CHANGED

@@ -4,11 +4,63 @@ const pretty = require( "pino-pretty" );
const pinoStream = pretty({ colorize: true, ignore: "pid,hostname" });
const logger = pino({ base: false }, pinoStream );

+const CircuitBreaker = require( "opossum" ); // <-- added
+
class AIRouter
{
	constructor ( providers )
	{
		this.providers = providers;
+
+		const defaultCircuitOptions = {
+			timeout: 300000, // time in ms before action considered failed
+			errorThresholdPercentage: 50, // % of failures before opening the circuit
+			resetTimeout: 9000000, // time in ms to wait before trying again
+		};
+
+		for ( const provider of this.providers )
+		{
+			// allow provider to override circuit options
+			const circuitOptions = Object.assign({}, defaultCircuitOptions, provider.circuitOptions || {});
+
+			// action receives an object: { params, withResponse }
+			const action = async ({ params, withResponse }) =>
+			{
+				const client = this.createClient( provider );
+
+				// If caller requested .withResponse() use it
+				if ( withResponse )
+				{
+					// return whatever .withResponse() returns (assumed promise resolving to { data, response })
+					return client.chat.completions.create( params ).withResponse();
+				}
+
+				// Normal create (may return Promise resolving to response OR an async iterable for streaming)
+				return client.chat.completions.create( params );
+			};
+
+			const breaker = new CircuitBreaker( action, circuitOptions );
+
+			// simple logging for breaker transitions
+			breaker.on( "open", ( ) =>
+			{
+				return logger.warn( `Circuit open for provider: ${provider.name}` )
+			});
+			breaker.on( "halfOpen", () => { return logger.info( `Circuit half-open for provider: ${provider.name}` ) });
+			breaker.on( "close", () => { return logger.info( `Circuit closed for provider: ${provider.name}` ) });
+			breaker.on( "fallback", () => { return logger.warn( `Fallback triggered for provider: ${provider.name}` ) });
+			breaker.on( "failure", ( err ) =>
+			{
+				logger.error({ provider: provider.name, event: "failure", error: err.message }, "Breaker failure event" );
+			});
+			// optional fallback: we throw so the router will continue to next provider
+			breaker.fallback( ( err ) =>
+			{
+				throw new Error( `Circuit open for ${provider.name}` );
+			});
+
+			provider.breaker = breaker;
+		}
	}

	createClient ( provider )

@@ -33,8 +85,6 @@ class AIRouter
			try
			{
				logger.info( `Attempting with provider: ${provider.name}` );
-				const client = this.createClient( provider );
-
				const params = {
					messages,
					...tools && tools.length > 0 ? { tools } : {},

@@ -42,10 +92,11 @@ class AIRouter
					...restOptions,
					model: provider.model
				};
-
+				const result = await provider.breaker.fire({ params, withResponse: false });
+				logger.info( `Successful with provider: ${provider.name}` );
				if ( isStreaming )
				{
-					const responseStream =
+					const responseStream = result;
					return ( async function* ()
					{
						for await ( const chunk of responseStream )

@@ -78,7 +129,7 @@ class AIRouter
				}
				else
				{
-					const response =
+					const response = result;
					const content = response.choices[0]?.message?.content;
					const reasoning = response.choices[0]?.message?.reasoning;
					const tool_calls = response.choices[0]?.message?.tool_calls

@@ -104,7 +155,7 @@ class AIRouter
				// Continue to next provider
			}
		}
-		throw new Error( `All providers failed. Last error: ${lastError
+		throw new Error( `All providers failed. Last error: ${lastError?.message || "unknown"}` );
	}

	async chatCompletionWithResponse ( messages, options = {})

@@ -120,7 +171,6 @@ class AIRouter
			try
			{
				logger.info( `Attempting with provider: ${provider.name}` );
-				const client = this.createClient( provider );

				const params = {
					messages,

@@ -130,7 +180,8 @@ class AIRouter
					model: provider.model
				};

-				const { data, response: rawResponse } = await
+				const { data, response: rawResponse } = await provider.breaker.fire({ params, withResponse: true });
+				logger.info( `Successful with provider: ${provider.name}` );
				return { data, response: rawResponse }
			}
			catch ( error )

@@ -140,7 +191,7 @@ class AIRouter
				// Continue to next provider
			}
		}
-		throw new Error( `All providers failed. Last error: ${lastError
+		throw new Error( `All providers failed. Last error: ${lastError?.message || "unknown"}` );
	}

	async getModels ()
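
For readers unfamiliar with opossum, the sketch below isolates the wrap-and-fire pattern that main.js now applies to every provider. It only uses documented opossum calls (`new CircuitBreaker(action, options)`, `fire`, `fallback`, and the state events); `callProvider` and its return value are placeholders, not the package's client:

```js
// Standalone sketch of the breaker pattern above; callProvider is a stand-in, not the package's client.
const CircuitBreaker = require( "opossum" );

async function callProvider ({ params })
{
	// A real action would call client.chat.completions.create( params ) here.
	return { choices: [{ message: { content: `echo: ${params.messages[0].content}` } }] };
}

const breaker = new CircuitBreaker( callProvider, {
	timeout: 300000,              // same defaults the router uses
	errorThresholdPercentage: 50,
	resetTimeout: 9000000
});

// A fallback that throws makes fire() reject (on failures and while the circuit is open),
// so a caller can catch the rejection and move on to the next provider in its list.
breaker.fallback( () => { throw new Error( "Circuit open" ); });
breaker.on( "open", () => console.warn( "circuit opened" ));
breaker.on( "halfOpen", () => console.info( "circuit half-open, probing" ));

breaker.fire({ params: { model: "gpt-4", messages: [{ role: "user", content: "hi" }] } })
	.then( ( result ) => console.log( result.choices[0].message.content ))
	.catch( ( error ) => console.error( "provider failed:", error.message ));
```
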
package/package.json
CHANGED

@@ -1,6 +1,6 @@
{
  "name": "unified-ai-router",
-  "version": "3.
+  "version": "3.4.1",
  "description": "A unified interface for multiple LLM providers with automatic fallback. This project includes an OpenAI-compatible server and a deployable Telegram bot with a Mini App interface. It supports major providers like OpenAI, Google, Grok, and more, ensuring reliability and flexibility for your AI applications.",
  "license": "ISC",
  "author": "mlibre",

@@ -46,6 +46,7 @@
    "eslint": "^9.33.0",
    "express": "^5.1.0",
    "openai": "^6.1.0",
+    "opossum": "^9.0.0",
    "pino": "^9.9.0",
    "pino-pretty": "^13.1.1"
  },
package/provider.js
CHANGED

@@ -6,22 +6,22 @@ module.exports = [
		apiUrl: "https://generativelanguage.googleapis.com/v1beta/openai/",
	},
	{
-		name: "
-		apiKey: process.env.
+		name: "gemini_2",
+		apiKey: process.env.GEMINI_API_KEY_2,
		model: "gemini-2.5-pro",
		apiUrl: "https://generativelanguage.googleapis.com/v1beta/openai/",
	},
	{
-		name: "
-		apiKey: process.env.
+		name: "gemini_3",
+		apiKey: process.env.GEMINI_API_KEY_3,
		model: "gemini-2.5-pro",
		apiUrl: "https://generativelanguage.googleapis.com/v1beta/openai/",
	},
	{
-		name: "
-		apiKey: process.env.
-		model: "
-		apiUrl: "https://
+		name: "cerebras",
+		apiKey: process.env.CEREBRAS_API_KEY,
+		model: "gpt-oss-120b",
+		apiUrl: "https://api.cerebras.ai/v1",
	},
	{
		name: "cerebras_2",

@@ -30,22 +30,22 @@ module.exports = [
		apiUrl: "https://api.cerebras.ai/v1",
	},
	{
-		name: "
-		apiKey: process.env.
-		model: "
-		apiUrl: "https://
+		name: "openrouter",
+		apiKey: process.env.OPENROUTER_API_KEY,
+		model: "qwen/qwen3-coder:free",
+		apiUrl: "https://openrouter.ai/api/v1",
	},
	{
-		name: "
-		apiKey: process.env.
+		name: "openrouter",
+		apiKey: process.env.OPENROUTER_API_KEY,
		model: "z-ai/glm-4.5-air:free",
		apiUrl: "https://openrouter.ai/api/v1",
	},
	{
-		name: "
-		apiKey: process.env.
-		model: "
-		apiUrl: "https://
+		name: "openrouter_2",
+		apiKey: process.env.OPENROUTER_API_KEY_2,
+		model: "z-ai/glm-4.5-air:free",
+		apiUrl: "https://openrouter.ai/api/v1",
	},
	{
		name: "openrouter_2",

@@ -53,12 +53,24 @@ module.exports = [
		model: "qwen/qwen3-coder:free",
		apiUrl: "https://openrouter.ai/api/v1",
	},
+	{
+		name: "openrouter_3",
+		apiKey: process.env.OPENROUTER_API_KEY_3,
+		model: "qwen/qwen3-coder:free",
+		apiUrl: "https://openrouter.ai/api/v1",
+	},
	{
		name: "openrouter_3",
		apiKey: process.env.OPENROUTER_API_KEY_3,
		model: "z-ai/glm-4.5-air:free",
		apiUrl: "https://openrouter.ai/api/v1",
	},
+	{
+		name: "qroq",
+		apiKey: process.env.QROQ_API_KEY,
+		model: "openai/gpt-oss-120b",
+		apiUrl: "https://api.groq.com/openai/v1",
+	},
	{
		name: "gemini_1",
		apiKey: process.env.GEMINI_API_KEY,
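
One pattern worth noting in the hunks above: the same OpenRouter key is listed under several entries with different models, so the router's array-order fallback steps through models as well as providers. A new OpenAI-compatible endpoint would follow the same shape; everything in this sketch is a placeholder, not something the package ships:

```js
// Hypothetical tail of the provider.js array: one key, two models, tried in array order.
// The names, env var, models, and URL are all placeholders.
module.exports = [
	{
		name: "example",
		apiKey: process.env.EXAMPLE_API_KEY,
		model: "example-large",
		apiUrl: "https://api.example.com/v1",
	},
	{
		name: "example_small",
		apiKey: process.env.EXAMPLE_API_KEY, // same key, cheaper model as a second attempt
		model: "example-small",
		apiUrl: "https://api.example.com/v1",
	},
];
```
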
package/readme.md
CHANGED

@@ -22,6 +22,7 @@ It supports all the OpenAI-compatible servers, including major providers like Op

- **Multi-Provider Support**: Works with OpenAI, Google, Grok, OpenRouter, Z.ai, Qroq, Cohere, Cerebras, LLM7 and etc
- **Automatic Fallback**: If one provider fails for **any reason**, automatically tries the next
+- **Circuit Breaker**: Built-in fault tolerance with automatic circuit breaking for each provider to prevent cascading failures
- **OpenAI-Compatible Server**: Drop-in replacement for the OpenAI API, enabling easy integration with existing tools and clients
- **Simple API**: Easy-to-use interface for all supported providers
- **Streaming and Non-Streaming Support**: Handles both streaming and non-streaming responses

@@ -1 +0,0 @@
import{_ as s,c as e,o as a,af as n}from"./chunks/framework.DLCvNBTH.js";const c=JSON.parse('{"title":"Configuration","description":"","frontmatter":{},"headers":[],"relativePath":"configuration.md","filePath":"configuration.md"}'),t={name:"configuration.md"};function l(o,i,p,h,r,d){return a(),e("div",null,[...i[0]||(i[0]=[n("",37)])])}const E=s(t,[["render",l]]);export{c as __pageData,E as default};
@@ -1 +0,0 @@
import{_ as t,c as e,o as a}from"./chunks/framework.DLCvNBTH.js";const m=JSON.parse('{"title":"","description":"","frontmatter":{"layout":"home","hero":{"name":"Unified AI Router","text":"A lightweight OpenAI-compatible server in Nodejs","tagline":"OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.","actions":[{"theme":"brand","text":"Quickstart","link":"/quickstart"},{"theme":"alt","text":"Configuration","link":"/configuration"}]},"features":[{"title":"Multi-provider fallback","details":"If one provider fails, requests automatically fall back to the next available provider."},{"title":"OpenAI-compatible API","details":"Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming)."},{"title":"Tool calling & streaming","details":"Supports tool-calling metadata and SSE streaming to integrate with existing tooling."},{"title":"Easy to self-host","details":"Run locally or deploy to cloud host providers."}]},"headers":[],"relativePath":"index.md","filePath":"index.md"}'),i={name:"index.md"};function o(n,l,r,s,c,d){return a(),e("div")}const g=t(i,[["render",o]]);export{m as __pageData,g as default};
@@ -1 +0,0 @@
import{_ as t,c as e,o as a}from"./chunks/framework.DLCvNBTH.js";const m=JSON.parse('{"title":"","description":"","frontmatter":{"layout":"home","hero":{"name":"Unified AI Router","text":"A lightweight OpenAI-compatible server in Nodejs","tagline":"OpenAI-compatible endpoints, automatic fallback, streaming support, and tool-calling — all in one lightweight package.","actions":[{"theme":"brand","text":"Quickstart","link":"/quickstart"},{"theme":"alt","text":"Configuration","link":"/configuration"}]},"features":[{"title":"Multi-provider fallback","details":"If one provider fails, requests automatically fall back to the next available provider."},{"title":"OpenAI-compatible API","details":"Run a drop-in replacement for the OpenAI chat completion endpoints (streaming & non-streaming)."},{"title":"Tool calling & streaming","details":"Supports tool-calling metadata and SSE streaming to integrate with existing tooling."},{"title":"Easy to self-host","details":"Run locally or deploy to cloud host providers."}]},"headers":[],"relativePath":"index.md","filePath":"index.md"}'),i={name:"index.md"};function o(n,l,r,s,c,d){return a(),e("div")}const g=t(i,[["render",o]]);export{m as __pageData,g as default};