@rungate/llmrouter 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,97 @@
1
+ # llm_router
2
+
3
+ Minimal OpenClaw-first LLM router.
4
+
5
+ For the end-to-end runtime walkthrough of how the router itself works, see [docs/REQUEST_FLOW.md](docs/REQUEST_FLOW.md).
6
+
7
+ Included in this release:
8
+
9
+ - local OpenAI-compatible proxy
10
+ - simple prompt classifier
11
+ - x402 payment retry client
12
+ - OpenClaw plugin with `/wallet`
13
+
14
+ Not included yet:
15
+
16
+ - advanced routing heuristics
17
+ - weighted scoring
18
+ - provenance receipts
19
+ - complex fallback chains
20
+
21
+ ## Dev
22
+
23
+ ```bash
24
+ npm install
25
+ npm run typecheck
26
+ npm run dev:proxy
27
+ ```
28
+
29
+ ## Production Install
30
+
31
+ Install the published package into OpenClaw:
32
+
33
+ ```bash
34
+ openclaw plugins install @rungate/llmrouter
35
+ openclaw gateway restart
36
+ ```
37
+
38
+ Recommended production environment:
39
+
40
+ ```bash
41
+ INFERENCE_PROVIDER_BASE_URL=https://api.rungate.ai
42
+ X402_NETWORK=eip155:8453
43
+ ```
44
+
45
+ `llm_router` remains local-only in this setup. OpenClaw still talks to the local proxy at `http://127.0.0.1:3000/v1`; the remote upstream is configured by environment.
46
+
47
+ ## Local OpenClaw Install
48
+
49
+ For local unpublished development, install from a packaged tarball:
50
+
51
+ ```bash
52
+ npm run build
53
+ npm pack
54
+ openclaw plugins install ./rungate-llmrouter-0.1.0.tgz
55
+ openclaw gateway restart
56
+ ```
57
+
58
+ For Docker/tempclaw-style testing, stage the tarball into the container and install it there with:
59
+
60
+ ```bash
61
+ openclaw plugins install /staging/rungate-llmrouter-0.1.0.tgz
62
+ ```
63
+
64
+ ## Environment
65
+
66
+ ```bash
67
+ LLM_ROUTER_HOST=127.0.0.1
68
+ LLM_ROUTER_PORT=3000
69
+ INFERENCE_PROVIDER_BASE_URL=http://127.0.0.1:8787
70
+ X402_NETWORK=eip155:84532
71
+ ```
72
+
73
+ For production, point the upstream at Rungate and use Base mainnet:
74
+
75
+ ```bash
76
+ INFERENCE_PROVIDER_BASE_URL=https://api.rungate.ai
77
+ X402_NETWORK=eip155:8453
78
+ ```
79
+
80
+ ## OpenClaw Models
81
+
82
+ - `llmrouter/auto`
83
+ - `llmrouter/simple`
84
+ - `llmrouter/coding`
85
+ - `llmrouter/reasoning`
86
+ - `llmrouter/vision`
87
+
88
+ ## Release Workflow
89
+
90
+ ```bash
91
+ npm run typecheck
92
+ npm run build
93
+ npm pack
94
+ npm publish
95
+ ```
96
+
97
+ Use `npm pack` as the final sanity check before publishing. The packed artifact should contain the built plugin entrypoint in `dist`, the plugin manifest, and only runtime files.
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,3 @@
1
+ import { startProxyServer } from '../src/proxy/server.js';
2
+ const server = await startProxyServer();
3
+ console.log(`llm_router proxy listening on http://${server.host}:${server.port}`);
@@ -0,0 +1,6 @@
1
+ export { startProxyServer } from './proxy/server.js';
2
+ export { classifyPrompt } from './router/classify.js';
3
+ export { routeRequest } from './router/route.js';
4
+ export { createPaymentFetch } from './payment/x402.js';
5
+ export { resolveOrGenerateWalletKey, walletFilePath } from './payment/wallet.js';
6
+ export type { RouterRequest, PromptClassification, RouteDecision, WalletInfo } from './types.js';
@@ -0,0 +1,5 @@
1
+ export { startProxyServer } from './proxy/server.js';
2
+ export { classifyPrompt } from './router/classify.js';
3
+ export { routeRequest } from './router/route.js';
4
+ export { createPaymentFetch } from './payment/x402.js';
5
+ export { resolveOrGenerateWalletKey, walletFilePath } from './payment/wallet.js';
@@ -0,0 +1 @@
1
+ export declare function ensureOpenClawProviderConfig(config: Record<string, unknown>, baseUrl: string): void;
@@ -0,0 +1,38 @@
1
+ const MODEL_LIST = [
2
+ { id: 'auto', name: 'LLM Router Auto', reasoning: true },
3
+ { id: 'simple', name: 'LLM Router Simple', reasoning: false },
4
+ { id: 'coding', name: 'LLM Router Coding', reasoning: true },
5
+ { id: 'reasoning', name: 'LLM Router Reasoning', reasoning: true },
6
+ { id: 'vision', name: 'LLM Router Vision', reasoning: true },
7
+ ];
8
+ // Inject the provider block and default model so OpenClaw can talk to the local proxy.
9
+ export function ensureOpenClawProviderConfig(config, baseUrl) {
10
+ const models = config.models ?? {};
11
+ const providers = models.providers ?? {};
12
+ providers.llmrouter = {
13
+ baseUrl,
14
+ apiKey: 'llmrouter-local-proxy',
15
+ api: 'openai-completions',
16
+ models: MODEL_LIST.map((model) => ({
17
+ id: model.id,
18
+ name: model.name,
19
+ api: 'openai-completions',
20
+ reasoning: model.reasoning,
21
+ input: ['text'],
22
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
23
+ contextWindow: 262144,
24
+ maxTokens: 65536,
25
+ })),
26
+ };
27
+ models.providers = providers;
28
+ config.models = models;
29
+ const agents = config.agents ?? {};
30
+ const defaults = agents.defaults ?? {};
31
+ const modelConfig = defaults.model ?? {};
32
+ if (typeof modelConfig.primary !== 'string' || modelConfig.primary.length === 0) {
33
+ modelConfig.primary = 'llmrouter/auto';
34
+ }
35
+ defaults.model = modelConfig;
36
+ agents.defaults = defaults;
37
+ config.agents = agents;
38
+ }
@@ -0,0 +1,30 @@
1
+ type OpenClawPluginApi = {
2
+ config: Record<string, unknown>;
3
+ logger: {
4
+ info: (message: string) => void;
5
+ };
6
+ registerProvider: (provider: {
7
+ id: string;
8
+ label: string;
9
+ auth: unknown[];
10
+ }) => void;
11
+ registerCommand: (command: {
12
+ name: string;
13
+ description: string;
14
+ acceptsArgs?: boolean;
15
+ requireAuth?: boolean;
16
+ handler: () => Promise<{
17
+ text: string;
18
+ }> | {
19
+ text: string;
20
+ };
21
+ }) => void;
22
+ };
23
+ type OpenClawPluginDefinition = {
24
+ id: string;
25
+ name: string;
26
+ description: string;
27
+ register: (api: OpenClawPluginApi) => void | Promise<void>;
28
+ };
29
+ declare const plugin: OpenClawPluginDefinition;
30
+ export default plugin;
@@ -0,0 +1,47 @@
1
+ import { getWalletInfo, resolveOrGenerateWalletKey } from '../payment/wallet.js';
2
+ import { ensureOpenClawProviderConfig } from './config.js';
3
+ import { ensureLocalProxy, logWalletBootstrap } from './runtime.js';
4
+ const provider = {
5
+ id: 'llmrouter',
6
+ label: 'llmrouter',
7
+ auth: [],
8
+ };
9
+ // Format the wallet details for the native /wallet command.
10
+ async function formatWalletText() {
11
+ const wallet = await getWalletInfo(process.env);
12
+ return [
13
+ 'llm_router Wallet',
14
+ '',
15
+ `Address: ${wallet.address}`,
16
+ `Network: ${wallet.network}`,
17
+ `USDC balance: ${wallet.usdcBalance ?? 'unknown'} USDC`,
18
+ `Native balance: ${wallet.nativeBalance ?? 'unknown'} ${wallet.nativeSymbol ?? 'ETH'}`,
19
+ `Wallet file: ${wallet.walletFile}`,
20
+ `Source: ${wallet.source}`,
21
+ ].join('\n');
22
+ }
23
+ const plugin = {
24
+ id: 'llmrouter',
25
+ name: 'llm_router',
26
+ description: 'Minimal OpenClaw-first LLM router',
27
+ register: async (api) => {
28
+ ensureOpenClawProviderConfig(api.config, 'http://127.0.0.1:3000/v1');
29
+ api.registerProvider(provider);
30
+ api.logger.info('llm_router provider registered');
31
+ api.registerCommand({
32
+ name: 'wallet',
33
+ description: 'Show llm_router wallet info',
34
+ acceptsArgs: false,
35
+ requireAuth: false,
36
+ handler: async () => ({ text: await formatWalletText() }),
37
+ });
38
+ const wallet = resolveOrGenerateWalletKey(process.env);
39
+ logWalletBootstrap(api.logger, wallet);
40
+ if (!process.argv.includes('gateway')) {
41
+ api.logger.info('llm_router loaded outside gateway process; local proxy startup skipped');
42
+ return;
43
+ }
44
+ await ensureLocalProxy(api.logger);
45
+ },
46
+ };
47
+ export default plugin;
@@ -0,0 +1,8 @@
1
+ import type { WalletInfo } from '../types.js';
2
+ export type OpenClawLogger = {
3
+ info: (message: string) => void;
4
+ };
5
+ export declare function ensureLocalProxy(logger: OpenClawLogger): Promise<void>;
6
+ export declare function logWalletBootstrap(logger: OpenClawLogger, wallet: {
7
+ info: Pick<WalletInfo, 'address' | 'walletFile' | 'source' | 'network'>;
8
+ }): void;
@@ -0,0 +1,86 @@
1
+ import { startProxyServer } from '../proxy/server.js';
2
+ let proxy;
3
+ let proxyClosePromise;
4
+ let shutdownHooksInstalled = false;
5
+ function resolveProxyEndpoint() {
6
+ const host = process.env.LLM_ROUTER_HOST ?? '127.0.0.1';
7
+ const port = Number(process.env.LLM_ROUTER_PORT ?? 3000);
8
+ return {
9
+ host,
10
+ port,
11
+ baseUrl: `http://${host}:${port}`,
12
+ };
13
+ }
14
+ async function looksLikeLlmRouter(baseUrl) {
15
+ const response = await fetch(new URL('/v1/models', baseUrl), {
16
+ headers: { accept: 'application/json' },
17
+ signal: AbortSignal.timeout(2_000),
18
+ });
19
+ if (!response.ok)
20
+ return false;
21
+ const body = await response.json();
22
+ return Array.isArray(body.data) && body.data.some((model) => model.id === 'llmrouter/auto');
23
+ }
24
+ function closeProxyOnShutdown() {
25
+ if (!proxy || proxyClosePromise)
26
+ return;
27
+ proxyClosePromise = proxy.close()
28
+ .catch(() => undefined)
29
+ .finally(() => {
30
+ proxy = undefined;
31
+ proxyClosePromise = undefined;
32
+ });
33
+ }
34
+ function installProxyShutdownHooks(logger) {
35
+ if (shutdownHooksInstalled)
36
+ return;
37
+ shutdownHooksInstalled = true;
38
+ const handleSignal = (signal) => {
39
+ logger.info(`llm_router shutting down local proxy before ${signal}`);
40
+ closeProxyOnShutdown();
41
+ };
42
+ process.on('SIGUSR1', () => handleSignal('SIGUSR1'));
43
+ process.on('SIGTERM', () => handleSignal('SIGTERM'));
44
+ process.on('SIGINT', () => handleSignal('SIGINT'));
45
+ process.on('beforeExit', () => closeProxyOnShutdown());
46
+ process.on('exit', () => closeProxyOnShutdown());
47
+ }
48
+ export async function ensureLocalProxy(logger) {
49
+ if (proxy || !process.argv.includes('gateway'))
50
+ return;
51
+ const endpoint = resolveProxyEndpoint();
52
+ try {
53
+ proxy = await startProxyServer();
54
+ installProxyShutdownHooks(logger);
55
+ logger.info(`llm_router proxy listening on http://${proxy.host}:${proxy.port}`);
56
+ }
57
+ catch (error) {
58
+ if (error instanceof Error &&
59
+ 'code' in error &&
60
+ error.code === 'EADDRINUSE') {
61
+ try {
62
+ if (await looksLikeLlmRouter(endpoint.baseUrl)) {
63
+ logger.info(`llm_router proxy already running on ${endpoint.baseUrl}`);
64
+ return;
65
+ }
66
+ }
67
+ catch {
68
+ // Fall through and surface the original bind error.
69
+ }
70
+ }
71
+ throw error;
72
+ }
73
+ }
74
+ export function logWalletBootstrap(logger, wallet) {
75
+ if (wallet.info.source === 'generated') {
76
+ logger.info('════════════════════════════════════════════════');
77
+ logger.info(' NEW llm_router WALLET GENERATED');
78
+ logger.info(` Address : ${wallet.info.address}`);
79
+ logger.info(` Network : ${wallet.info.network}`);
80
+ logger.info(` Wallet : ${wallet.info.walletFile}`);
81
+ logger.info(' Back up this private key before funding it');
82
+ logger.info('════════════════════════════════════════════════');
83
+ return;
84
+ }
85
+ logger.info(`llm_router wallet loaded (${wallet.info.source}) ${wallet.info.address} ${wallet.info.walletFile}`);
86
+ }