swiftroutercli 4.0.1 → 4.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +25 -12
- package/package.json +1 -1
- package/src/index.ts +25 -12
package/dist/index.js
CHANGED
|
@@ -4,8 +4,8 @@ import * as readline from "readline/promises";
|
|
|
4
4
|
import chalk from "chalk";
|
|
5
5
|
import ora from "ora";
|
|
6
6
|
import { loadConfig, saveConfig } from "./config.js";
|
|
7
|
-
import { fetchModels } from "./api/client.js";
|
|
8
7
|
import { startChat } from "./ui/Chat.js";
|
|
8
|
+
import { fetchModels } from "./api/client.js";
|
|
9
9
|
import { DEFAULT_BASE_URL, DEFAULT_MODEL, CONFIG_FILE } from "./constants.js";
|
|
10
10
|
import terminalImage from "terminal-image";
|
|
11
11
|
import { fileURLToPath } from "url";
|
|
@@ -23,7 +23,7 @@ async function ensureConfig() {
|
|
|
23
23
|
try {
|
|
24
24
|
const logoPath = path.join(__dirname, "..", "assets", "logo.png");
|
|
25
25
|
if (fs.existsSync(logoPath)) {
|
|
26
|
-
const smallLogo = await terminalImage.file(logoPath, { height:
|
|
26
|
+
const smallLogo = await terminalImage.file(logoPath, { height: 1, preserveAspectRatio: true });
|
|
27
27
|
// Print logo inline with welcome text
|
|
28
28
|
process.stdout.write(smallLogo.trimEnd() + " ");
|
|
29
29
|
console.log(chalk.cyan.bold("Welcome to SwiftRouterCLI!"));
|
|
@@ -52,10 +52,26 @@ async function ensureConfig() {
|
|
|
52
52
|
console.log(chalk.green("\n✅ Configuration saved securely! Starting your session...\n"));
|
|
53
53
|
return config;
|
|
54
54
|
}
|
|
55
|
+
async function resolveModel(config) {
|
|
56
|
+
const spinner = ora("Auto-detecting model from SwiftRouter...").start();
|
|
57
|
+
try {
|
|
58
|
+
const models = await fetchModels(config);
|
|
59
|
+
if (models.length > 0) {
|
|
60
|
+
const picked = models[0].id || models[0].name || DEFAULT_MODEL;
|
|
61
|
+
spinner.succeed(chalk.green(`Auto-selected model: ${picked}`));
|
|
62
|
+
return picked;
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
catch (e) {
|
|
66
|
+
// Silently fall back
|
|
67
|
+
}
|
|
68
|
+
spinner.warn(chalk.yellow(`Could not auto-detect. Falling back to: ${DEFAULT_MODEL}`));
|
|
69
|
+
return DEFAULT_MODEL;
|
|
70
|
+
}
|
|
55
71
|
program
|
|
56
72
|
.name("swiftroutercli")
|
|
57
73
|
.description("CLI for SwiftRouter AI Gateway")
|
|
58
|
-
.version("4.0.1");
|
|
74
|
+
.version("4.0.3");
|
|
59
75
|
program
|
|
60
76
|
.command("config")
|
|
61
77
|
.description("Manually configure the CLI with your SwiftRouter API Key and Base URL")
|
|
@@ -102,17 +118,13 @@ program
|
|
|
102
118
|
.command("chat")
|
|
103
119
|
.description("Start an interactive chat session")
|
|
104
120
|
.argument("[prompt]", "Initial prompt to start the chat")
|
|
105
|
-
.option("-m, --model <model>", "Model to use
|
|
121
|
+
.option("-m, --model <model>", "Model to use (auto-detected from server if not specified)")
|
|
106
122
|
.option("-a, --approval-mode <mode>", "AI assistant's permission mode (suggest, auto-edit, full-auto)", "suggest")
|
|
107
123
|
.option("-q, --quiet", "Run in headless CI/CD mode without interactive TUI rendering", false)
|
|
108
124
|
.action(async (prompt, options) => {
|
|
109
125
|
const config = await ensureConfig();
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
}
|
|
113
|
-
else {
|
|
114
|
-
startChat(config, options.model, prompt, options.approvalMode, options.quiet);
|
|
115
|
-
}
|
|
126
|
+
const model = options.model || await resolveModel(config);
|
|
127
|
+
startChat(config, model, prompt || "", options.approvalMode, options.quiet);
|
|
116
128
|
});
|
|
117
129
|
program
|
|
118
130
|
.command("status")
|
|
@@ -152,11 +164,12 @@ program
|
|
|
152
164
|
// Default action: if no subcommand is given, launch chat (just like `codex`)
|
|
153
165
|
program
|
|
154
166
|
.argument("[prompt]", "Initial prompt to start chat directly")
|
|
155
|
-
.option("-m, --model <model>", "Model to use
|
|
167
|
+
.option("-m, --model <model>", "Model to use (auto-detected from server if not specified)")
|
|
156
168
|
.option("-a, --approval-mode <mode>", "AI assistant's permission mode (suggest, auto-edit, full-auto)", "suggest")
|
|
157
169
|
.option("-q, --quiet", "Run in headless CI/CD mode without interactive TUI rendering", false)
|
|
158
170
|
.action(async (prompt, options) => {
|
|
159
171
|
const config = await ensureConfig();
|
|
160
|
-
|
|
172
|
+
const model = options.model || await resolveModel(config);
|
|
173
|
+
startChat(config, model, prompt || "", options.approvalMode, options.quiet);
|
|
161
174
|
});
|
|
162
175
|
program.parse();
|
package/package.json
CHANGED
package/src/index.ts
CHANGED
|
@@ -4,8 +4,8 @@ import * as readline from "readline/promises";
|
|
|
4
4
|
import chalk from "chalk";
|
|
5
5
|
import ora from "ora";
|
|
6
6
|
import { loadConfig, saveConfig, Config } from "./config.js";
|
|
7
|
-
import { fetchModels } from "./api/client.js";
|
|
8
7
|
import { startChat } from "./ui/Chat.js";
|
|
8
|
+
import { fetchModels } from "./api/client.js";
|
|
9
9
|
import { DEFAULT_BASE_URL, DEFAULT_MODEL, CONFIG_FILE } from "./constants.js";
|
|
10
10
|
import terminalImage from "terminal-image";
|
|
11
11
|
import { fileURLToPath } from "url";
|
|
@@ -27,7 +27,7 @@ async function ensureConfig(): Promise<Config> {
|
|
|
27
27
|
try {
|
|
28
28
|
const logoPath = path.join(__dirname, "..", "assets", "logo.png");
|
|
29
29
|
if (fs.existsSync(logoPath)) {
|
|
30
|
-
const smallLogo = await terminalImage.file(logoPath, { height:
|
|
30
|
+
const smallLogo = await terminalImage.file(logoPath, { height: 1, preserveAspectRatio: true });
|
|
31
31
|
// Print logo inline with welcome text
|
|
32
32
|
process.stdout.write(smallLogo.trimEnd() + " ");
|
|
33
33
|
console.log(chalk.cyan.bold("Welcome to SwiftRouterCLI!"));
|
|
@@ -65,10 +65,26 @@ async function ensureConfig(): Promise<Config> {
|
|
|
65
65
|
return config;
|
|
66
66
|
}
|
|
67
67
|
|
|
68
|
+
async function resolveModel(config: Config): Promise<string> {
|
|
69
|
+
const spinner = ora("Auto-detecting model from SwiftRouter...").start();
|
|
70
|
+
try {
|
|
71
|
+
const models = await fetchModels(config);
|
|
72
|
+
if (models.length > 0) {
|
|
73
|
+
const picked = models[0].id || models[0].name || DEFAULT_MODEL;
|
|
74
|
+
spinner.succeed(chalk.green(`Auto-selected model: ${picked}`));
|
|
75
|
+
return picked;
|
|
76
|
+
}
|
|
77
|
+
} catch (e) {
|
|
78
|
+
// Silently fall back
|
|
79
|
+
}
|
|
80
|
+
spinner.warn(chalk.yellow(`Could not auto-detect. Falling back to: ${DEFAULT_MODEL}`));
|
|
81
|
+
return DEFAULT_MODEL;
|
|
82
|
+
}
|
|
83
|
+
|
|
68
84
|
program
|
|
69
85
|
.name("swiftroutercli")
|
|
70
86
|
.description("CLI for SwiftRouter AI Gateway")
|
|
71
|
-
.version("4.0.1");
|
|
87
|
+
.version("4.0.3");
|
|
72
88
|
|
|
73
89
|
program
|
|
74
90
|
.command("config")
|
|
@@ -118,17 +134,13 @@ program
|
|
|
118
134
|
.command("chat")
|
|
119
135
|
.description("Start an interactive chat session")
|
|
120
136
|
.argument("[prompt]", "Initial prompt to start the chat")
|
|
121
|
-
.option("-m, --model <model>", "Model to use
|
|
137
|
+
.option("-m, --model <model>", "Model to use (auto-detected from server if not specified)")
|
|
122
138
|
.option("-a, --approval-mode <mode>", "AI assistant's permission mode (suggest, auto-edit, full-auto)", "suggest")
|
|
123
139
|
.option("-q, --quiet", "Run in headless CI/CD mode without interactive TUI rendering", false)
|
|
124
140
|
.action(async (prompt, options) => {
|
|
125
141
|
const config = await ensureConfig();
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
startChat(config, options.model, "", options.approvalMode, options.quiet);
|
|
129
|
-
} else {
|
|
130
|
-
startChat(config, options.model, prompt, options.approvalMode, options.quiet);
|
|
131
|
-
}
|
|
142
|
+
const model = options.model || await resolveModel(config);
|
|
143
|
+
startChat(config, model, prompt || "", options.approvalMode, options.quiet);
|
|
132
144
|
});
|
|
133
145
|
|
|
134
146
|
program
|
|
@@ -171,12 +183,13 @@ program
|
|
|
171
183
|
// Default action: if no subcommand is given, launch chat (just like `codex`)
|
|
172
184
|
program
|
|
173
185
|
.argument("[prompt]", "Initial prompt to start chat directly")
|
|
174
|
-
.option("-m, --model <model>", "Model to use
|
|
186
|
+
.option("-m, --model <model>", "Model to use (auto-detected from server if not specified)")
|
|
175
187
|
.option("-a, --approval-mode <mode>", "AI assistant's permission mode (suggest, auto-edit, full-auto)", "suggest")
|
|
176
188
|
.option("-q, --quiet", "Run in headless CI/CD mode without interactive TUI rendering", false)
|
|
177
189
|
.action(async (prompt, options) => {
|
|
178
190
|
const config = await ensureConfig();
|
|
179
|
-
|
|
191
|
+
const model = options.model || await resolveModel(config);
|
|
192
|
+
startChat(config, model, prompt || "", options.approvalMode, options.quiet);
|
|
180
193
|
});
|
|
181
194
|
|
|
182
195
|
program.parse();
|