oh-pi 0.1.45 → 0.1.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,22 +2,94 @@ import * as p from "@clack/prompts";
2
2
  import chalk from "chalk";
3
3
  import { t } from "../i18n.js";
4
4
  import { PROVIDERS } from "../types.js";
5
- /** Fetch models from OpenAI-compatible /v1/models endpoint */
6
- async function fetchModels(baseUrl, apiKey) {
7
- const url = `${baseUrl.replace(/\/+$/, "")}/v1/models`;
5
+ /** Provider API base URLs for dynamic model fetching */
6
+ const PROVIDER_API_URLS = {
7
+ anthropic: "https://api.anthropic.com",
8
+ openai: "https://api.openai.com",
9
+ google: "https://generativelanguage.googleapis.com",
10
+ groq: "https://api.groq.com",
11
+ openrouter: "https://openrouter.ai",
12
+ xai: "https://api.x.ai",
13
+ mistral: "https://api.mistral.ai",
14
+ };
15
+ /** Fetch models dynamically — tries multiple API styles, returns metadata + detected API type */
16
+ async function fetchModels(provider, baseUrl, apiKey) {
17
+ const base = baseUrl.replace(/\/+$/, "");
18
+ const resolvedKey = process.env[apiKey] ?? apiKey;
19
+ // Try Anthropic-style first (attempted for every provider, not just anthropic)
8
20
  try {
9
- const res = await fetch(url, {
10
- headers: { Authorization: `Bearer ${apiKey}` },
21
+ const res = await fetch(`${base}/v1/models`, {
22
+ headers: { "x-api-key": resolvedKey, "anthropic-version": "2023-06-01" },
11
23
  signal: AbortSignal.timeout(8000),
12
24
  });
13
- if (!res.ok)
14
- return [];
15
- const json = await res.json();
16
- return (json.data ?? []).map(m => m.id).sort();
25
+ if (res.ok) {
26
+ const json = await res.json();
27
+ const data = json.data ?? [];
28
+ if (data.length > 0 && data[0].owned_by === "anthropic") {
29
+ return {
30
+ api: "anthropic-messages",
31
+ models: data.map(m => ({
32
+ id: m.id,
33
+ reasoning: m.thinking_enabled ?? false,
34
+ input: ["text", "image"],
35
+ contextWindow: m.max_tokens ?? 200000,
36
+ maxTokens: m.thinking_enabled ? Math.min(m.max_tokens ?? 128000, 128000) : Math.min(m.max_tokens ?? 8192, 16384),
37
+ })).sort((a, b) => a.id.localeCompare(b.id)),
38
+ };
39
+ }
40
+ }
41
+ }
42
+ catch { /* fall through */ }
43
+ // Try Google-style
44
+ if (provider === "google") {
45
+ try {
46
+ const res = await fetch(`${base}/v1beta/models?key=${resolvedKey}`, {
47
+ signal: AbortSignal.timeout(8000),
48
+ });
49
+ if (res.ok) {
50
+ const json = await res.json();
51
+ const data = (json.models ?? []).filter((m) => m.name?.includes("gemini"));
52
+ if (data.length > 0) {
53
+ return {
54
+ api: "google-generative-ai",
55
+ models: data.map((m) => ({
56
+ id: m.name.replace("models/", ""),
57
+ reasoning: m.name.includes("thinking") || m.name.includes("2.5"),
58
+ input: ["text", "image"],
59
+ contextWindow: m.inputTokenLimit ?? 1048576,
60
+ maxTokens: m.outputTokenLimit ?? 65536,
61
+ })).sort((a, b) => a.id.localeCompare(b.id)),
62
+ };
63
+ }
64
+ }
65
+ }
66
+ catch { /* fall through */ }
17
67
  }
18
- catch {
19
- return [];
68
+ // Try OpenAI-compatible
69
+ try {
70
+ const res = await fetch(`${base}/v1/models`, {
71
+ headers: { Authorization: `Bearer ${resolvedKey}` },
72
+ signal: AbortSignal.timeout(8000),
73
+ });
74
+ if (res.ok) {
75
+ const json = await res.json();
76
+ const data = json.data ?? [];
77
+ if (data.length > 0) {
78
+ return {
79
+ api: "openai-completions",
80
+ models: data.map((m) => ({
81
+ id: m.id,
82
+ reasoning: m.thinking_enabled ?? m.id.includes("o3") ?? false,
83
+ input: ["text", "image"],
84
+ contextWindow: m.context_window ?? m.max_tokens ?? 128000,
85
+ maxTokens: m.max_output ?? 16384,
86
+ })).sort((a, b) => a.id.localeCompare(b.id)),
87
+ };
88
+ }
89
+ }
20
90
  }
91
+ catch { /* fall through */ }
92
+ return { models: [] };
21
93
  }
22
94
  export async function setupProviders(env) {
23
95
  const entries = Object.entries(PROVIDERS);
@@ -95,9 +167,10 @@ export async function setupProviders(env) {
95
167
  else {
96
168
  apiKey = await promptKey(info.label);
97
169
  }
98
- // Model selection — try dynamic fetch for custom endpoints, fall back to static list
99
- const defaultModel = await selectModel(info.label, info.models, baseUrl, apiKey);
100
- configs.push({ name, apiKey, defaultModel, baseUrl });
170
+ // Always attempt dynamic model fetching
171
+ const fetchUrl = baseUrl || PROVIDER_API_URLS[name];
172
+ const { defaultModel, discoveredModels, api } = await selectModelWithMeta(name, info.label, info.models, fetchUrl, apiKey);
173
+ configs.push({ name, apiKey, defaultModel, baseUrl, api, discoveredModels });
101
174
  p.log.success(t("provider.configured", { label: info.label }));
102
175
  }
103
176
  return configs;
@@ -130,26 +203,28 @@ async function setupCustomProvider() {
130
203
  if (needsKey) {
131
204
  apiKey = await promptKey(name);
132
205
  }
133
- // Dynamic model fetch
134
- const s = p.spinner();
135
- s.start(t("provider.fetchingModels", { source: baseUrl }));
136
- const models = await fetchModels(baseUrl, apiKey);
137
- s.stop(models.length > 0 ? t("provider.foundModels", { count: models.length }) : t("provider.noModels"));
138
- let defaultModel;
139
- if (models.length > 0) {
140
- const model = await p.select({
141
- message: t("provider.selectModel", { label: name }),
142
- options: models.slice(0, 30).map(m => ({ value: m, label: m })),
143
- });
144
- if (p.isCancel(model)) {
145
- p.cancel(t("cancelled"));
146
- process.exit(0);
206
+ const { defaultModel, discoveredModels, api } = await selectModelWithMeta(name, name, [], baseUrl, apiKey);
207
+ p.log.success(t("provider.customConfigured", { name, url: baseUrl }));
208
+ return { name, apiKey, defaultModel, baseUrl, api, discoveredModels };
209
+ }
210
+ async function selectModelWithMeta(provider, label, staticModels, baseUrl, apiKey) {
211
+ let modelIds = staticModels;
212
+ let discoveredModels;
213
+ let api;
214
+ if (baseUrl && apiKey) {
215
+ const s = p.spinner();
216
+ s.start(t("provider.fetchingModels", { source: label }));
217
+ const result = await fetchModels(provider, baseUrl, apiKey);
218
+ s.stop(result.models.length > 0 ? t("provider.foundModels", { count: result.models.length }) : t("provider.defaultModelList"));
219
+ if (result.models.length > 0) {
220
+ discoveredModels = result.models;
221
+ api = result.api;
222
+ modelIds = result.models.map(m => m.id);
147
223
  }
148
- defaultModel = model;
149
224
  }
150
- else {
225
+ if (modelIds.length === 0) {
151
226
  const model = await p.text({
152
- message: t("provider.modelName", { label: name }),
227
+ message: t("provider.modelName", { label }),
153
228
  placeholder: t("provider.modelNamePlaceholder"),
154
229
  validate: (v) => (!v || v.trim().length === 0) ? t("provider.modelNameRequired") : undefined,
155
230
  });
@@ -157,33 +232,19 @@ async function setupCustomProvider() {
157
232
  p.cancel(t("cancelled"));
158
233
  process.exit(0);
159
234
  }
160
- defaultModel = model;
161
- }
162
- p.log.success(t("provider.customConfigured", { name, url: baseUrl }));
163
- return { name, apiKey, defaultModel, baseUrl };
164
- }
165
- async function selectModel(label, staticModels, baseUrl, apiKey) {
166
- let models = staticModels;
167
- // Try dynamic fetch if custom URL or known provider
168
- if (baseUrl && apiKey) {
169
- const s = p.spinner();
170
- s.start(t("provider.fetchingModels", { source: label }));
171
- const fetched = await fetchModels(baseUrl, apiKey);
172
- s.stop(fetched.length > 0 ? t("provider.foundModels", { count: fetched.length }) : t("provider.defaultModelList"));
173
- if (fetched.length > 0)
174
- models = fetched;
235
+ return { defaultModel: model, discoveredModels, api };
175
236
  }
176
- if (models.length === 1)
177
- return models[0];
237
+ if (modelIds.length === 1)
238
+ return { defaultModel: modelIds[0], discoveredModels, api };
178
239
  const model = await p.select({
179
240
  message: t("provider.selectModel", { label }),
180
- options: models.slice(0, 30).map(m => ({ value: m, label: m })),
241
+ options: modelIds.slice(0, 50).map(m => ({ value: m, label: m })),
181
242
  });
182
243
  if (p.isCancel(model)) {
183
244
  p.cancel(t("cancelled"));
184
245
  process.exit(0);
185
246
  }
186
- return model;
247
+ return { defaultModel: model, discoveredModels, api };
187
248
  }
188
249
  async function promptKey(label) {
189
250
  const key = await p.password({
package/dist/types.d.ts CHANGED
@@ -1,3 +1,11 @@
1
+ /** 动态发现的模型信息 */
2
+ export interface DiscoveredModel {
3
+ id: string;
4
+ reasoning: boolean;
5
+ input: ("text" | "image")[];
6
+ contextWindow: number;
7
+ maxTokens: number;
8
+ }
1
9
  /** 模型提供商配置 */
2
10
  export interface ProviderConfig {
3
11
  /** 提供商名称 */
@@ -8,6 +16,10 @@ export interface ProviderConfig {
8
16
  defaultModel?: string;
9
17
  /** 自定义 API 地址 */
10
18
  baseUrl?: string;
19
+ /** 检测到的 API 类型 */
20
+ api?: string;
21
+ /** 动态发现的所有模型 */
22
+ discoveredModels?: DiscoveredModel[];
11
23
  /** 上下文窗口大小(自定义提供商用) */
12
24
  contextWindow?: number;
13
25
  /** 最大输出 token 数(自定义提供商用) */
@@ -56,12 +56,14 @@ export function applyConfig(config) {
56
56
  writeFileSync(authPath, JSON.stringify(auth, null, 2), { mode: 0o600 });
57
57
  }
58
58
  // 2. settings.json
59
- const primary = config.providers[0];
59
+ // Issue #4 fix: prefer provider with baseUrl+defaultModel as primary (custom endpoint user intent)
60
+ const primary = config.providers.find(p => p.baseUrl && p.defaultModel) ?? config.providers[0];
60
61
  const providerInfo = primary ? PROVIDERS[primary.name] : undefined;
61
62
  const compactThreshold = config.compactThreshold ?? 0.75;
62
63
  const primaryModel = primary?.defaultModel ?? providerInfo?.models[0];
64
+ const primaryDisc = primary?.discoveredModels?.find(m => m.id === primaryModel);
63
65
  const primaryCaps = primaryModel ? MODEL_CAPABILITIES[primaryModel] : undefined;
64
- const contextWindow = primary?.contextWindow ?? primaryCaps?.contextWindow ?? 128000;
66
+ const contextWindow = primaryDisc?.contextWindow ?? primary?.contextWindow ?? primaryCaps?.contextWindow ?? 128000;
65
67
  const reserveTokens = Math.round(contextWindow * (1 - compactThreshold));
66
68
  const settings = {
67
69
  ...(primary ? { defaultProvider: primary.name, defaultModel: primaryModel } : {}),
@@ -74,6 +76,8 @@ export function applyConfig(config) {
74
76
  };
75
77
  if (config.providers.length > 1) {
76
78
  settings.enabledModels = config.providers.flatMap((p) => {
79
+ if (p.discoveredModels?.length)
80
+ return p.discoveredModels.map(m => m.id);
77
81
  const info = PROVIDERS[p.name];
78
82
  return info ? info.models : [];
79
83
  });
@@ -85,23 +89,35 @@ export function applyConfig(config) {
85
89
  const providers = {};
86
90
  for (const cp of customProviders) {
87
91
  const isBuiltin = !!PROVIDERS[cp.name];
88
- if (isBuiltin) {
89
- // Known provider with custom baseUrl — just override endpoint, keep built-in models
92
+ if (isBuiltin && !cp.discoveredModels?.length) {
93
+ // Known provider with custom baseUrl, no discovered models — just override endpoint
90
94
  const entry = { baseUrl: cp.baseUrl };
91
95
  if (cp.apiKey !== "none")
92
96
  entry.apiKey = cp.apiKey;
93
97
  providers[cp.name] = entry;
94
98
  }
95
99
  else {
96
- // Fully custom provider need api, models, etc.
97
- const caps = cp.defaultModel ? MODEL_CAPABILITIES[cp.defaultModel] : undefined;
100
+ // Custom provider or builtin with discovered models — write full config
98
101
  const entry = {
99
102
  baseUrl: cp.baseUrl,
100
- api: "openai-completions",
103
+ api: cp.api ?? "openai-completions",
101
104
  };
102
105
  if (cp.apiKey !== "none")
103
106
  entry.apiKey = cp.apiKey;
104
- if (cp.defaultModel) {
107
+ if (cp.discoveredModels?.length) {
108
+ // Write ALL discovered models with their metadata
109
+ entry.models = cp.discoveredModels.map(m => ({
110
+ id: m.id,
111
+ name: m.id,
112
+ reasoning: m.reasoning,
113
+ input: m.input,
114
+ cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
115
+ contextWindow: m.contextWindow,
116
+ maxTokens: m.maxTokens,
117
+ }));
118
+ }
119
+ else if (cp.defaultModel) {
120
+ const caps = MODEL_CAPABILITIES[cp.defaultModel];
105
121
  entry.models = [{
106
122
  id: cp.defaultModel,
107
123
  name: cp.defaultModel,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "oh-pi",
3
- "version": "0.1.45",
3
+ "version": "0.1.47",
4
4
  "description": "One-click setup for pi-coding-agent. Like oh-my-zsh for pi.",
5
5
  "type": "module",
6
6
  "bin": {