@orchagent/cli 0.3.16 → 0.3.18

@@ -138,6 +138,7 @@ function registerCallCommand(program) {
     .option('--input <json>', 'Alias for --data')
     .option('--key <key>', 'LLM API key (overrides env vars)')
     .option('--provider <provider>', 'LLM provider (openai, anthropic, gemini)')
+    .option('--model <model>', 'LLM model to use (overrides agent default)')
     .option('--json', 'Output raw JSON')
     .option('--output <file>', 'Save response body to a file')
     .option('--skills <skills>', 'Add skills (comma-separated)')
@@ -189,6 +190,20 @@ argument or --file option instead.
         throw new errors_1.CliError('When using --key, you must also specify --provider (openai, anthropic, or gemini)');
     }
     (0, llm_1.validateProvider)(options.provider);
+    // Warn on potential model/provider mismatch
+    if (options.model && options.provider) {
+        const modelLower = options.model.toLowerCase();
+        const providerPatterns = {
+            openai: /^(gpt-|o1-|o3-|davinci|text-)/,
+            anthropic: /^claude-/,
+            gemini: /^gemini-/,
+            ollama: /^(llama|mistral|deepseek|phi|qwen)/,
+        };
+        const expectedPattern = providerPatterns[options.provider];
+        if (expectedPattern && !expectedPattern.test(modelLower)) {
+            process.stderr.write(`Warning: Model '${options.model}' may not be a ${options.provider} model.\n\n`);
+        }
+    }
     llmKey = options.key;
     llmProvider = options.provider;
     }
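
For readers skimming the hunk, the new check is a simple prefix test against a per-provider pattern table. A minimal standalone sketch, with the pattern table copied from the hunk above and the sample model names purely illustrative:

    const providerPatterns = {
        openai: /^(gpt-|o1-|o3-|davinci|text-)/,
        anthropic: /^claude-/,
        gemini: /^gemini-/,
        ollama: /^(llama|mistral|deepseek|phi|qwen)/,
    };

    // Returns true when the CLI would print the mismatch warning.
    function looksMismatched(provider, model) {
        const pattern = providerPatterns[provider];
        // Providers with no pattern entry never warn.
        return Boolean(pattern && !pattern.test(model.toLowerCase()));
    }

    console.log(looksMismatched('openai', 'gpt-4o'));         // false
    console.log(looksMismatched('openai', 'claude-3-haiku')); // true, warning printed
    console.log(looksMismatched('anthropic', 'Claude-Opus')); // false: lowercasing makes the check case-insensitive

Note the warning is advisory only; execution continues with the model exactly as given.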
@@ -204,13 +219,17 @@ argument or --file option instead.
     // Headers can be logged by proxies/load balancers, body is not logged by default
     let llmCredentials;
     if (llmKey && llmProvider) {
-        llmCredentials = { api_key: llmKey, provider: llmProvider };
+        llmCredentials = {
+            api_key: llmKey,
+            provider: llmProvider,
+            ...(options.model && { model: options.model }),
+        };
     }
     else if (agentMeta.type === 'prompt') {
         // Warn if no key found for prompt-based agent
         const providerList = supportedProviders.join(', ');
         process.stderr.write(`Warning: No LLM key found for providers: ${providerList}\n` +
-            `Set an env var (e.g., OPENAI_API_KEY), use --key, or configure in web dashboard\n\n`);
+            `Set an env var (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', use --key, or configure in web dashboard\n\n`);
     }
     // Add skill headers
     if (options.skills) {
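
The rewritten credentials object relies on the conditional-spread idiom: spreading a falsy value into an object literal is a no-op, so `model` is attached only when `--model` was actually passed. A small sketch (the key string is a placeholder):

    const options = {}; // no --model flag given
    const creds = {
        api_key: 'sk-placeholder',
        provider: 'openai',
        ...(options.model && { model: options.model }),
    };
    console.log('model' in creds); // false: the property is omitted, not set to undefined

Compared with an explicit `if` plus assignment, the spread keeps the literal declarative and the property genuinely absent.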
@@ -291,7 +291,7 @@ async function detectAllLlmKeys(supportedProviders, config) {
     }
     return providers;
 }
-async function executePromptLocally(agentData, inputData, skillPrompts = [], config, providerOverride) {
+async function executePromptLocally(agentData, inputData, skillPrompts = [], config, providerOverride, modelOverride) {
     // If provider override specified, validate and use only that provider
     if (providerOverride) {
         (0, llm_1.validateProvider)(providerOverride);
@@ -313,12 +313,12 @@ async function executePromptLocally(agentData, inputData, skillPrompts = [], con
     if (allProviders.length === 0) {
         const providers = providersToCheck.join(', ');
         throw new errors_1.CliError(`No LLM key found for: ${providers}\n` +
-            `Set an environment variable (e.g., OPENAI_API_KEY) or configure in web dashboard`);
+            `Set an environment variable (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', or configure in web dashboard`);
     }
     // Apply agent default models to each provider config
     const providersWithModels = allProviders.map((p) => ({
         ...p,
-        model: agentData.default_models?.[p.provider] || p.model,
+        model: modelOverride || agentData.default_models?.[p.provider] || p.model,
     }));
     // Show which provider is being used (primary)
     const primary = providersWithModels[0];
@@ -342,11 +342,11 @@ async function executePromptLocally(agentData, inputData, skillPrompts = [], con
     if (!detected) {
         const providers = providersToCheck.join(', ');
         throw new errors_1.CliError(`No LLM key found for: ${providers}\n` +
-            `Set an environment variable (e.g., OPENAI_API_KEY) or configure in web dashboard`);
+            `Set an environment variable (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', or configure in web dashboard`);
     }
     const { provider, key, model: serverModel } = detected;
-    // Priority: server config model > agent default model > hardcoded default
-    const model = serverModel || agentData.default_models?.[provider] || (0, llm_1.getDefaultModel)(provider);
+    // Priority: CLI override > server config model > agent default model > hardcoded default
+    const model = modelOverride || serverModel || agentData.default_models?.[provider] || (0, llm_1.getDefaultModel)(provider);
     // Show which provider is being used (helpful for debugging rate limits)
     process.stderr.write(`Running with ${provider} (${model})...\n`);
     // Call the LLM directly
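
The updated priority comment describes plain short-circuit evaluation: the first truthy value in the chain wins. A sketch with a hypothetical `resolveModel` helper (the ordering comes from the hunk's comment; the model names are illustrative):

    // Priority: CLI override > server config model > agent default model > hardcoded default
    function resolveModel(modelOverride, serverModel, agentDefault, hardcodedDefault) {
        return modelOverride || serverModel || agentDefault || hardcodedDefault;
    }

    console.log(resolveModel('gpt-4o-mini', 'gpt-4o', undefined, 'gpt-5.2')); // 'gpt-4o-mini': --model wins
    console.log(resolveModel(undefined, undefined, undefined, 'gpt-5.2'));    // 'gpt-5.2': hardcoded default

One consequence worth noting: because the chain is a single `||` expression, an empty string passed via --model falls through to the next candidate.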
@@ -772,6 +772,7 @@ function registerRunCommand(program) {
     .option('--here', 'Scan current directory (passes absolute path to agent)')
     .option('--path <dir>', 'Shorthand for --input \'{"path": "<dir>"}\'')
     .option('--provider <name>', 'LLM provider to use (openai, anthropic, gemini, ollama)')
+    .option('--model <model>', 'LLM model to use (overrides agent default)')
     .addHelpText('after', `
 Examples:
   orch run orchagent/leak-finder --input '{"path": "."}'
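
The `.option`/`.addHelpText` chain above matches commander's API, so the new flag presumably surfaces as `options.model` in the action handler. A minimal sketch of that wiring, assuming commander; this is illustrative, not the package's actual command setup:

    const { Command } = require('commander');
    const program = new Command();
    program
        .command('run <agentRef>')
        .option('--model <model>', 'LLM model to use (overrides agent default)')
        .action((agentRef, options) => {
            // options.model is the value after --model, or undefined if the flag was omitted
            console.log(agentRef, options.model);
        });
    program.parse(['node', 'cli', 'run', 'orchagent/leak-finder', '--model', 'llama3.2']);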
@@ -793,6 +794,19 @@ Note: Use 'run' for local execution, 'call' for server-side execution.
     else if (options.path) {
         options.input = JSON.stringify({ path: options.path });
     }
+    if (options.model && options.provider) {
+        const modelLower = options.model.toLowerCase();
+        const providerPatterns = {
+            openai: /^(gpt-|o1-|o3-|davinci|text-)/,
+            anthropic: /^claude-/,
+            gemini: /^gemini-/,
+            ollama: /^(llama|mistral|deepseek|phi|qwen)/,
+        };
+        const expectedPattern = providerPatterns[options.provider];
+        if (expectedPattern && !expectedPattern.test(modelLower)) {
+            process.stderr.write(`Warning: Model '${options.model}' may not be a ${options.provider} model.\n\n`);
+        }
+    }
     const resolved = await (0, config_1.getResolvedConfig)();
     const parsed = parseAgentRef(agentRef);
     const org = parsed.org ?? resolved.defaultOrg;
@@ -941,7 +955,7 @@ Note: Use 'run' for local execution, 'call' for server-side execution.
     }
     // Execute locally
     process.stderr.write(`Executing locally...\n\n`);
-    const result = await executePromptLocally(agentData, inputData, skillPrompts, resolved, options.provider);
+    const result = await executePromptLocally(agentData, inputData, skillPrompts, resolved, options.provider, options.model);
     if (options.json) {
         (0, output_1.printJson)(result);
     }
package/dist/lib/llm.js CHANGED
@@ -68,11 +68,11 @@ exports.PROVIDER_ENV_VARS = {
     gemini: 'GEMINI_API_KEY',
     ollama: 'OLLAMA_HOST',
 };
-// Default models for each provider
+// Default models for each provider (best models)
 exports.DEFAULT_MODELS = {
-    openai: 'gpt-4o',
-    anthropic: 'claude-sonnet-4-20250514',
-    gemini: 'gemini-1.5-pro',
+    openai: 'gpt-5.2',
+    anthropic: 'claude-opus-4-5-20251101',
+    gemini: 'gemini-2.5-pro',
     ollama: 'llama3.2',
 };
 /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@orchagent/cli",
-  "version": "0.3.16",
+  "version": "0.3.18",
   "description": "Command-line interface for the orchagent AI agent marketplace",
   "license": "MIT",
   "author": "orchagent <hello@orchagent.io>",