@orchagent/cli 0.3.16 → 0.3.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -210,7 +210,7 @@ argument or --file option instead.
  // Warn if no key found for prompt-based agent
  const providerList = supportedProviders.join(', ');
  process.stderr.write(`Warning: No LLM key found for providers: ${providerList}\n` +
- `Set an env var (e.g., OPENAI_API_KEY), use --key, or configure in web dashboard\n\n`);
+ `Set an env var (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', use --key, or configure in web dashboard\n\n`);
  }
  // Add skill headers
  if (options.skills) {
@@ -313,7 +313,7 @@ async function executePromptLocally(agentData, inputData, skillPrompts = [], con
  if (allProviders.length === 0) {
  const providers = providersToCheck.join(', ');
  throw new errors_1.CliError(`No LLM key found for: ${providers}\n` +
- `Set an environment variable (e.g., OPENAI_API_KEY) or configure in web dashboard`);
+ `Set an environment variable (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', or configure in web dashboard`);
  }
  // Apply agent default models to each provider config
  const providersWithModels = allProviders.map((p) => ({
@@ -342,7 +342,7 @@ async function executePromptLocally(agentData, inputData, skillPrompts = [], con
  if (!detected) {
  const providers = providersToCheck.join(', ');
  throw new errors_1.CliError(`No LLM key found for: ${providers}\n` +
- `Set an environment variable (e.g., OPENAI_API_KEY) or configure in web dashboard`);
+ `Set an environment variable (e.g., OPENAI_API_KEY), run 'orchagent keys add <provider>', or configure in web dashboard`);
  }
  const { provider, key, model: serverModel } = detected;
  // Priority: server config model > agent default model > hardcoded default
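The comment at the end of this hunk spells out the model-resolution order: a model supplied by the server config wins, then the agent's own default for that provider, then the hardcoded entry in DEFAULT_MODELS. A minimal sketch of that fallback chain is below; resolveModel, agentDefaults, and the require path are illustrative assumptions, not identifiers from this package.

  // Sketch only: "server config model > agent default model > hardcoded default".
  // resolveModel and agentDefaults are hypothetical names; the require path assumes
  // this runs next to dist/lib/llm.js.
  const { DEFAULT_MODELS } = require('./lib/llm');

  function resolveModel(provider, serverModel, agentDefaults = {}) {
    return serverModel || agentDefaults[provider] || DEFAULT_MODELS[provider];
  }

  // resolveModel('anthropic', undefined, {}) -> 'claude-opus-4-5-20251101' as of 0.3.17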
package/dist/lib/llm.js CHANGED
@@ -68,11 +68,11 @@ exports.PROVIDER_ENV_VARS = {
  gemini: 'GEMINI_API_KEY',
  ollama: 'OLLAMA_HOST',
  };
- // Default models for each provider
+ // Default models for each provider (best models)
  exports.DEFAULT_MODELS = {
- openai: 'gpt-4o',
- anthropic: 'claude-sonnet-4-20250514',
- gemini: 'gemini-1.5-pro',
+ openai: 'gpt-5.2',
+ anthropic: 'claude-opus-4-5-20251101',
+ gemini: 'gemini-2.5-pro',
  ollama: 'llama3.2',
  };
  /**
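This file exports two maps, PROVIDER_ENV_VARS and DEFAULT_MODELS, pairing each provider with its expected environment variable and a fallback model. A hedged sketch of how the two could be consumed together for the key detection behind the "No LLM key found" errors above; detectProvider is a hypothetical helper and the require path is assumed, neither is part of the published API.

  // Illustration only: walks providersToCheck and returns the first provider whose
  // configured environment variable is set, along with its default model.
  const { PROVIDER_ENV_VARS, DEFAULT_MODELS } = require('./llm');

  function detectProvider(providersToCheck) {
    for (const provider of providersToCheck) {
      const envVar = PROVIDER_ENV_VARS[provider];
      const key = envVar ? process.env[envVar] : undefined;
      if (key) {
        return { provider, key, model: DEFAULT_MODELS[provider] };
      }
    }
    return null; // callers then raise "No LLM key found for: ..." as shown above
  }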
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@orchagent/cli",
- "version": "0.3.16",
+ "version": "0.3.17",
  "description": "Command-line interface for the orchagent AI agent marketplace",
  "license": "MIT",
  "author": "orchagent <hello@orchagent.io>",