recoder-code 2.4.7 → 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,4 @@
+ /**
+ * Agent creation command - Create custom agents from templates
+ */
+ export declare function createAgent(): Promise<void>;
@@ -0,0 +1,175 @@
+ /**
+ * Agent creation command - Create custom agents from templates
+ */
+ import inquirer from 'inquirer';
+ import fs from 'fs';
+ import path from 'path';
+ import os from 'os';
+ import chalk from 'chalk';
+ const AGENTS_DIR_PROJECT = '.recoder/agents';
+ const AGENTS_DIR_USER = path.join(os.homedir(), '.recoder-code', 'agents');
+ const AGENT_TEMPLATES = [
+ {
+ name: 'explorer',
+ description: 'Code discovery & research specialist',
+ systemPrompt: `You are an expert code explorer and researcher. Your role is to:
+ - Search and analyze codebases to understand structure and patterns
+ - Find relevant code examples and implementations
+ - Identify dependencies and relationships between components
+ - Discover best practices and conventions used in the project
+ - Provide comprehensive reports on code organization
+
+ Always be thorough and cite specific file paths and line numbers.`,
+ },
+ {
+ name: 'coder',
+ description: 'Implementation specialist',
+ systemPrompt: `You are an expert software engineer focused on implementation. Your role is to:
+ - Write clean, production-ready code following best practices
+ - Implement features with proper error handling and edge cases
+ - Follow the project's existing patterns and conventions
+ - Write self-documenting code with clear variable names
+ - Consider performance and maintainability
+
+ Always write complete, working implementations - no placeholders or TODOs.`,
+ },
+ {
+ name: 'reviewer',
+ description: 'Code quality & security specialist',
+ systemPrompt: `You are an expert code reviewer focused on quality and security. Your role is to:
+ - Review code for bugs, security vulnerabilities, and performance issues
+ - Check adherence to best practices and design patterns
+ - Identify potential edge cases and error scenarios
+ - Suggest improvements for readability and maintainability
+ - Verify proper error handling and input validation
+
+ Provide specific, actionable feedback with examples.`,
+ },
+ {
+ name: 'tester',
+ description: 'Test creation specialist',
+ systemPrompt: `You are an expert test engineer. Your role is to:
+ - Write comprehensive unit, integration, and e2e tests
+ - Cover edge cases and error scenarios
+ - Follow testing best practices (AAA pattern, clear assertions)
+ - Create meaningful test descriptions
+ - Ensure high code coverage
+
+ Write tests that are maintainable and serve as documentation.`,
+ },
+ {
+ name: 'documenter',
+ description: 'Documentation specialist',
+ systemPrompt: `You are an expert technical writer. Your role is to:
+ - Create clear, comprehensive documentation
+ - Write API documentation with examples
+ - Document architecture and design decisions
+ - Create user guides and tutorials
+ - Maintain README files and changelogs
+
+ Write documentation that is accessible to both beginners and experts.`,
+ },
+ {
+ name: 'custom',
+ description: 'Start from scratch',
+ systemPrompt: `You are a helpful AI assistant. Customize this prompt for your specific needs.`,
+ },
+ ];
+ export async function createAgent() {
+ console.log(chalk.bold.cyan('\nšŸ¤– Create Custom Agent\n'));
+ const answers = await inquirer.prompt([
+ {
+ type: 'list',
+ name: 'template',
+ message: 'Choose a template:',
+ choices: AGENT_TEMPLATES.map(t => ({
+ name: `${t.name} - ${t.description}`,
+ value: t.name,
+ })),
+ },
+ {
+ type: 'input',
+ name: 'name',
+ message: 'Agent name:',
+ validate: (input) => {
+ if (!input)
+ return 'Name is required';
+ if (!/^[a-z0-9-]+$/.test(input))
+ return 'Use lowercase letters, numbers, and hyphens only';
+ return true;
+ },
+ },
+ {
+ type: 'input',
+ name: 'description',
+ message: 'Description:',
+ default: (answers) => {
+ const template = AGENT_TEMPLATES.find(t => t.name === answers.template);
+ return template?.description || '';
+ },
+ },
+ {
+ type: 'list',
+ name: 'location',
+ message: 'Save location:',
+ choices: [
+ { name: 'Project (.recoder/agents/) - Available in this project only', value: 'project' },
+ { name: 'User (~/.recoder-code/agents/) - Available globally', value: 'user' },
+ ],
+ default: 'project',
+ },
+ {
+ type: 'confirm',
+ name: 'customize',
+ message: 'Customize system prompt now?',
+ default: false,
+ },
+ ]);
+ const template = AGENT_TEMPLATES.find(t => t.name === answers.template);
+ let systemPrompt = template?.systemPrompt || '';
+ if (answers.customize) {
+ const { prompt } = await inquirer.prompt([
+ {
+ type: 'editor',
+ name: 'prompt',
+ message: 'Edit system prompt:',
+ default: systemPrompt,
+ },
+ ]);
+ systemPrompt = prompt;
+ }
+ const dir = answers.location === 'project' ? AGENTS_DIR_PROJECT : AGENTS_DIR_USER;
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir, { recursive: true });
+ }
+ const filePath = path.join(dir, `${answers.name}.md`);
+ if (fs.existsSync(filePath)) {
+ const { overwrite } = await inquirer.prompt([
+ {
+ type: 'confirm',
+ name: 'overwrite',
+ message: `Agent "${answers.name}" already exists. Overwrite?`,
+ default: false,
+ },
+ ]);
+ if (!overwrite) {
+ console.log(chalk.yellow('\nāŒ Cancelled'));
+ return;
+ }
+ }
+ const content = `---
+ name: ${answers.name}
+ description: ${answers.description}
+ template: ${answers.template}
+ ---
+
+ ${systemPrompt}
+ `;
+ fs.writeFileSync(filePath, content, 'utf-8');
+ console.log(chalk.green(`\nāœ… Agent "${answers.name}" created successfully!`));
+ console.log(chalk.gray(` Location: ${filePath}`));
+ console.log(chalk.cyan('\nšŸ’” Usage:'));
+ console.log(chalk.gray(` In chat: "Let the ${answers.name} agent help with this"`));
+ console.log(chalk.gray(` Edit: ${filePath}`));
+ console.log();
+ }
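The createAgent flow above writes each agent as a Markdown file: YAML-style frontmatter (name, description, template) followed by the system prompt. A minimal sketch of reading such a file back under that assumption; readAgentFile is a hypothetical helper for illustration and not part of the package:

```ts
import fs from 'fs';

// Split an agent file written by createAgent into frontmatter fields and prompt text.
// Assumes the exact layout shown in the diff: "---\n<key: value lines>\n---\n\n<prompt>".
function readAgentFile(filePath: string): { frontmatter: Record<string, string>; systemPrompt: string } {
  const raw = fs.readFileSync(filePath, 'utf-8');
  const match = raw.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
  if (!match) throw new Error(`Not an agent file: ${filePath}`);
  const frontmatter: Record<string, string> = {};
  for (const line of match[1].split('\n')) {
    const idx = line.indexOf(':');
    if (idx > 0) frontmatter[line.slice(0, idx).trim()] = line.slice(idx + 1).trim();
  }
  return { frontmatter, systemPrompt: match[2].trim() };
}
```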
@@ -3,6 +3,7 @@
  */
  import chalk from 'chalk';
  import { listAgents, createAgentFromTemplate } from './agents/list.js';
+ import { createAgent } from './agents/create.js';
  const listCommand = {
  command: 'list',
  describe: 'List all available agents',
@@ -11,6 +12,14 @@ const listCommand = {
  process.exit(0);
  },
  };
+ const createInteractiveCommand = {
+ command: 'new',
+ describe: 'Create a custom agent interactively',
+ handler: async () => {
+ await createAgent();
+ process.exit(0);
+ },
+ };
  const createCommand = {
  command: 'create <name>',
  describe: 'Create a new agent from template',
@@ -47,7 +56,12 @@ const createCommand = {
  export const agentsCommand = {
  command: 'agents',
  describe: 'Manage AI agents (built-in and custom)',
- builder: (yargs) => yargs.command(listCommand).command(createCommand).demandCommand(0).version(false),
+ builder: (yargs) => yargs
+ .command(listCommand)
+ .command(createInteractiveCommand)
+ .command(createCommand)
+ .demandCommand(0)
+ .version(false),
  handler: async () => {
  await listAgents();
  process.exit(0);
@@ -0,0 +1,4 @@
+ /**
+ * Multi-model comparison mode
+ */
+ export declare function compareModels(): Promise<void>;
@@ -0,0 +1,92 @@
+ /**
+ * Multi-model comparison mode
+ */
+ import inquirer from 'inquirer';
+ import chalk from 'chalk';
+ import { getProviderRegistry } from '../../providers/registry.js';
+ export async function compareModels() {
+ console.log(chalk.bold.cyan('\nāš–ļø Multi-Model Comparison\n'));
+ const registry = getProviderRegistry();
+ const providers = registry.getAllProviders();
+ // Get available models
+ const modelChoices = [];
+ for (const provider of providers) {
+ try {
+ const models = await registry.getModels(provider.id);
+ models.forEach(m => {
+ modelChoices.push(`${provider.id}/${m.id}`);
+ });
+ }
+ catch {
+ // Skip
+ }
+ }
+ if (modelChoices.length < 2) {
+ console.log(chalk.red('āŒ Need at least 2 models to compare'));
+ return;
+ }
+ const answers = await inquirer.prompt([
+ {
+ type: 'checkbox',
+ name: 'models',
+ message: 'Select models to compare (2-4):',
+ choices: modelChoices.slice(0, 20),
+ validate: (input) => {
+ if (input.length < 2)
+ return 'Select at least 2 models';
+ if (input.length > 4)
+ return 'Select at most 4 models';
+ return true;
+ },
+ },
+ {
+ type: 'input',
+ name: 'prompt',
+ message: 'Enter prompt to test:',
+ validate: (input) => input.length > 0 || 'Prompt required',
+ },
+ ]);
+ console.log(chalk.cyan('\nšŸ”„ Running comparison...\n'));
+ const results = [];
+ for (const model of answers.models) {
+ const start = Date.now();
+ try {
+ // Simulate API call (replace with actual implementation)
+ const response = `Response from ${model}`;
+ const responseTime = Date.now() - start;
+ results.push({
+ model,
+ response,
+ responseTime,
+ });
+ }
+ catch (err) {
+ results.push({
+ model,
+ response: '',
+ responseTime: Date.now() - start,
+ error: err.message,
+ });
+ }
+ }
+ // Display results
+ console.log(chalk.bold('Comparison Results:'));
+ console.log(chalk.gray('═'.repeat(60)));
+ results.forEach((result, i) => {
+ console.log(chalk.bold.cyan(`\n${i + 1}. ${result.model}`));
+ console.log(chalk.gray(` Response time: ${result.responseTime}ms`));
+ if (result.error) {
+ console.log(chalk.red(` Error: ${result.error}`));
+ }
+ else {
+ console.log(chalk.white(` ${result.response.substring(0, 200)}...`));
+ }
+ });
+ console.log(chalk.gray('\n═'.repeat(60)));
+ // Show fastest
+ const fastest = results.filter(r => !r.error).sort((a, b) => a.responseTime - b.responseTime)[0];
+ if (fastest) {
+ console.log(chalk.green(`\n⚔ Fastest: ${fastest.model} (${fastest.responseTime}ms)`));
+ }
+ console.log();
+ }
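Note that the comparison loop ships with a placeholder where the real model call would go, and it runs the selected models one after another. A minimal sketch of collecting the same result shape concurrently, assuming a hypothetical callModel(model, prompt) helper that returns the completion text (not part of the package):

```ts
// Run each selected model against the prompt in parallel and time each request separately.
async function runComparison(
  models: string[],
  prompt: string,
  callModel: (model: string, prompt: string) => Promise<string>, // hypothetical helper
) {
  return Promise.all(models.map(async (model) => {
    const start = Date.now();
    try {
      const response = await callModel(model, prompt);
      return { model, response, responseTime: Date.now() - start };
    } catch (err) {
      return {
        model,
        response: '',
        responseTime: Date.now() - start,
        error: err instanceof Error ? err.message : String(err),
      };
    }
  }));
}
```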
@@ -0,0 +1,4 @@
+ /**
+ * Interactive model selection with fuzzy search
+ */
+ export declare function selectModel(): Promise<void>;
@@ -0,0 +1,62 @@
+ /**
+ * Interactive model selection with fuzzy search
+ */
+ import inquirer from 'inquirer';
+ import { getProviderRegistry } from '../../providers/registry.js';
+ import chalk from 'chalk';
+ export async function selectModel() {
+ console.log(chalk.bold.cyan('\nšŸ¤– Select AI Model\n'));
+ const registry = getProviderRegistry();
+ const providers = registry.getAllProviders();
+ // Gather all models from all providers
+ const allModels = [];
+ for (const provider of providers) {
+ try {
+ const models = await registry.getModels(provider.id);
+ models.forEach(model => {
+ const contextInfo = model.contextLength ? ` (${Math.round(model.contextLength / 1000)}k ctx)` : '';
+ const freeTag = model.isFree ? chalk.green(' [FREE]') : '';
+ const providerTag = chalk.gray(` - ${provider.name}`);
+ allModels.push({
+ name: `${model.id}${contextInfo}${freeTag}${providerTag}`,
+ value: `${provider.id}/${model.id}`,
+ provider: provider.id,
+ contextLength: model.contextLength,
+ isFree: model.isFree,
+ });
+ });
+ }
+ catch (err) {
+ // Skip providers that fail to load models
+ }
+ }
+ if (allModels.length === 0) {
+ console.log(chalk.red('āŒ No models available'));
+ console.log(chalk.gray(' Run: recoder providers detect'));
+ return;
+ }
+ // Sort: free models first, then by context length
+ allModels.sort((a, b) => {
+ if (a.isFree && !b.isFree)
+ return -1;
+ if (!a.isFree && b.isFree)
+ return 1;
+ return (b.contextLength || 0) - (a.contextLength || 0);
+ });
+ const answer = await inquirer.prompt([
+ {
+ type: 'list',
+ name: 'model',
+ message: 'Select a model:',
+ choices: allModels.slice(0, 30),
+ pageSize: 15,
+ },
+ ]);
+ const selected = allModels.find(m => m.value === answer.model);
+ if (selected) {
+ console.log(chalk.green(`\nāœ… Selected: ${selected.value}`));
+ console.log(chalk.cyan('\nšŸ’” Usage:'));
+ console.log(chalk.gray(` recoder --model ${selected.value}`));
+ console.log(chalk.gray(` Or set as default: recoder models set-default ${selected.value}`));
+ }
+ }
@@ -6,6 +6,8 @@ import * as fs from 'fs';
  import * as path from 'path';
  import * as os from 'os';
  import { getOllamaProvider, getOpenRouterProvider, getAnthropicProvider, getOpenAIProvider, getGroqProvider, parseModelId, } from '../providers/index.js';
+ import { selectModel } from './models/select.js';
+ import { compareModels } from './models/compare.js';
  const CONFIG_DIR = path.join(os.homedir(), '.recoder-code');
  const CUSTOM_MODELS_FILE = path.join(CONFIG_DIR, 'custom-models.json');
  const DEFAULT_MODEL_FILE = path.join(CONFIG_DIR, 'default-model.json');
@@ -162,11 +164,29 @@ const setDefaultCommand = {
  process.exit(0);
  },
  };
+ const selectCommand = {
+ command: 'select',
+ describe: 'Interactive model selection with fuzzy search',
+ handler: async () => {
+ await selectModel();
+ process.exit(0);
+ },
+ };
+ const compareCommand = {
+ command: 'compare',
+ describe: 'Compare multiple models side-by-side',
+ handler: async () => {
+ await compareModels();
+ process.exit(0);
+ },
+ };
  export const modelsCommand = {
  command: 'models',
  describe: 'Manage AI models',
  builder: (yargs) => yargs
  .command(listCommand)
+ .command(selectCommand)
+ .command(compareCommand)
  .command(addCommand)
  .command(removeCommand)
  .command(setDefaultCommand)
@@ -0,0 +1,4 @@
+ /**
+ * Provider health monitoring
+ */
+ export declare function monitorProviders(): Promise<void>;
@@ -0,0 +1,90 @@
+ /**
+ * Provider health monitoring
+ */
+ import chalk from 'chalk';
+ import { getProviderRegistry } from '../../providers/registry.js';
+ async function checkProviderHealth(provider) {
+ const start = Date.now();
+ try {
+ const controller = new AbortController();
+ const timeout = setTimeout(() => controller.abort(), 5000);
+ let endpoint = provider.baseUrl;
+ if (provider.engine === 'openai') {
+ endpoint = `${provider.baseUrl}/models`;
+ }
+ else if (provider.engine === 'ollama') {
+ endpoint = `${provider.baseUrl}/api/tags`;
+ }
+ const response = await fetch(endpoint, {
+ method: 'GET',
+ headers: {
+ 'Content-Type': 'application/json',
+ ...provider.headers,
+ },
+ signal: controller.signal,
+ });
+ clearTimeout(timeout);
+ const responseTime = Date.now() - start;
+ if (response.ok) {
+ return {
+ provider: provider.id,
+ name: provider.name,
+ status: 'online',
+ responseTime,
+ };
+ }
+ else {
+ return {
+ provider: provider.id,
+ name: provider.name,
+ status: 'error',
+ error: `HTTP ${response.status}`,
+ };
+ }
+ }
+ catch (err) {
+ return {
+ provider: provider.id,
+ name: provider.name,
+ status: 'offline',
+ error: err.message || 'Connection failed',
+ };
+ }
+ }
+ export async function monitorProviders() {
+ console.log(chalk.bold.cyan('\nšŸ„ Provider Health Monitor\n'));
+ console.log(chalk.gray('Checking all providers...\n'));
+ const registry = getProviderRegistry();
+ const providers = registry.getAllProviders();
+ const results = [];
+ for (const provider of providers) {
+ const status = await checkProviderHealth(provider);
+ results.push(status);
+ }
+ // Display results
+ console.log(chalk.bold('Status Report:'));
+ console.log(chalk.gray('─'.repeat(60)));
+ const online = results.filter(r => r.status === 'online');
+ const offline = results.filter(r => r.status === 'offline');
+ const errors = results.filter(r => r.status === 'error');
+ online.forEach(r => {
+ console.log(chalk.green('āœ“') + ' ' +
+ chalk.bold(r.name) + ' ' +
+ chalk.gray(`(${r.responseTime}ms)`));
+ });
+ errors.forEach(r => {
+ console.log(chalk.yellow('⚠') + ' ' +
+ chalk.bold(r.name) + ' ' +
+ chalk.gray(`- ${r.error}`));
+ });
+ offline.forEach(r => {
+ console.log(chalk.red('āœ—') + ' ' +
+ chalk.bold(r.name) + ' ' +
+ chalk.gray(`- ${r.error}`));
+ });
+ console.log(chalk.gray('─'.repeat(60)));
+ console.log(chalk.green(`${online.length} online`) + ' | ' +
+ chalk.yellow(`${errors.length} errors`) + ' | ' +
+ chalk.red(`${offline.length} offline`));
+ console.log();
+ }
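Both checkProviderHealth above and checkServer in the local-detection module later in this diff guard their fetch with an AbortController and a timer. A minimal sketch of that timeout pattern in isolation; the helper name and default window are illustrative, not part of the package:

```ts
// Abort the request if the server does not answer within the given window.
// Requires a runtime with global fetch (Node 18+), as the package code already assumes.
async function fetchWithTimeout(url: string, ms = 5000, init: RequestInit = {}): Promise<Response> {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), ms);
  try {
    return await fetch(url, { ...init, signal: controller.signal });
  } finally {
    clearTimeout(timer); // always clear, whether the request resolved or was aborted
  }
}
```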
@@ -6,6 +6,8 @@ import { listProviderModels } from './providers/models.js';
  import { pullModel } from './providers/pull.js';
  import { configureProvider } from './providers/config.js';
  import { RecoderAuthService } from '../services/RecoderAuthService.js';
+ import { detectAndReport } from '../providers/local-detection.js';
+ import { monitorProviders } from './providers/health.js';
  const listCommand = {
  command: 'list',
  describe: 'List all available providers',
@@ -20,6 +22,22 @@ const listCommand = {
  process.exit(0);
  },
  };
+ const detectCommand = {
+ command: 'detect',
+ describe: 'Detect running local AI servers',
+ handler: async () => {
+ await detectAndReport();
+ process.exit(0);
+ },
+ };
+ const healthCommand = {
+ command: 'health',
+ describe: 'Monitor provider health status',
+ handler: async () => {
+ await monitorProviders();
+ process.exit(0);
+ },
+ };
  const modelsCommand = {
  command: 'models [provider]',
  describe: 'List models from all providers',
@@ -80,6 +98,8 @@ export const providersCommand = {
  describe: 'Manage AI providers (Ollama, OpenRouter, etc.)',
  builder: (yargs) => yargs
  .command(listCommand)
+ .command(detectCommand)
+ .command(healthCommand)
  .command(modelsCommand)
  .command(pullCommand)
  .command(configCommand)
@@ -0,0 +1,6 @@
+ /**
+ * Local Provider Detection - Auto-detect running local AI servers
+ */
+ import type { AIProvider } from './types.js';
+ export declare function detectLocalProviders(): Promise<AIProvider[]>;
+ export declare function detectAndReport(): Promise<void>;
@@ -0,0 +1,86 @@
+ /**
+ * Local Provider Detection - Auto-detect running local AI servers
+ */
+ const LOCAL_PROVIDERS = [
+ {
+ id: 'lmstudio',
+ name: 'LM Studio',
+ port: 1234,
+ baseUrl: 'http://localhost:1234/v1',
+ engine: 'openai',
+ },
+ {
+ id: 'ollama',
+ name: 'Ollama',
+ port: 11434,
+ baseUrl: 'http://localhost:11434',
+ engine: 'ollama',
+ },
+ {
+ id: 'llamacpp',
+ name: 'llama.cpp',
+ port: 8080,
+ baseUrl: 'http://localhost:8080/v1',
+ engine: 'openai',
+ },
+ {
+ id: 'vllm',
+ name: 'vLLM',
+ port: 8000,
+ baseUrl: 'http://localhost:8000/v1',
+ engine: 'openai',
+ },
+ ];
+ async function checkServer(url) {
+ try {
+ const controller = new AbortController();
+ const timeout = setTimeout(() => controller.abort(), 1000);
+ const response = await fetch(url, {
+ method: 'GET',
+ signal: controller.signal,
+ });
+ clearTimeout(timeout);
+ return response.ok;
+ }
+ catch {
+ return false;
+ }
+ }
+ export async function detectLocalProviders() {
+ const detected = [];
+ for (const provider of LOCAL_PROVIDERS) {
+ const isRunning = await checkServer(provider.baseUrl);
+ if (isRunning) {
+ detected.push({
+ id: provider.id,
+ name: provider.name,
+ engine: provider.engine,
+ baseUrl: provider.baseUrl,
+ isLocal: true,
+ isEnabled: true,
+ isBuiltin: false,
+ supportsStreaming: true,
+ });
+ }
+ }
+ return detected;
+ }
+ export async function detectAndReport() {
+ console.log('šŸ” Detecting local AI servers...\n');
+ const detected = await detectLocalProviders();
+ if (detected.length === 0) {
+ console.log('āŒ No local AI servers detected');
+ console.log('\nšŸ’” Start one of these:');
+ LOCAL_PROVIDERS.forEach(p => {
+ console.log(` • ${p.name} (port ${p.port})`);
+ });
+ }
+ else {
+ console.log('āœ… Detected local servers:\n');
+ detected.forEach(p => {
+ console.log(` • ${p.name} - ${p.baseUrl}`);
+ });
+ console.log('\nšŸ’” Use with: recoder --provider ' + detected[0].id);
+ }
+ console.log();
+ }
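Detection is driven entirely by the LOCAL_PROVIDERS table above, so covering another OpenAI-compatible local server would just mean adding an entry. A hypothetical example; the id, name, and port below are assumptions for illustration and are not shipped in this release:

```ts
// Shape mirrors the existing LOCAL_PROVIDERS entries; values are illustrative only.
const HYPOTHETICAL_ENTRY = {
  id: 'jan',
  name: 'Jan',
  port: 1337, // assumed default port, verify against the server's own docs
  baseUrl: 'http://localhost:1337/v1',
  engine: 'openai', // probed with a plain GET on baseUrl, like the other entries
};
```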
@@ -11,8 +11,8 @@ const PROVIDERS_FILE = path.join(CONFIG_DIR, 'providers.json');
  const CUSTOM_PROVIDERS_DIR = path.join(CONFIG_DIR, 'custom_providers');
  const DEFAULT_CONFIG = {
  version: '1.0',
- defaultProvider: 'anthropic',
- defaultModel: 'claude-sonnet-4-20250514',
+ defaultProvider: 'openrouter',
+ defaultModel: 'anthropic/claude-sonnet-4-20250514',
  customProviders: [],
  lastUpdated: new Date().toISOString(),
  };