cognautic-cli 1.0.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29) hide show
  1. cognautic_cli-1.0.0/LICENSE +21 -0
  2. cognautic_cli-1.0.0/PKG-INFO +560 -0
  3. cognautic_cli-1.0.0/README.md +498 -0
  4. cognautic_cli-1.0.0/cognautic/__init__.py +7 -0
  5. cognautic_cli-1.0.0/cognautic/ai_engine.py +1908 -0
  6. cognautic_cli-1.0.0/cognautic/cli.py +825 -0
  7. cognautic_cli-1.0.0/cognautic/config.py +246 -0
  8. cognautic_cli-1.0.0/cognautic/file_tagger.py +194 -0
  9. cognautic_cli-1.0.0/cognautic/memory.py +419 -0
  10. cognautic_cli-1.0.0/cognautic/provider_endpoints.py +424 -0
  11. cognautic_cli-1.0.0/cognautic/tools/__init__.py +19 -0
  12. cognautic_cli-1.0.0/cognautic/tools/base.py +59 -0
  13. cognautic_cli-1.0.0/cognautic/tools/code_analysis.py +391 -0
  14. cognautic_cli-1.0.0/cognautic/tools/command_runner.py +292 -0
  15. cognautic_cli-1.0.0/cognautic/tools/file_operations.py +389 -0
  16. cognautic_cli-1.0.0/cognautic/tools/registry.py +115 -0
  17. cognautic_cli-1.0.0/cognautic/tools/response_control.py +48 -0
  18. cognautic_cli-1.0.0/cognautic/tools/web_search.py +336 -0
  19. cognautic_cli-1.0.0/cognautic/utils.py +297 -0
  20. cognautic_cli-1.0.0/cognautic/websocket_server.py +485 -0
  21. cognautic_cli-1.0.0/cognautic_cli.egg-info/PKG-INFO +560 -0
  22. cognautic_cli-1.0.0/cognautic_cli.egg-info/SOURCES.txt +27 -0
  23. cognautic_cli-1.0.0/cognautic_cli.egg-info/dependency_links.txt +1 -0
  24. cognautic_cli-1.0.0/cognautic_cli.egg-info/entry_points.txt +2 -0
  25. cognautic_cli-1.0.0/cognautic_cli.egg-info/requires.txt +30 -0
  26. cognautic_cli-1.0.0/cognautic_cli.egg-info/top_level.txt +1 -0
  27. cognautic_cli-1.0.0/pyproject.toml +177 -0
  28. cognautic_cli-1.0.0/setup.cfg +4 -0
  29. cognautic_cli-1.0.0/setup.py +71 -0
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Cognautic
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,560 @@
1
+ Metadata-Version: 2.4
2
+ Name: cognautic-cli
3
+ Version: 1.0.0
4
+ Summary: A Python-based CLI AI coding agent that provides agentic development capabilities with multi-provider AI support and real-time interaction
5
+ Home-page: https://github.com/cognautic/cli
6
+ Author: Cognautic
7
+ Author-email: Cognautic <cognautic@gmail.com>
8
+ Maintainer-email: Cognautic <cognautic@gmail.com>
9
+ License: MIT
10
+ Project-URL: Homepage, https://github.com/cognautic/cli
11
+ Project-URL: Documentation, https://cognautic.vercel.app/cognautic-cli.html
12
+ Project-URL: Repository, https://github.com/cognautic/cli.git
13
+ Project-URL: Issues, https://github.com/cognautic/cli/issues
14
+ Project-URL: Changelog, https://github.com/cognautic/cli/blob/main/CHANGELOG.md
15
+ Keywords: ai,cli,coding,assistant,development,automation
16
+ Classifier: Development Status :: 4 - Beta
17
+ Classifier: Intended Audience :: Developers
18
+ Classifier: Operating System :: OS Independent
19
+ Classifier: Programming Language :: Python :: 3
20
+ Classifier: Programming Language :: Python :: 3.8
21
+ Classifier: Programming Language :: Python :: 3.9
22
+ Classifier: Programming Language :: Python :: 3.10
23
+ Classifier: Programming Language :: Python :: 3.11
24
+ Classifier: Programming Language :: Python :: 3.12
25
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
26
+ Classifier: Topic :: Software Development :: Code Generators
27
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
28
+ Requires-Python: >=3.8
29
+ Description-Content-Type: text/markdown
30
+ License-File: LICENSE
31
+ Requires-Dist: click>=8.0.0
32
+ Requires-Dist: websockets>=10.0
33
+ Requires-Dist: aiohttp>=3.8.0
34
+ Requires-Dist: pydantic>=2.0.0
35
+ Requires-Dist: rich>=13.0.0
36
+ Requires-Dist: requests>=2.28.0
37
+ Requires-Dist: beautifulsoup4>=4.11.0
38
+ Requires-Dist: psutil>=5.9.0
39
+ Requires-Dist: cryptography>=3.4.0
40
+ Requires-Dist: keyring>=23.0.0
41
+ Requires-Dist: openai>=1.0.0
42
+ Requires-Dist: anthropic>=0.7.0
43
+ Requires-Dist: google-generativeai>=0.3.0
44
+ Requires-Dist: together>=0.2.0
45
+ Requires-Dist: nest-asyncio>=1.5.0
46
+ Provides-Extra: tools
47
+ Requires-Dist: gitpython>=3.1.0; extra == "tools"
48
+ Requires-Dist: pyyaml>=6.0; extra == "tools"
49
+ Provides-Extra: dev
50
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
51
+ Requires-Dist: pytest-asyncio>=0.21.0; extra == "dev"
52
+ Requires-Dist: black>=22.0.0; extra == "dev"
53
+ Requires-Dist: flake8>=5.0.0; extra == "dev"
54
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
55
+ Requires-Dist: coverage>=6.0.0; extra == "dev"
56
+ Provides-Extra: all
57
+ Requires-Dist: cognautic-cli[dev,tools]; extra == "all"
58
+ Dynamic: author
59
+ Dynamic: home-page
60
+ Dynamic: license-file
61
+ Dynamic: requires-python
62
+
63
+ # Cognautic CLI
64
+
65
+ **A Python-based CLI AI coding agent that provides agentic development capabilities with multi-provider AI support and real-time interaction.**
66
+
67
+ ⚠️ **Under Development** - Some features may be unavailable
68
+
69
+ ---
70
+
71
+ ## Overview
72
+
73
+ Cognautic CLI is a Python-based command-line interface that brings AI-powered development capabilities directly to your terminal. It provides agentic tools for file operations, command execution, web search, and code analysis with support for multiple AI providers. The tool is accessed through a single `cognautic` command with various subcommands.
74
+
75
+ > **⚠️ Development Notice:** Cognautic CLI is currently under development. Some features may be unavailable or subject to change.
76
+
77
+ ### Project Information
78
+
79
+ | Property | Value |
80
+ |----------|-------|
81
+ | **Developer** | Cognautic |
82
+ | **Written in** | Python |
83
+ | **Operating system** | Cross-platform |
84
+ | **Type** | AI Development Tool |
85
+ | **Status** | Under Development |
86
+ | **Repository** | [github.com/cognautic/cli](https://github.com/cognautic/cli) |
87
+
88
+ ---
89
+
90
+ ## Features
91
+
92
+ - **Multi-Provider AI Support**: Integrate with OpenAI, Anthropic, Google, Together AI, OpenRouter, and 15+ other AI providers
93
+ - **Local Model Support**: Run free open-source Hugging Face models locally without API keys (NEW! 🎉)
94
+ - **Agentic Tools**: File operations, command execution, web search, and code analysis
95
+ - **Real-time Communication**: WebSocket server for live AI responses and tool execution
96
+ - **Secure Configuration**: Encrypted API key storage and permission management
97
+ - **Interactive CLI**: Rich terminal interface with progress indicators, colored output, and command history
98
+
99
+ ---
100
+
101
+ ## Installation
102
+
103
+ ### Prerequisites
104
+
105
+ Ensure you have Python 3.8 or higher installed:
106
+
107
+ ```bash
108
+ python --version
109
+ ```
110
+
111
+ ### Download the Wheel File
112
+
113
+ Download the latest `.whl` file from the official repository:
114
+
115
+ ```bash
116
+ # Visit https://github.com/cognautic/cli/releases
117
+ # Download the latest cognautic_cli-z.z.z-py3-none-any.whl file
118
+ ```
119
+
120
+ ### Installation with pip
121
+
122
+ Install the downloaded wheel file using pip:
123
+
124
+ ```bash
125
+ # Navigate to your downloads folder
126
+ cd ~/Downloads
127
+
128
+ # Install the wheel file
129
+ pip install cognautic_cli-z.z.z-py3-none-any.whl
130
+ ```
131
+
132
+ ### Installation with pipx (Recommended)
133
+
134
+ For isolated installation, use pipx:
135
+
136
+ ```bash
137
+ # Install pipx if you don't have it
138
+ pip install pipx
139
+ pipx ensurepath
140
+
141
+ # Install Cognautic CLI with pipx
142
+ pipx install cognautic_cli-z.z.z-py3-none-any.whl
143
+ ```
144
+
145
+ ### Verify Installation
146
+
147
+ Check that Cognautic CLI is installed correctly:
148
+
149
+ ```bash
150
+ cognautic --version
151
+ ```
152
+
153
+ ### Updating Cognautic CLI
154
+
155
+ To update to a newer version, download the new wheel file and:
156
+
157
+ ```bash
158
+ # With pip (force reinstall)
159
+ pip install cognautic_cli-y.y.y-py3-none-any.whl --force-reinstall
160
+
161
+ # With pipx
162
+ pipx upgrade cognautic-cli
163
+ # Or force reinstall with pipx
164
+ pipx install cognautic_cli-y.y.y-py3-none-any.whl --force
165
+ ```
166
+
167
+ _**Note:** Replace `y.y.y` and `z.z.z` with actual version numbers (e.g., 1.2.0, 2.1.5)._
168
+
169
+ ### Uninstallation
170
+
171
+ To remove Cognautic CLI:
172
+
173
+ ```bash
174
+ # With pip
175
+ pip uninstall cognautic-cli
176
+
177
+ # With pipx
178
+ pipx uninstall cognautic-cli
179
+ ```
180
+
181
+ ---
182
+
183
+ ## Quick Start
184
+
185
+ ### Step 1: Install Cognautic CLI
186
+
187
+ ```bash
188
+ pip install cognautic_cli-z.z.z-py3-none-any.whl
189
+ ```
190
+
191
+ ### Step 2: Run Setup
192
+
193
+ ```bash
194
+ cognautic setup --interactive
195
+ ```
196
+
197
+ This will guide you through:
198
+ - Configuring API keys for your preferred AI providers
199
+ - Setting default provider and model
200
+ - Basic preferences
201
+
202
+ ### Step 3: Start Chatting
203
+
204
+ ```bash
205
+ cognautic chat
206
+ ```
207
+
208
+ Now you can chat with AI and use slash commands like:
209
+ - `/help` - Show available commands
210
+ - `/provider openai` - Switch AI provider
211
+ - `/model gpt-4` - Change model
212
+ - `/workspace ~/myproject` - Set working directory
213
+ - `/lmodel microsoft/phi-2` - Load local model
214
+
215
+ **That's it!** Start chatting and let the AI help you code.
216
+
217
+ ---
218
+
219
+ ## Available Slash Commands
220
+
221
+ Once you're in chat mode (`cognautic chat`), use these commands:
222
+
223
+ ### Workspace & Configuration
224
+
225
+ ```bash
226
+ /workspace <path> # Change working directory (alias: /ws)
227
+ /setup # Run interactive setup wizard
228
+ /config list # Show current configuration
229
+ /config set <key> <value> # Set configuration value
230
+ /help # Show all available commands
231
+ ```
232
+
233
+ ### AI Provider & Model Management
234
+
235
+ ```bash
236
+ /provider [name] # Switch AI provider (openai, anthropic, google, etc.)
237
+ /model [model_id] # Switch AI model
238
+ /model list # Fetch available models from provider's API
239
+ /lmodel <path> # Load local Hugging Face model
240
+ /lmodel unload # Unload current local model
241
+ ```
242
+
243
+ ### Session Management
244
+
245
+ ```bash
246
+ /session # Show current session info
247
+ /session list # List all sessions
248
+ /session new # Create new session
249
+ /session load <id> # Load existing session
250
+ /session delete <id> # Delete session
251
+ /session title <text> # Update session title
252
+ ```
253
+
254
+ ### Display & Interface
255
+
256
+ ```bash
257
+ /speed [instant|fast|normal|slow] # Set typing speed
258
+ /clear # Clear chat screen
259
+ /exit or /quit # Exit chat session
260
+ ```
261
+
262
+ ---
263
+
264
+ ## Command-Line Usage
265
+
266
+ Cognautic CLI provides these main commands:
267
+
268
+ ### Setup Command
269
+
270
+ ```bash
271
+ cognautic setup --interactive # Interactive setup wizard
272
+ cognautic setup --provider openai # Quick provider setup
273
+ ```
274
+
275
+ ### Chat Command
276
+
277
+ ```bash
278
+ cognautic chat # Start interactive chat
279
+ cognautic chat --provider anthropic # Chat with specific provider
280
+ cognautic chat --model claude-3-sonnet # Chat with specific model
281
+ cognautic chat --project-path ./my_project # Set workspace
282
+ cognautic chat --session <id> # Continue existing session
283
+ ```
284
+
285
+ ### Config Command
286
+
287
+ ```bash
288
+ cognautic config list # Show all configuration
289
+ cognautic config set <key> <value> # Set configuration value
290
+ cognautic config get <key> # Get configuration value
291
+ cognautic config delete <key> # Delete configuration key
292
+ cognautic config reset # Reset to defaults
293
+ ```
294
+
295
+ ### Providers Command
296
+
297
+ ```bash
298
+ cognautic providers # List all AI providers and endpoints
299
+ ```
300
+
301
+ ---
302
+
303
+ ## Supported AI Providers
304
+
305
+ | Provider | Models | API Key Required |
306
+ |----------|--------|------------------|
307
+ | **OpenAI** | GPT models (GPT-4, GPT-3.5) | `OPENAI_API_KEY` |
308
+ | **Anthropic** | Claude models (Claude-3 Sonnet, Haiku) | `ANTHROPIC_API_KEY` |
309
+ | **Google** | Gemini models | `GOOGLE_API_KEY` |
310
+ | **Together AI** | Various open-source models | `TOGETHER_API_KEY` |
311
+ | **OpenRouter** | Access to multiple providers | `OPENROUTER_API_KEY` |
312
+ | **Local Models** | Hugging Face models (Llama, Mistral, Phi, etc.) | ❌ No API key needed! |
313
+
314
+ ### Using Local Models (NEW! 🎉)
315
+
316
+ Run free open-source AI models locally without any API keys:
317
+
318
+ ```bash
319
+ # Install dependencies
320
+ pip install transformers torch accelerate
321
+
322
+ # Start chat and load a local model
323
+ cognautic chat
324
+ /lmodel microsoft/phi-2
325
+ /provider local
326
+
327
+ # Now chat with your local model!
328
+ ```
329
+
330
+ **Popular local models:**
331
+ - `microsoft/phi-2` - Small and fast (2.7B)
332
+ - `TinyLlama/TinyLlama-1.1B-Chat-v1.0` - Ultra lightweight (1.1B)
333
+ - `meta-llama/Llama-2-7b-chat-hf` - High quality (7B)
334
+ - `mistralai/Mistral-7B-Instruct-v0.2` - Excellent performance (7B)
335
+
336
+ **Benefits:**
337
+ - Complete privacy - no data sent externally
338
+ - No API costs
339
+ - Works offline
340
+ - Full control over model behavior
341
+
342
+ 📖 **[Read the full Local Models Guide →](LOCAL_MODELS.md)**
343
+
344
+ ---
345
+
346
+ ## Configuration
347
+
348
+ Configuration files are stored in `~/.cognautic/`:
349
+
350
+ - `config.json`: General settings and preferences
351
+ - `api_keys.json`: Encrypted API keys for AI providers
352
+ - `sessions/`: Chat session history and context
353
+ - `cache/`: Temporary files and model cache
354
+
355
+ ---
356
+
357
+ ## Command Usage
358
+
359
+ All Cognautic CLI functionality is accessed through the single `cognautic` command. The general syntax is:
360
+
361
+ ```bash
362
+ cognautic <subcommand> [options] [arguments]
363
+ ```
364
+
365
+ ### Getting Help
366
+
367
+ ```bash
368
+ # Show general help
369
+ cognautic --help
370
+
371
+ # Show help for specific command
372
+ cognautic chat --help
373
+ ```
374
+
375
+ ### Version Information
376
+
377
+ ```bash
378
+ cognautic --version
379
+ ```
380
+
381
+ ---
382
+
383
+ ## WebSocket Server & Real-time Streaming
384
+
385
+ Cognautic CLI includes a powerful WebSocket server that enables **real-time, streaming AI responses**. Instead of waiting for the complete response, you receive AI-generated content as it's being produced, providing a much more interactive experience.
386
+
387
+ ### Starting the WebSocket Server
388
+
389
+ The WebSocket server starts automatically when you run chat mode:
390
+
391
+ ```bash
392
+ # Start with default settings (port 8765)
393
+ cognautic chat
394
+
395
+ # Specify custom port
396
+ cognautic chat --websocket-port 9000
397
+
398
+ # With specific provider and model
399
+ cognautic chat --provider openai --model gpt-4o-mini --websocket-port 8765
400
+ ```
401
+
402
+ ### Key Features
403
+
404
+ - **Real-time Streaming**: AI responses stream chunk-by-chunk as they're generated
405
+ - **Bi-directional**: Full duplex WebSocket communication
406
+ - **Session Management**: Automatic session creation and context preservation
407
+ - **Multi-provider**: Works with all supported AI providers
408
+ - **Tool Execution**: Execute tools and file operations via WebSocket
409
+
410
+ ### Client Examples
411
+
412
+ **Python Client:**
413
+ ```bash
414
+ python examples/websocket_client_example.py
415
+
416
+ # Interactive mode
417
+ python examples/websocket_client_example.py interactive
418
+ ```
419
+
420
+ **Web Browser:**
421
+ ```bash
422
+ # Open in your browser
423
+ open examples/websocket_client.html
424
+ ```
425
+
426
+ ### Basic Usage Example
427
+
428
+ ```python
429
+ import asyncio
430
+ import json
431
+ import websockets
432
+
433
+ async def chat():
434
+ uri = "ws://localhost:8765"
435
+ async with websockets.connect(uri) as ws:
436
+ # Receive welcome message
437
+ welcome = json.loads(await ws.recv())
438
+ print(f"Connected! Session: {welcome['session_id']}")
439
+
440
+ # Send chat message with streaming enabled
441
+ await ws.send(json.dumps({
442
+ "type": "chat",
443
+ "message": "Explain Python async/await",
444
+ "stream": True
445
+ }))
446
+
447
+ # Receive streaming response in real-time
448
+ while True:
449
+ response = json.loads(await ws.recv())
450
+
451
+ if response['type'] == 'stream_chunk':
452
+ print(response['chunk'], end='', flush=True)
453
+ elif response['type'] == 'stream_end':
454
+ break
455
+
456
+ asyncio.run(chat())
457
+ ```
458
+
459
+ ### API Documentation
460
+
461
+ For complete WebSocket API documentation, see **[WEBSOCKET_API.md](WEBSOCKET_API.md)**.
462
+
463
+ ---
464
+
465
+ ## Examples
466
+
467
+ ### Simple Chat Session
468
+
469
+ Start chatting with AI:
470
+
471
+ ```bash
472
+ $ cognautic chat
473
+ ██████╗ ██████╗ ██████╗ ███╗ ██╗ █████╗ ██╗ ██╗████████╗██╗ ██████╗
474
+ ██╔════╝██╔═══██╗██╔════╝ ████╗ ██║██╔══██╗██║ ██║╚══██╔══╝██║██╔════╝
475
+ ██║ ██║ ██║██║ ███╗██╔██╗ ██║███████║██║ ██║ ██║ ██║██║
476
+ ██║ ██║ ██║██║ ██║██║╚██╗██║██╔══██║██║ ██║ ██║ ██║██║
477
+ ╚██████╗╚██████╔╝╚██████╔╝██║ ╚████║██║ ██║╚██████╔╝ ██║ ██║╚██████╗
478
+ ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝
479
+
480
+ 💡 Type '/help' for commands, 'exit' to quit
481
+ 🌐 WebSocket server: ws://localhost:8765
482
+ 📁 Workspace: /home/user/projects
483
+ --------------------------------------------------
484
+
485
+ You [projects]: Can you help me create a Python function?
486
+ AI: Of course! I'd be happy to help you create a Python function...
487
+
488
+ You [projects]: /workspace ~/myproject
489
+ ✅ Workspace changed to: /home/user/myproject
490
+
491
+ You [myproject]: Create a file called utils.py with helper functions
492
+ AI: I'll create that file for you...
493
+ ```
494
+
495
+ ### First-Time Setup
496
+
497
+ ```bash
498
+ $ cognautic
499
+ 🎉 Welcome to Cognautic! Let's get you set up.
500
+ 🔑 No API keys found. Let's configure them.
501
+
502
+ Which AI provider would you like to use?
503
+ 1. OpenAI (GPT-4, GPT-3.5)
504
+ 2. Anthropic (Claude)
505
+ 3. Google (Gemini)
506
+ 4. Other providers...
507
+
508
+ Choice [1-4]: 2
509
+ 🔐 Please enter your Anthropic API key: sk-ant-...
510
+ ✅ API key saved securely!
511
+
512
+ 🚀 Setup complete! You're ready to go.
513
+ ```
514
+
515
+ ### Using Local Models
516
+
517
+ Run AI models locally without API keys:
518
+
519
+ ```bash
520
+ $ cognautic chat
521
+ You: /lmodel microsoft/phi-2
522
+ 🔄 Loading local model from: microsoft/phi-2
523
+ ⏳ This may take a few minutes depending on model size...
524
+ Loading local model from microsoft/phi-2 on cuda...
525
+ ✅ Model loaded successfully on cuda
526
+ ✅ Local model loaded successfully!
527
+ 💡 Use: /provider local - to switch to the local model
528
+
529
+ You: /provider local
530
+ ✅ Switched to provider: local
531
+
532
+ You: Hello! Can you help me code?
533
+ AI: Hello! Yes, I'd be happy to help you with coding...
534
+ ```
535
+
536
+ ### Working with Multiple Providers
537
+
538
+ Switch between different AI providers:
539
+
540
+ ```bash
541
+ You: /provider openai
542
+ ✅ Switched to provider: openai
543
+
544
+ You: /model gpt-4o
545
+ ✅ Switched to model: gpt-4o
546
+
547
+ You: Write a Python function to sort a list
548
+ AI: Here's a Python function...
549
+
550
+ You: /provider anthropic
551
+ ✅ Switched to provider: anthropic
552
+
553
+ You: /model claude-3-sonnet-20240229
554
+ ✅ Switched to model: claude-3-sonnet-20240229
555
+ ```
556
+
557
+ ---
558
+
559
+ ## License
560
+ MIT