apexbot 1.0.0 → 1.0.2
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +127 -146
- package/dist/agent/agentManager.js +13 -13
- package/dist/cli/index.js +40 -39
- package/dist/gateway/dashboard.js +182 -266
- package/dist/gateway/index.js +3 -3
- package/dist/index.js +12 -12
- package/package.json +1 -1
package/README.md
CHANGED
@@ -1,6 +1,6 @@
-#
+# ApexBot
 
-
+Personal AI assistant you run on your own devices. Free with Ollama (local AI).
 
 ```
 ___ ____ _______ ______ ____ ______
@@ -9,112 +9,119 @@
 / ___ |/ ____/ /___ / / /_/ / /_/ / / /
 /_/ |_/_/ /_____//_/|_/_____/\____/ /_/
 
-
+Your Free AI Assistant
 ```
 
 [](https://github.com/YOUR_USERNAME/apexbot/actions)
-[](https://www.npmjs.com/package/apexbot)
+[](LICENSE)
 [](https://www.typescriptlang.org/)
 
-
+100% Free | Open Source | Private | Self-Hosted
 
-[Installation](
-
-
+[Installation](#installation) |
+[Features](#features) |
+[Quick Start](#quick-start) |
+[Commands](#commands) |
+[Contributing](#contributing)
 
 ---
 
-##
+## Installation
+
+Runtime: Node 18+
+
+### npm (recommended)
+
+```bash
+npm install -g apexbot
+apexbot onboard
+```
+
+### One-line install
 
-
+Windows (PowerShell):
 
-**Windows (PowerShell):**
 ```powershell
 iwr -useb https://raw.githubusercontent.com/YOUR_USERNAME/apexbot/main/scripts/install.ps1 | iex
 ```
 
-
-```bash
-curl -fsSL https://raw.githubusercontent.com/YOUR_USERNAME/apexbot/main/scripts/install.sh | bash
-```
+macOS/Linux:
 
-**npm (global):**
 ```bash
-
-apexbot onboard
+curl -fsSL https://raw.githubusercontent.com/YOUR_USERNAME/apexbot/main/scripts/install.sh | bash
 ```
 
-###
+### From source
 
 ```bash
-# Clone repository
 git clone https://github.com/YOUR_USERNAME/apexbot.git
 cd apexbot
 npm install
 npm run build
-
-# Run setup wizard
 npm run onboard
 ```
 
 ---
 
-##
-
-###
-
-
-
-- **
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+## Features
+
+### Local AI with Ollama
+
+ApexBot runs AI models locally on your machine using Ollama. No API keys, no cloud costs, complete privacy.
+
+- **Free**: No subscriptions or usage fees
+- **Private**: Conversations never leave your computer
+- **Offline**: Works without internet after model download
+- **Models**: llama3.2, mistral, codellama, qwen2.5, and more
+
+### Multi-channel support
+
+| Channel  | Status  | Description            |
+|----------|---------|------------------------|
+| Telegram | Ready   | Full bot integration   |
+| Discord  | Ready   | Guilds and DMs         |
+| WebChat  | Ready   | Built-in web dashboard |
+| WhatsApp | Planned | Via Baileys            |
+
+### AI providers
+
+| Provider         | Cost      | Notes                  |
+|------------------|-----------|------------------------|
+| **Ollama**       | Free      | Local AI (recommended) |
+| Google Gemini    | Free tier | Cloud API              |
+| Anthropic Claude | Paid      | Cloud API              |
+| OpenAI GPT       | Paid      | Cloud API              |
 
 ---
 
-##
+## Quick Start
 
-### 1. Install Ollama
+### 1. Install Ollama
+
+Windows:
 
-**Windows:**
 ```powershell
 winget install Ollama.Ollama
 ```
 
-
+macOS:
+
 ```bash
 brew install ollama
 ```
 
-
+Linux:
+
 ```bash
 curl -fsSL https://ollama.com/install.sh | sh
 ```
 
-### 2.
+### 2. Download a model
 
 ```bash
 ollama pull llama3.2
-ollama serve
+ollama serve
 ```
 
 ### 3. Install ApexBot
@@ -123,38 +130,35 @@ ollama serve # Keep running in background
 npm install -g apexbot
 ```
 
-### 4. Run setup
+### 4. Run setup
 
 ```bash
 apexbot onboard
 ```
 
-The wizard
-- Choosing AI provider (Ollama recommended!)
-- Setting up Telegram/Discord
-- Configuring your bot
+The wizard walks you through AI provider selection, channel setup (Telegram/Discord), and configuration.
 
-### 5. Start
+### 5. Start the bot
 
 ```bash
 apexbot daemon start
 ```
 
-
+Your assistant is now running in the background.
 
 ---
 
-##
+## Commands
 
-### CLI
+### CLI
 
 ```bash
-# Setup
+# Setup
 apexbot onboard           # Interactive setup wizard
 apexbot config            # Show configuration
 apexbot config --reset    # Reset configuration
 
-# Daemon (background
+# Daemon (background)
 apexbot daemon start      # Start in background
 apexbot daemon stop       # Stop daemon
 apexbot daemon restart    # Restart daemon
@@ -162,85 +166,73 @@ apexbot daemon status # Check status
 
 # Gateway (foreground)
 apexbot gateway           # Start gateway server
-apexbot gateway --verbose # With debug
+apexbot gateway --verbose # With debug output
 
-#
+# Utilities
 apexbot status            # Show status
 apexbot models            # Manage Ollama models
 ```
 
-### Chat
+### Chat commands
 
-| Command
-
-| `/help`
-| `/status`
-| `/new`
-| `/model <name
+| Command         | Description              |
+|-----------------|--------------------------|
+| `/help`         | Show available commands  |
+| `/status`       | Show session info        |
+| `/new`          | Start new conversation   |
+| `/model <name>` | Switch AI model          |
 
 ---
 
-##
+## Architecture
 
 ```
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-│ Ollama │ Gemini │ Claude │ OpenAI │
-└─────────────────────────────────────────────────────────────┘
+Messaging Channels
+Telegram | Discord | WebChat | WhatsApp
+    |          |         |
+    v          v         v
++----------------------------------------+
+|             Gateway Server             |
+|        (HTTP + WebSocket :18789)       |
++----------------------------------------+
+| Sessions | Event Bus | Rate Limit      |
++----------------------------------------+
+                    |
+                    v
++----------------------------------------+
+|            AI Agent Manager            |
+|   Ollama | Gemini | Claude | OpenAI    |
++----------------------------------------+
 ```
 
 ---
 
-##
-
-| Feature | Clawdbot | ApexBot |
-|---------|----------|---------|
-| **Cost** | Cloud APIs (paid) | **100% FREE with Ollama** |
-| **Privacy** | Cloud-dependent | **Fully local & private** |
-| **Channels** | WhatsApp, Telegram, Discord, iMessage | Telegram, Discord, WebChat |
-| **AI** | Claude, GPT | **Ollama (free!)**, Gemini, Claude, GPT |
-| **Setup** | Complex | **Simple wizard** |
-| **License** | Proprietary | **MIT (Open Source)** |
-
----
-
-## 📁 Project Structure
+## Project Structure
 
 ```
 apexbot/
-
-
-
-
-
-
-
-
-
-
-
-
+  src/
+    adapters/    Provider integrations (Manifold, etc.)
+    agent/       AI agent manager and message handling
+    backtest/    Backtesting framework
+    channels/    Channel adapters (Telegram, Discord)
+    cli/         Command-line interface
+    core/        Event bus and shared utilities
+    gateway/     HTTP/WebSocket server and dashboard
+    math/        Expected value and Kelly criterion
+    safety/      Rate limiting and content filtering
+    sessions/    Conversation state management
+    strategy/    Trading strategies (arbitrage)
+  scripts/
+    install.ps1  Windows installer
+    install.sh   Unix installer
 ```
 
 ---
 
-##
+## Configuration
 
-Configuration
+Configuration lives in `~/.apexbot/config.json`:
 
 ```json
 {
@@ -260,7 +252,7 @@ Configuration is stored in `~/.apexbot/config.json`:
 }
 ```
 
-
+Environment variables also work:
 
 ```bash
 export OLLAMA_URL=http://localhost:11434
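The Configuration section above supports both `~/.apexbot/config.json` and environment variables such as `OLLAMA_URL` and `TELEGRAM_BOT_TOKEN`. A minimal sketch of that config-file-first, environment-fallback pattern (the `loadConfig` helper and the field names inside `config.json` are assumptions, not taken from the package; the precedence mirrors the `cfg.apiKey || process.env.KIMI_API_KEY` lines in `agentManager.js` further down):

```js
// Hypothetical sketch: read ~/.apexbot/config.json if present, then fall back
// to environment variables. Field names are illustrative only.
const fs = require('fs');
const os = require('os');
const path = require('path');

function loadConfig() {
  const configPath = path.join(os.homedir(), '.apexbot', 'config.json');
  let fileConfig = {};
  if (fs.existsSync(configPath)) {
    fileConfig = JSON.parse(fs.readFileSync(configPath, 'utf8'));
  }
  return {
    ollamaUrl: fileConfig.ollamaUrl || process.env.OLLAMA_URL || 'http://localhost:11434',
    telegramBotToken: fileConfig.telegramBotToken || process.env.TELEGRAM_BOT_TOKEN,
  };
}

console.log(loadConfig());
```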
@@ -270,38 +262,27 @@ export TELEGRAM_BOT_TOKEN=your-token
 
 ---
 
-##
+## Contributing
 
-
+See [CONTRIBUTING.md](CONTRIBUTING.md) for guidelines.
 
 ```bash
-# Development
 git clone https://github.com/YOUR_USERNAME/apexbot.git
 cd apexbot
 npm install
-npm run dev
+npm run dev
 ```
 
 ---
 
-##
+## License
 
-MIT License
+MIT License. See [LICENSE](LICENSE).
 
 ---
 
-##
-
-- Inspired by [Clawdbot](https://github.com/clawdbot/clawdbot)
-- Powered by [Ollama](https://ollama.com) for free local AI
-- Built with TypeScript, grammY, discord.js
-
----
-
-<div align="center">
-
-**Made with ❤️ by the ApexBot community**
-
-⭐ Star this repo if you find it useful!
+## Acknowledgments
 
-
+- Inspired by [Clawdbot](https://github.com/moltbot/moltbot)
+- Powered by [Ollama](https://ollama.com) for local AI
+- Built with TypeScript, grammy, discord.js
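The Architecture section added above shows the gateway serving HTTP and WebSocket on port 18789 for the WebChat channel. As a rough sketch only (the diff does not show the gateway's actual message protocol; the JSON shape below and the `ws` dependency are assumptions), a WebChat-style client might look like this:

```js
// Speculative WebChat client. Only the host and port come from the README's
// architecture diagram; the message format is invented for illustration.
const WebSocket = require('ws'); // npm install ws

const socket = new WebSocket('ws://localhost:18789');

socket.on('open', () => {
  socket.send(JSON.stringify({ type: 'message', text: 'Hello, ApexBot!' }));
});

socket.on('message', (data) => {
  console.log('gateway reply:', data.toString());
});

socket.on('error', (err) => {
  console.error('gateway not reachable:', err.message);
});
```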
package/dist/agent/agentManager.js
CHANGED

@@ -65,7 +65,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 async process(session, message) {
 if (!this.config) {
 console.warn('[Agent] Not configured');
-return { text: '
+return { text: 'Agent not configured. Please set up an AI provider.' };
 }
 const userText = message.text || '';
 // Handle slash commands
@@ -93,7 +93,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 response = await this.processWithKimi(history);
 break;
 default:
-response = { text: '
+response = { text: 'Unknown AI provider' };
 }
 // Save to session
 session.messages.push({ role: 'user', content: userText, timestamp: Date.now() });
@@ -120,7 +120,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 const apiKey = cfg.apiKey || process.env.KIMI_API_KEY;
 const apiUrl = cfg.apiUrl || process.env.KIMI_API_URL;
 if (!apiKey || !apiUrl) {
-return { text: '
+return { text: 'Kimi provider not configured. Set config.apiUrl and apiKey (or KIMI_API_URL/KIMI_API_KEY).' };
 }
 // Build prompt from history (system prompt + conversation)
 const systemPrompt = history.find(m => m.role === 'system')?.content || '';
@@ -150,7 +150,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 const data = await res.json().catch(() => ({}));
 const text = data.text || data.output || (data.choices && data.choices[0]?.text) || '';
 return {
-text: String(text || '').trim() || '
+text: String(text || '').trim() || 'Kimi returned an empty response.',
 };
 }
 async processWithOllama(history) {
@@ -188,7 +188,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 const data = await res.json();
 const text = data.message?.content || '';
 return {
-text: String(text).trim() || '
+text: String(text).trim() || 'No response from model',
 usage: {
 inputTokens: data.prompt_eval_count || 0,
 outputTokens: data.eval_count || 0,
@@ -198,7 +198,7 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 catch (error) {
 if (error.code === 'ECONNREFUSED' || error.message?.includes('fetch failed')) {
 return {
-text:
+text: `Ollama not running.\n\nPlease:\n1. Install Ollama: https://ollama.com\n2. Pull a model: ollama pull llama3.2\n3. Start Ollama: ollama serve`,
 };
 }
 throw error;
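The Ollama changes above read `data.message?.content`, `data.prompt_eval_count`, and `data.eval_count`, which are the fields returned by Ollama's `/api/chat` endpoint. A standalone sketch of that call, consistent with the hunks but not copied from the package (the function name and defaults are illustrative):

```js
// Minimal Ollama chat call; requires Node 18+ for the global fetch.
// llama3.2 and the localhost URL are defaults, not values read from apexbot.
async function ollamaChat(history, model = 'llama3.2', baseUrl = 'http://localhost:11434') {
  const res = await fetch(`${baseUrl}/api/chat`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ model, messages: history, stream: false }),
  });
  const data = await res.json();
  return {
    text: String(data.message?.content || '').trim() || 'No response from model',
    usage: {
      inputTokens: data.prompt_eval_count || 0, // tokens in the prompt
      outputTokens: data.eval_count || 0,       // tokens generated
    },
  };
}

// Example: ollamaChat([{ role: 'user', content: 'Hello!' }]).then(r => console.log(r.text));
```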
@@ -257,19 +257,19 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 async processWithClaude(history) {
 // TODO: Implement Anthropic Claude API
 // Requires @anthropic-ai/sdk
-return { text: '
+return { text: 'Claude integration not yet implemented. Use Google/Gemini for now.' };
 }
 async processWithOpenAI(history) {
 // TODO: Implement OpenAI API
 // Requires openai package
-return { text: '
+return { text: 'OpenAI integration not yet implemented. Use Google/Gemini for now.' };
 }
 handleCommand(session, command) {
 const [cmd, ...args] = command.slice(1).split(' ');
 switch (cmd.toLowerCase()) {
 case 'status':
 return {
-text:
+text: `*Session Status*\n` +
 `Session ID: \`${session.id}\`\n` +
 `Messages: ${session.messageCount}\n` +
 `Model: ${session.model || this.config?.model || 'default'}\n` +
@@ -283,10 +283,10 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 const systemMsgs = session.messages.filter(m => m.role === 'system');
 session.messages = systemMsgs;
 session.messageCount = 0;
-return { text: '
+return { text: 'Conversation reset. Let\'s start fresh!' };
 case 'help':
 return {
-text:
+text: `*ApexBot Commands*\n\n` +
 `/status - Show session info\n` +
 `/new - Reset conversation\n` +
 `/model <name> - Change AI model\n` +
@@ -296,11 +296,11 @@ You are running locally on the user's machine. No data leaves their computer. Yo
 case 'model':
 if (args.length > 0) {
 session.model = args.join(' ');
-return { text:
+return { text: `Model changed to: ${session.model}` };
 }
 return { text: `Current model: ${session.model || this.config?.model || 'default'}` };
 default:
-return { text:
+return { text: `Unknown command: /${cmd}\nType /help for available commands.` };
 }
 }
 getStatus() {
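For reference, the command handling changed above strips the leading `/` and splits on spaces (`command.slice(1).split(' ')`), so `/model llama3.2` becomes the command `model` with one argument. A small standalone illustration of that parsing (the `parseSlashCommand` name is hypothetical, not an apexbot export):

```js
// Demonstrates the parsing used by handleCommand:
// '/model llama3.2' -> { cmd: 'model', args: ['llama3.2'] }
function parseSlashCommand(command) {
  const [cmd, ...args] = command.slice(1).split(' ');
  return { cmd: cmd.toLowerCase(), args };
}

console.log(parseSlashCommand('/model llama3.2')); // { cmd: 'model', args: ['llama3.2'] }
console.log(parseSlashCommand('/help'));           // { cmd: 'help', args: [] }
```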