squidclaw 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +149 -0
- package/bin/squidclaw.js +512 -0
- package/lib/ai/gateway.js +283 -0
- package/lib/ai/prompt-builder.js +149 -0
- package/lib/api/server.js +235 -0
- package/lib/behavior/engine.js +187 -0
- package/lib/channels/hub-media.js +128 -0
- package/lib/channels/hub.js +89 -0
- package/lib/channels/whatsapp/manager.js +319 -0
- package/lib/channels/whatsapp/media.js +228 -0
- package/lib/cli/agent-cmd.js +182 -0
- package/lib/cli/brain-cmd.js +49 -0
- package/lib/cli/broadcast-cmd.js +28 -0
- package/lib/cli/channels-cmd.js +157 -0
- package/lib/cli/config-cmd.js +26 -0
- package/lib/cli/conversations-cmd.js +27 -0
- package/lib/cli/engine-cmd.js +115 -0
- package/lib/cli/handoff-cmd.js +26 -0
- package/lib/cli/hours-cmd.js +38 -0
- package/lib/cli/key-cmd.js +62 -0
- package/lib/cli/knowledge-cmd.js +59 -0
- package/lib/cli/memory-cmd.js +50 -0
- package/lib/cli/platform-cmd.js +51 -0
- package/lib/cli/setup.js +226 -0
- package/lib/cli/stats-cmd.js +66 -0
- package/lib/cli/tui.js +308 -0
- package/lib/cli/update-cmd.js +25 -0
- package/lib/cli/webhook-cmd.js +40 -0
- package/lib/core/agent-manager.js +83 -0
- package/lib/core/agent.js +162 -0
- package/lib/core/config.js +172 -0
- package/lib/core/logger.js +43 -0
- package/lib/engine.js +117 -0
- package/lib/features/heartbeat.js +71 -0
- package/lib/storage/interface.js +56 -0
- package/lib/storage/sqlite.js +409 -0
- package/package.json +48 -0
- package/templates/BEHAVIOR.md +42 -0
- package/templates/IDENTITY.md +7 -0
- package/templates/RULES.md +9 -0
- package/templates/SOUL.md +19 -0
|
@@ -0,0 +1,283 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 AI Gateway
|
|
3
|
+
* Multi-provider AI router with auto-fallback
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { logger } from '../core/logger.js';
|
|
7
|
+
import { MODEL_MAP, MODEL_PRICING } from '../core/config.js';
|
|
8
|
+
|
|
9
|
+
export class AIGateway {
  /**
   * Multi-provider AI chat router with auto-fallback.
   *
   * @param {object} config - Application config. Reads `config.ai.providers`
   *   (per-provider `{ key, baseUrl?, models? }`), `config.ai.defaultModel`
   *   and `config.ai.fallbackChain`.
   */
  constructor(config) {
    this.config = config;
    this.providers = {};
    this._initProviders();
  }

  // Keep only the providers that actually have an API key configured.
  _initProviders() {
    const providerConfigs = this.config.ai?.providers || {};
    for (const [name, conf] of Object.entries(providerConfigs)) {
      if (conf.key) {
        this.providers[name] = conf;
      }
    }
    logger.info('ai-gateway', `Initialized providers: ${Object.keys(this.providers).join(', ') || 'none'}`);
  }

  /**
   * Send a chat completion request with auto-fallback.
   *
   * Tries `options.model` (or the configured default) first, then each model
   * in the fallback chain, returning the first successful result.
   *
   * @param {Array<{role: string, content: string}>} messages - Chat history.
   * @param {object} [options] - `model`, `fallbackChain`, `maxTokens`, `temperature`.
   * @returns {Promise<{content: string, model: string, provider: string,
   *   inputTokens: number, outputTokens: number, costUsd: number}>}
   * @throws {Error} If no model is configured, or if every model in the
   *   chain fails (the last failure is rethrown).
   */
  async chat(messages, options = {}) {
    const model = options.model || this.config.ai?.defaultModel;
    const fallbackChain = options.fallbackChain || this.config.ai?.fallbackChain || [];
    // Fix: dedupe the candidate list. The primary model commonly also appears
    // in the fallback chain, and retrying the exact model that just failed
    // is wasted latency and spend.
    const modelsToTry = [...new Set([model, ...fallbackChain].filter(Boolean))];

    if (modelsToTry.length === 0) {
      throw new Error('No AI model configured. Run: squidclaw brain set <model>');
    }

    let lastError;
    for (const m of modelsToTry) {
      try {
        return await this._callProvider(m, messages, options);
      } catch (err) {
        lastError = err;
        logger.warn('ai-gateway', `${m} failed: ${err.message}, trying next...`);
      }
    }
    throw lastError;
  }

  /**
   * Route to the correct provider implementation based on the model name.
   * `MODEL_MAP` translates friendly model names into provider model ids.
   */
  async _callProvider(model, messages, options) {
    const provider = this._resolveProvider(model);
    const modelId = MODEL_MAP[model] || model;

    switch (provider) {
      case 'anthropic': return this._callAnthropic(modelId, messages, options);
      case 'openai': return this._callOpenAI(modelId, messages, options);
      case 'google': return this._callGoogle(modelId, messages, options);
      case 'groq': return this._callOpenAICompat(model, modelId, messages, options, 'https://api.groq.com/openai/v1', 'groq');
      case 'together': return this._callOpenAICompat(model, modelId, messages, options, 'https://api.together.xyz/v1', 'together');
      case 'cerebras': return this._callOpenAICompat(model, modelId, messages, options, 'https://api.cerebras.ai/v1', 'cerebras');
      case 'openrouter': return this._callOpenAICompat(model, modelId, messages, options, 'https://openrouter.ai/api/v1', 'openrouter');
      case 'mistral': return this._callOpenAICompat(model, modelId, messages, options, 'https://api.mistral.ai/v1', 'mistral');
      case 'ollama': return this._callOpenAICompat(model, modelId, messages, options, this.providers.ollama?.baseUrl || 'http://localhost:11434/v1', 'ollama');
      case 'lmstudio': return this._callOpenAICompat(model, modelId, messages, options, this.providers.lmstudio?.baseUrl || 'http://localhost:1234/v1', 'lmstudio');
      default: throw new Error(`Unknown provider for model: ${model}`);
    }
  }

  /**
   * Infer the provider from a model name: first by well-known name prefixes,
   * then by checking each configured provider's `models` list. Falls back to
   * 'openai' when nothing matches.
   */
  _resolveProvider(model) {
    if (model.startsWith('claude')) return 'anthropic';
    if (model.startsWith('gpt') || model.startsWith('o3') || model.startsWith('o4')) return 'openai';
    if (model.startsWith('gemini')) return 'google';
    if (model.startsWith('groq/')) return 'groq';
    if (model.startsWith('together/')) return 'together';
    if (model.startsWith('cerebras/')) return 'cerebras';
    if (model.startsWith('mistral')) return 'mistral';
    if (model.startsWith('ollama/')) return 'ollama';
    if (model.startsWith('lmstudio/')) return 'lmstudio';
    if (model.startsWith('openrouter/')) return 'openrouter';
    // Check if the model is listed by any configured provider
    for (const [prov, conf] of Object.entries(this.config.ai?.providers || {})) {
      if (conf.models?.includes(model)) return prov;
    }
    return 'openai'; // default fallback
  }

  // OAuth tokens (vs plain API keys) require different Anthropic auth headers.
  // NOTE(review): assumes the 'sk-ant-oat' marker is stable — confirm upstream.
  _isOAuthToken(key) {
    return key?.includes('sk-ant-oat');
  }

  /**
   * Call the Anthropic Messages API. System messages are lifted into the
   * top-level `system` field; the rest become user/assistant turns.
   */
  async _callAnthropic(modelId, messages, options) {
    const key = this.providers.anthropic?.key;
    if (!key) throw new Error('Anthropic API key not configured');

    const isOAuth = this._isOAuthToken(key);
    const systemMessage = messages.find(m => m.role === 'system')?.content || '';
    const chatMessages = messages.filter(m => m.role !== 'system').map(m => ({
      role: m.role === 'assistant' ? 'assistant' : 'user',
      content: m.content,
    }));

    const headers = {
      'content-type': 'application/json',
      'anthropic-version': '2023-06-01',
    };

    if (isOAuth) {
      // OAuth tokens use Bearer auth plus the Claude Code beta headers.
      headers['authorization'] = `Bearer ${key}`;
      headers['anthropic-beta'] = 'claude-code-20250219,oauth-2025-04-20';
      headers['user-agent'] = 'squidclaw/0.1.0';
      headers['x-app'] = 'squidclaw';
      headers['anthropic-dangerous-direct-browser-access'] = 'true';
    } else {
      headers['x-api-key'] = key;
    }

    const body = {
      model: modelId,
      max_tokens: options.maxTokens || 4096,
      system: systemMessage,
      messages: chatMessages,
      ...(options.temperature != null ? { temperature: options.temperature } : {}),
    };

    const res = await fetch('https://api.anthropic.com/v1/messages', {
      method: 'POST',
      headers,
      body: JSON.stringify(body),
    });

    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`Anthropic API error ${res.status}: ${err.error?.message || res.statusText}`);
    }

    const data = await res.json();
    const content = data.content?.map(c => c.text).join('') || '';
    const inputTokens = data.usage?.input_tokens || 0;
    const outputTokens = data.usage?.output_tokens || 0;

    return {
      content,
      model: data.model,
      provider: 'anthropic',
      inputTokens,
      outputTokens,
      // Cost is priced by the friendly model name, not the raw model id.
      costUsd: this._calculateCost(options.model || 'claude-sonnet-4', inputTokens, outputTokens),
    };
  }

  /**
   * Call the OpenAI chat completions API.
   */
  async _callOpenAI(modelId, messages, options) {
    const key = this.providers.openai?.key;
    if (!key) throw new Error('OpenAI API key not configured');

    const res = await fetch('https://api.openai.com/v1/chat/completions', {
      method: 'POST',
      headers: {
        'content-type': 'application/json',
        'authorization': `Bearer ${key}`,
      },
      body: JSON.stringify({
        model: modelId,
        messages,
        max_completion_tokens: options.maxTokens || 4096,
        ...(options.temperature != null ? { temperature: options.temperature } : {}),
      }),
    });

    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`OpenAI API error ${res.status}: ${err.error?.message || res.statusText}`);
    }

    const data = await res.json();
    const content = data.choices?.[0]?.message?.content || '';
    const inputTokens = data.usage?.prompt_tokens || 0;
    const outputTokens = data.usage?.completion_tokens || 0;

    return {
      content,
      model: data.model,
      provider: 'openai',
      inputTokens,
      outputTokens,
      costUsd: this._calculateCost(options.model || 'gpt-4o', inputTokens, outputTokens),
    };
  }

  /**
   * Call the Google Gemini generateContent API. The system message (if any)
   * becomes `systemInstruction`; assistant turns map to role 'model'.
   */
  async _callGoogle(modelId, messages, options) {
    const key = this.providers.google?.key;
    if (!key) throw new Error('Google API key not configured');

    const systemMessage = messages.find(m => m.role === 'system')?.content;
    const chatMessages = messages.filter(m => m.role !== 'system').map(m => ({
      role: m.role === 'assistant' ? 'model' : 'user',
      parts: [{ text: m.content }],
    }));

    const body = {
      contents: chatMessages,
      ...(systemMessage ? { systemInstruction: { parts: [{ text: systemMessage }] } } : {}),
      generationConfig: {
        maxOutputTokens: options.maxTokens || 4096,
        ...(options.temperature != null ? { temperature: options.temperature } : {}),
      },
    };

    const url = `https://generativelanguage.googleapis.com/v1beta/models/${modelId}:generateContent?key=${key}`;
    const res = await fetch(url, {
      method: 'POST',
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify(body),
    });

    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`Google API error ${res.status}: ${err.error?.message || res.statusText}`);
    }

    const data = await res.json();
    const content = data.candidates?.[0]?.content?.parts?.map(p => p.text).join('') || '';
    const inputTokens = data.usageMetadata?.promptTokenCount || 0;
    const outputTokens = data.usageMetadata?.candidatesTokenCount || 0;

    return {
      content,
      model: modelId,
      provider: 'google',
      inputTokens,
      outputTokens,
      costUsd: this._calculateCost(options.model || 'gemini-2.5-flash', inputTokens, outputTokens),
    };
  }

  /**
   * OpenAI-compatible API (Groq, Together, Cerebras, Mistral, Ollama,
   * LM Studio, OpenRouter).
   *
   * @param {string} friendlyModel - Name used for pricing lookups.
   * @param {string} modelId - Provider-specific model identifier.
   * @param {string} baseUrl - Provider base URL (no trailing slash).
   * @param {string} providerName - Key into `this.providers` and the
   *   `provider` field of the result.
   */
  async _callOpenAICompat(friendlyModel, modelId, messages, options, baseUrl, providerName) {
    const key = this.providers[providerName]?.key;
    const headers = { 'content-type': 'application/json' };
    // Local servers (Ollama / LM Studio) use the sentinel key 'local' and
    // need no Authorization header.
    if (key && key !== 'local') headers['authorization'] = `Bearer ${key}`;

    const res = await fetch(`${baseUrl}/chat/completions`, {
      method: 'POST',
      headers,
      body: JSON.stringify({
        model: modelId,
        messages,
        max_tokens: options.maxTokens || 4096,
        ...(options.temperature != null ? { temperature: options.temperature } : {}),
      }),
    });

    if (!res.ok) {
      const err = await res.json().catch(() => ({}));
      throw new Error(`${providerName} API error ${res.status}: ${err.error?.message || res.statusText}`);
    }

    const data = await res.json();
    const content = data.choices?.[0]?.message?.content || '';
    const inputTokens = data.usage?.prompt_tokens || 0;
    const outputTokens = data.usage?.completion_tokens || 0;

    return {
      content,
      model: data.model || modelId,
      provider: providerName,
      inputTokens,
      outputTokens,
      costUsd: this._calculateCost(friendlyModel, inputTokens, outputTokens),
    };
  }

  /**
   * Compute the USD cost of a call from MODEL_PRICING (per-million-token
   * input/output rates). Unknown models cost 0.
   */
  _calculateCost(model, inputTokens, outputTokens) {
    const pricing = MODEL_PRICING[model];
    if (!pricing) return 0;
    return (inputTokens * pricing.input / 1_000_000) + (outputTokens * pricing.output / 1_000_000);
  }
}
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 Prompt Builder
|
|
3
|
+
* Assembles system prompt from soul + rules + memory + knowledge + behavior
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { logger } from '../core/logger.js';
|
|
7
|
+
|
|
8
|
+
export class PromptBuilder {
  /**
   * Assembles the system prompt from soul + rules + memory + contact context
   * + knowledge + current time/business-hours context.
   *
   * @param {object} storage - Storage backend providing `getMemories`,
   *   `getContact`, `listDocuments`, `searchKnowledge`.
   */
  constructor(storage) {
    this.storage = storage;
  }

  /**
   * Build the full system prompt for an agent.
   *
   * @param {object} agent - Agent record (`id`, `soul`, `behavior`,
   *   `timezone`, …).
   * @param {string} contactId - Id of the person being replied to.
   * @param {string} userMessage - Incoming message (currently unused here;
   *   kept for interface stability / future retrieval scoring).
   * @returns {Promise<string>} Assembled prompt, sections joined by newlines.
   */
  async build(agent, contactId, userMessage) {
    const parts = [];

    // 1. Soul / Identity
    if (agent.soul) {
      parts.push(agent.soul);
    } else {
      parts.push(this._defaultSoul(agent));
    }

    // 2. Behavior rules
    const behavior = agent.behavior || {};
    parts.push(this._buildBehaviorRules(behavior));

    // 3. Long-term memories (capped at 30 to bound prompt size)
    const memories = await this.storage.getMemories(agent.id);
    if (memories.length > 0) {
      parts.push('\n## What You Remember\n');
      for (const mem of memories.slice(0, 30)) {
        parts.push(`- ${mem.key}: ${mem.value}`);
      }
    }

    // 4. Contact context
    const contact = await this.storage.getContact(agent.id, contactId);
    if (contact) {
      parts.push(`\n## About This Person`);
      if (contact.name) parts.push(`- Name: ${contact.name}`);
      parts.push(`- Messages exchanged: ${contact.message_count || 0}`);
      parts.push(`- First seen: ${contact.first_seen || 'just now'}`);
    }

    // 5. Knowledge base (RAG)
    // TODO: implement vector search; for now inject the top chunks.
    // Fix: the previous version called searchKnowledge once per document with
    // identical arguments, injecting the same chunks up to 5 times. Fetch once.
    const docs = await this.storage.listDocuments(agent.id);
    if (docs.length > 0) {
      const chunks = await this.storage.searchKnowledge(agent.id, null, 10);
      if (chunks.length > 0) {
        parts.push('\n## Knowledge Base\nUse this information to answer questions:\n');
        for (const chunk of chunks.slice(0, 10)) {
          parts.push(chunk.content);
        }
      }
    }

    // 6. Current context (date in en-US; time rendered in the agent's timezone)
    const now = new Date();
    parts.push(`\n## Current Context`);
    parts.push(`- Date: ${now.toLocaleDateString('en-US', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' })}`);
    parts.push(`- Time: ${now.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit', timeZone: agent.timezone || 'UTC' })} (${agent.timezone || 'UTC'})`);

    // 7. Business hours context — only warn when outside hours
    if (behavior.businessHours) {
      const isOpen = this._isBusinessHours(behavior.businessHours, agent.timezone);
      if (!isOpen) {
        parts.push(`- ⚠️ Currently OUTSIDE business hours. ${behavior.afterHoursMessage || 'Let them know and offer to follow up.'}`);
      }
    }

    return parts.join('\n');
  }

  /**
   * Fallback persona used when the agent has no custom soul configured.
   * Language and tone lines are derived from `agent.language` / `agent.tone`.
   */
  _defaultSoul(agent) {
    return `# ${agent.name || 'Assistant'}

You are ${agent.name || 'an AI assistant'}.

## How You Communicate
- Language: ${agent.language === 'ar' ? 'Arabic' : agent.language === 'bilingual' ? 'Bilingual (Arabic & English) — match the language the person writes in' : 'English'}
- Tone: ${agent.tone > 60 ? 'Casual and friendly' : agent.tone > 30 ? 'Professional but warm' : 'Formal and polished'}
- Platform: WhatsApp — keep messages short, conversational, use emojis naturally
- Never write walls of text. Short messages, like a real person texting.
- Never say "As an AI" or "I'd be happy to help" or "Great question!"
- Be direct. Be human. Be useful.`;
  }

  /**
   * Render the behavior-rule section: avoid-phrases, message splitting,
   * reaction/memory/handoff control markers the engine parses from replies.
   */
  _buildBehaviorRules(behavior) {
    const rules = ['\n## Behavior Rules'];

    if (behavior.avoidPhrases?.length) {
      rules.push('Never say:');
      for (const phrase of behavior.avoidPhrases) {
        rules.push(`  - "${phrase}"`);
      }
    }

    rules.push('\n### Response Format');
    rules.push('- Split long responses into 2-4 short messages (marked with ---SPLIT--- between them)');
    rules.push('- Each message should be under 200 characters when possible');
    rules.push('- Use ---SPLIT--- to indicate where to break messages');
    rules.push('- Example: "Hey! 👋---SPLIT---How can I help you today?"');

    rules.push('\n### Reactions');
    rules.push('- When someone says "thanks", "ok", "👍" or similar → respond with just ---REACT:❤️--- (no text reply needed)');
    rules.push('- You can react AND reply: ---REACT:😍---That looks amazing!');
    rules.push('- Available reactions: 👍 ❤️ 😂 😮 😢 🙏 🔥 👏');

    rules.push('\n### Conversation Endings');
    rules.push('- Recognize when a conversation is over');
    rules.push('- Don\'t keep asking "anything else?" — just react and let it end naturally');

    rules.push('\n### Memory');
    rules.push('- Remember important facts about people (name, preferences, past orders/requests)');
    rules.push('- Reference past interactions naturally ("The usual?" "How was that thing you mentioned?")');
    rules.push('- If you learn something important, note it with ---MEMORY:key:value---');
    rules.push('- Example: ---MEMORY:food_preference:vegetarian---');

    if (behavior.handoff?.enabled) {
      rules.push('\n### Escalation');
      rules.push('- If you cannot help or the person is very upset, say ---HANDOFF:reason--- to escalate');
      rules.push('- Tell the person you\'re connecting them with the team');
    }

    return rules.join('\n');
  }

  /**
   * Whether the agent is currently within business hours.
   * Schedule format: "sun-thu 9:00-18:00".
   *
   * NOTE(review): parsing is not implemented yet — this always returns true,
   * so the after-hours warning in build() never fires. Implement properly.
   */
  _isBusinessHours(schedule, timezone) {
    return true;
  }
}
|
|
@@ -0,0 +1,235 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 API Server
|
|
3
|
+
* REST API for dashboard integration and external access
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import express from 'express';
|
|
7
|
+
import { logger } from '../core/logger.js';
|
|
8
|
+
|
|
9
|
+
/**
 * Create the REST API server for dashboard integration and external access.
 *
 * @param {object} engine - Engine exposing `agentManager`, `whatsappManager`
 *   and `storage`.
 * @returns {import('express').Express} Configured (not yet listening) app.
 */
export function createAPIServer(engine) {
  const app = express();
  app.use(express.json());

  const { agentManager, whatsappManager, storage } = engine;

  // Fix: Express 4 does not catch rejections from async handlers, so any
  // thrown error previously left the request hanging with no response.
  // Wrap async routes so failures surface as a 500 JSON error.
  const wrap = (fn) => (req, res) =>
    Promise.resolve(fn(req, res)).catch((err) => {
      logger.warn('api', `${req.method} ${req.path} failed: ${err.message}`);
      if (!res.headersSent) res.status(500).json({ error: err.message });
    });

  // ── Health ──
  app.get('/health', (req, res) => {
    const agents = agentManager.getAll();
    const waStatuses = whatsappManager.getStatuses();
    res.json({
      status: 'ok',
      service: 'squidclaw',
      version: '0.1.0',
      agents: agents.length,
      whatsapp: Object.values(waStatuses).filter(s => s.connected).length,
      uptime: process.uptime(),
    });
  });

  // ── Agents CRUD ──
  app.get('/api/agents', wrap(async (req, res) => {
    const agents = agentManager.getAll().map(a => ({
      id: a.id, name: a.name, model: a.model, language: a.language,
      status: a.status, whatsappNumber: a.whatsappNumber,
      whatsappConnected: whatsappManager.getStatuses()[a.id]?.connected || false,
    }));
    res.json(agents);
  }));

  app.get('/api/agents/:id', wrap(async (req, res) => {
    const agent = agentManager.get(req.params.id);
    if (!agent) return res.status(404).json({ error: 'Agent not found' });
    const usage = await storage.getUsage(agent.id, '30d');
    res.json({
      id: agent.id, name: agent.name, soul: agent.soul, model: agent.model,
      language: agent.language, tone: agent.tone, behavior: agent.behavior,
      timezone: agent.timezone, status: agent.status,
      whatsappNumber: agent.whatsappNumber,
      whatsappConnected: whatsappManager.getStatuses()[agent.id]?.connected || false,
      usage,
    });
  }));

  app.post('/api/agents', async (req, res) => {
    try {
      const agent = await agentManager.create(req.body);
      res.json({ id: agent.id, name: agent.name, status: 'created' });
    } catch (err) {
      res.status(400).json({ error: err.message });
    }
  });

  app.put('/api/agents/:id', async (req, res) => {
    try {
      await agentManager.update(req.params.id, req.body);
      res.json({ status: 'updated' });
    } catch (err) {
      res.status(400).json({ error: err.message });
    }
  });

  app.delete('/api/agents/:id', async (req, res) => {
    try {
      // Stop the WhatsApp session first so the socket doesn't outlive the agent.
      await whatsappManager.stopSession(req.params.id);
      await agentManager.delete(req.params.id);
      res.json({ status: 'deleted' });
    } catch (err) {
      res.status(400).json({ error: err.message });
    }
  });

  // ── Chat (direct, e.g. from the dashboard) ──
  app.post('/api/agents/:id/chat', wrap(async (req, res) => {
    const agent = agentManager.get(req.params.id);
    if (!agent) return res.status(404).json({ error: 'Agent not found' });

    const { message, contactId } = req.body;
    if (!message) return res.status(400).json({ error: 'message required' });

    const sender = contactId || 'dashboard';
    const result = await agent.processMessage(sender, message, { pushName: 'Dashboard' });
    res.json({
      messages: result.messages,
      reaction: result.reaction,
      usage: result.usage,
    });
  }));

  app.get('/api/agents/:id/history', wrap(async (req, res) => {
    const contactId = req.query.contactId || 'dashboard';
    const limit = parseInt(req.query.limit, 10) || 50;
    const history = await storage.getConversation(req.params.id, contactId, limit);
    res.json(history);
  }));

  // ── WhatsApp ──
  app.post('/api/agents/:id/whatsapp/pair', wrap(async (req, res) => {
    const { phone } = req.body;
    if (!phone) return res.status(400).json({ error: 'phone required' });

    // Start session if not exists
    await whatsappManager.startSession(req.params.id);
    // Wait for socket to be ready before requesting a pairing code
    await new Promise(r => setTimeout(r, 3000));
    const code = await whatsappManager.requestPairingCode(req.params.id, phone);
    // Display as XXXX-XXXX when the code is the standard 8 characters
    const formatted = code.length === 8 ? `${code.slice(0, 4)}-${code.slice(4)}` : code;
    res.json({ pairingCode: formatted, status: 'code_ready' });
  }));

  app.post('/api/agents/:id/whatsapp/start', wrap(async (req, res) => {
    await whatsappManager.startSession(req.params.id);
    res.json({ status: 'starting' });
  }));

  app.post('/api/agents/:id/whatsapp/stop', wrap(async (req, res) => {
    await whatsappManager.stopSession(req.params.id);
    res.json({ status: 'stopped' });
  }));

  app.get('/api/whatsapp/status', (req, res) => {
    res.json(whatsappManager.getStatuses());
  });

  // ── Memory ──
  app.get('/api/agents/:id/memory', wrap(async (req, res) => {
    res.json(await storage.getMemories(req.params.id));
  }));

  app.delete('/api/agents/:id/memory', wrap(async (req, res) => {
    await storage.clearMemories(req.params.id);
    res.json({ status: 'cleared' });
  }));

  // ── Knowledge ──
  app.get('/api/agents/:id/knowledge', wrap(async (req, res) => {
    res.json(await storage.listDocuments(req.params.id));
  }));

  app.delete('/api/knowledge/:docId', wrap(async (req, res) => {
    await storage.deleteDocument(req.params.docId);
    res.json({ status: 'deleted' });
  }));

  // ── Usage ──
  app.get('/api/usage', wrap(async (req, res) => {
    const period = req.query.period || '30d';
    const agentId = req.query.agentId;
    if (agentId) {
      res.json(await storage.getUsage(agentId, period));
    } else {
      res.json(await storage.getUsageAll(period));
    }
  }));

  // ── Handoffs ──
  app.get('/api/handoffs', wrap(async (req, res) => {
    res.json(await storage.getActiveHandoffs());
  }));

  app.post('/api/handoffs/:id/resolve', wrap(async (req, res) => {
    await storage.resolveHandoff(req.params.id);
    // Also resolve in the in-memory agent state so replies resume.
    // NOTE: the handoff was just resolved in storage, so look it up in the
    // remaining active set by loose id match (route param is a string).
    const handoffs = await storage.getActiveHandoffs();
    const handoff = handoffs.find(h => h.id == req.params.id);
    if (handoff) {
      const agent = agentManager.get(handoff.agent_id);
      agent?.resolveHandoff(handoff.contact_id);
    }
    res.json({ status: 'resolved' });
  }));

  // ── Webhooks ──
  app.get('/api/webhooks', wrap(async (req, res) => {
    res.json(await storage.getWebhooks());
  }));

  app.post('/api/webhooks', wrap(async (req, res) => {
    const { url, events } = req.body;
    if (!url) return res.status(400).json({ error: 'url required' });
    const id = await storage.addWebhook(url, events);
    res.json({ id, status: 'created' });
  }));

  app.delete('/api/webhooks/:id', wrap(async (req, res) => {
    await storage.removeWebhook(req.params.id);
    res.json({ status: 'deleted' });
  }));

  // ── Contacts ──
  app.get('/api/agents/:id/contacts', wrap(async (req, res) => {
    res.json(await storage.listContacts(req.params.id));
  }));

  // ── Broadcast ──
  app.post('/api/agents/:id/broadcast', wrap(async (req, res) => {
    const { message, contactIds } = req.body;
    if (!message || !contactIds?.length) {
      return res.status(400).json({ error: 'message and contactIds required' });
    }

    let sent = 0;
    let failed = 0;
    for (let i = 0; i < contactIds.length; i++) {
      try {
        await whatsappManager.sendMessage(req.params.id, contactIds[i], message);
        sent++;
        // Rate limit: 1 message per 2 seconds (no pointless pause after the last)
        if (i < contactIds.length - 1) {
          await new Promise(r => setTimeout(r, 2000));
        }
      } catch {
        failed++;
      }
    }
    res.json({ sent, failed, total: contactIds.length });
  }));

  return app;
}
|