squidclaw 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +149 -0
- package/bin/squidclaw.js +512 -0
- package/lib/ai/gateway.js +283 -0
- package/lib/ai/prompt-builder.js +149 -0
- package/lib/api/server.js +235 -0
- package/lib/behavior/engine.js +187 -0
- package/lib/channels/hub-media.js +128 -0
- package/lib/channels/hub.js +89 -0
- package/lib/channels/whatsapp/manager.js +319 -0
- package/lib/channels/whatsapp/media.js +228 -0
- package/lib/cli/agent-cmd.js +182 -0
- package/lib/cli/brain-cmd.js +49 -0
- package/lib/cli/broadcast-cmd.js +28 -0
- package/lib/cli/channels-cmd.js +157 -0
- package/lib/cli/config-cmd.js +26 -0
- package/lib/cli/conversations-cmd.js +27 -0
- package/lib/cli/engine-cmd.js +115 -0
- package/lib/cli/handoff-cmd.js +26 -0
- package/lib/cli/hours-cmd.js +38 -0
- package/lib/cli/key-cmd.js +62 -0
- package/lib/cli/knowledge-cmd.js +59 -0
- package/lib/cli/memory-cmd.js +50 -0
- package/lib/cli/platform-cmd.js +51 -0
- package/lib/cli/setup.js +226 -0
- package/lib/cli/stats-cmd.js +66 -0
- package/lib/cli/tui.js +308 -0
- package/lib/cli/update-cmd.js +25 -0
- package/lib/cli/webhook-cmd.js +40 -0
- package/lib/core/agent-manager.js +83 -0
- package/lib/core/agent.js +162 -0
- package/lib/core/config.js +172 -0
- package/lib/core/logger.js +43 -0
- package/lib/engine.js +117 -0
- package/lib/features/heartbeat.js +71 -0
- package/lib/storage/interface.js +56 -0
- package/lib/storage/sqlite.js +409 -0
- package/package.json +48 -0
- package/templates/BEHAVIOR.md +42 -0
- package/templates/IDENTITY.md +7 -0
- package/templates/RULES.md +9 -0
- package/templates/SOUL.md +19 -0
|
@@ -0,0 +1,187 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 Behavior Engine
|
|
3
|
+
* Makes AI responses feel human — splitting, reactions, emotion detection, etc.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { logger } from '../core/logger.js';
|
|
7
|
+
|
|
8
|
+
export class BehaviorEngine {
|
|
9
|
+
constructor() {}
|
|
10
|
+
|
|
11
|
+
/**
|
|
12
|
+
* Process an AI response into human-like message chunks
|
|
13
|
+
* Returns: { messages: string[], reaction: string|null, memoryUpdates: object[], handoff: string|null }
|
|
14
|
+
*/
|
|
15
|
+
process(rawResponse) {
|
|
16
|
+
const result = {
|
|
17
|
+
messages: [],
|
|
18
|
+
reaction: null,
|
|
19
|
+
memoryUpdates: [],
|
|
20
|
+
handoff: null,
|
|
21
|
+
};
|
|
22
|
+
|
|
23
|
+
let text = rawResponse;
|
|
24
|
+
|
|
25
|
+
// Extract reaction
|
|
26
|
+
const reactMatch = text.match(/---REACT:(.+?)---/);
|
|
27
|
+
if (reactMatch) {
|
|
28
|
+
result.reaction = reactMatch[1].trim();
|
|
29
|
+
text = text.replace(/---REACT:.+?---/g, '').trim();
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
// Extract memory updates
|
|
33
|
+
const memoryRegex = /---MEMORY:(.+?):(.+?)---/g;
|
|
34
|
+
let memMatch;
|
|
35
|
+
while ((memMatch = memoryRegex.exec(text)) !== null) {
|
|
36
|
+
result.memoryUpdates.push({ key: memMatch[1].trim(), value: memMatch[2].trim() });
|
|
37
|
+
}
|
|
38
|
+
text = text.replace(/---MEMORY:.+?:.+?---/g, '').trim();
|
|
39
|
+
|
|
40
|
+
// Extract handoff
|
|
41
|
+
const handoffMatch = text.match(/---HANDOFF:(.+?)---/);
|
|
42
|
+
if (handoffMatch) {
|
|
43
|
+
result.handoff = handoffMatch[1].trim();
|
|
44
|
+
text = text.replace(/---HANDOFF:.+?---/g, '').trim();
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// Split messages
|
|
48
|
+
if (text) {
|
|
49
|
+
if (text.includes('---SPLIT---')) {
|
|
50
|
+
result.messages = text.split('---SPLIT---')
|
|
51
|
+
.map(s => s.trim())
|
|
52
|
+
.filter(s => s.length > 0);
|
|
53
|
+
} else {
|
|
54
|
+
// Auto-split long messages
|
|
55
|
+
result.messages = this._autoSplit(text);
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
return result;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
/**
|
|
63
|
+
* Auto-split long text into natural message chunks
|
|
64
|
+
*/
|
|
65
|
+
_autoSplit(text) {
|
|
66
|
+
// If short enough, return as-is
|
|
67
|
+
if (text.length <= 250) return [text];
|
|
68
|
+
|
|
69
|
+
const chunks = [];
|
|
70
|
+
const sentences = text.split(/(?<=[.!?،؟])\s+/);
|
|
71
|
+
let current = '';
|
|
72
|
+
|
|
73
|
+
for (const sentence of sentences) {
|
|
74
|
+
if ((current + ' ' + sentence).trim().length > 250 && current) {
|
|
75
|
+
chunks.push(current.trim());
|
|
76
|
+
current = sentence;
|
|
77
|
+
} else {
|
|
78
|
+
current = current ? current + ' ' + sentence : sentence;
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
if (current.trim()) chunks.push(current.trim());
|
|
82
|
+
|
|
83
|
+
// If we still have chunks that are too long, split by newlines
|
|
84
|
+
const final = [];
|
|
85
|
+
for (const chunk of chunks) {
|
|
86
|
+
if (chunk.length > 400) {
|
|
87
|
+
const lines = chunk.split('\n').filter(l => l.trim());
|
|
88
|
+
final.push(...lines);
|
|
89
|
+
} else {
|
|
90
|
+
final.push(chunk);
|
|
91
|
+
}
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
// Max 4 chunks
|
|
95
|
+
if (final.length > 4) {
|
|
96
|
+
return [
|
|
97
|
+
final.slice(0, 2).join(' '),
|
|
98
|
+
final.slice(2, 3).join(' '),
|
|
99
|
+
final.slice(3).join(' '),
|
|
100
|
+
].filter(s => s.trim());
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
return final.length > 0 ? final : [text];
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
/**
|
|
107
|
+
* Detect the emotional tone of an incoming message
|
|
108
|
+
*/
|
|
109
|
+
detectEmotion(text) {
|
|
110
|
+
const lower = text.toLowerCase();
|
|
111
|
+
|
|
112
|
+
// Angry indicators
|
|
113
|
+
const angryWords = ['!!', 'terrible', 'worst', 'awful', 'hate', 'angry', 'furious', 'unacceptable',
|
|
114
|
+
'زفت', 'سيء', 'أسوأ', 'غضب'];
|
|
115
|
+
if (angryWords.some(w => lower.includes(w)) || (text.match(/!/g) || []).length >= 2) {
|
|
116
|
+
return 'angry';
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
// Happy indicators
|
|
120
|
+
const happyWords = ['thank', 'love', 'amazing', 'great', 'awesome', 'perfect', 'excellent',
|
|
121
|
+
'شكر', 'ممتاز', 'رائع', 'حلو', '❤️', '😍', '🥰', '😊'];
|
|
122
|
+
if (happyWords.some(w => lower.includes(w))) {
|
|
123
|
+
return 'happy';
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// Confused indicators
|
|
127
|
+
const confusedWords = ['don\'t understand', 'confused', 'what do you mean', 'how', 'explain',
|
|
128
|
+
'مش فاهم', 'كيف', 'وش يعني'];
|
|
129
|
+
if (confusedWords.some(w => lower.includes(w)) || text.includes('??')) {
|
|
130
|
+
return 'confused';
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
// Urgent indicators
|
|
134
|
+
const urgentWords = ['urgent', 'asap', 'immediately', 'emergency', 'now', 'hurry',
|
|
135
|
+
'ضروري', 'مستعجل', 'فوري', 'الحين'];
|
|
136
|
+
if (urgentWords.some(w => lower.includes(w))) {
|
|
137
|
+
return 'urgent';
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
return 'neutral';
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* Detect if a message is a conversation ending
|
|
145
|
+
*/
|
|
146
|
+
isConversationEnding(text) {
|
|
147
|
+
const lower = text.toLowerCase().trim();
|
|
148
|
+
const endings = [
|
|
149
|
+
'ok', 'okay', 'thanks', 'thank you', 'thx', 'bye', 'goodbye', 'see you',
|
|
150
|
+
'good night', 'gn', 'ttyl', 'later', 'cool', 'got it', 'perfect',
|
|
151
|
+
'شكرا', 'مشكور', 'تمام', 'اوكي', 'باي', 'يعطيك العافية', 'الله يعافيك',
|
|
152
|
+
'👍', '🙏', '✅', '❤️', '👋',
|
|
153
|
+
];
|
|
154
|
+
return endings.some(e => lower === e || lower === e + '!' || lower === e + '.');
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Detect the language of a message
|
|
159
|
+
*/
|
|
160
|
+
detectLanguage(text) {
|
|
161
|
+
// Check for Arabic characters
|
|
162
|
+
const arabicRegex = /[\u0600-\u06FF\u0750-\u077F\u08A0-\u08FF]/;
|
|
163
|
+
const arabicChars = (text.match(arabicRegex) || []).length;
|
|
164
|
+
const totalChars = text.replace(/\s/g, '').length;
|
|
165
|
+
|
|
166
|
+
if (totalChars === 0) return 'en';
|
|
167
|
+
const arabicRatio = arabicChars / totalChars;
|
|
168
|
+
|
|
169
|
+
if (arabicRatio > 0.3) return 'ar';
|
|
170
|
+
return 'en';
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
/**
|
|
174
|
+
* Get appropriate reaction emoji for a message
|
|
175
|
+
*/
|
|
176
|
+
suggestReaction(text, emotion) {
|
|
177
|
+
if (emotion === 'happy') return '❤️';
|
|
178
|
+
if (emotion === 'angry') return null; // don't react to angry messages
|
|
179
|
+
|
|
180
|
+
const lower = text.toLowerCase();
|
|
181
|
+
if (lower.includes('😂') || lower.includes('haha') || lower.includes('lol')) return '😂';
|
|
182
|
+
if (lower.includes('photo') || lower.includes('image') || lower.includes('📸')) return '😍';
|
|
183
|
+
if (this.isConversationEnding(text)) return '❤️';
|
|
184
|
+
|
|
185
|
+
return null;
|
|
186
|
+
}
|
|
187
|
+
}
|
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 Channel Hub Media Extension
|
|
3
|
+
* Adds voice note transcription, image analysis, and voice reply support
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { MediaHandler } from './whatsapp/media.js';
|
|
7
|
+
import { logger } from '../core/logger.js';
|
|
8
|
+
|
|
9
|
+
/**
|
|
10
|
+
* Enhance ChannelHub with media processing capabilities
|
|
11
|
+
*/
|
|
12
|
+
/**
 * Enhance a ChannelHub with media processing capabilities.
 *
 * Replaces hub._handleWhatsAppMessage with a version that:
 *   1. transcribes incoming voice notes before the agent sees them,
 *   2. runs incoming images through vision analysis,
 *   3. optionally answers with a synthesized voice note.
 *
 * @param {object} hub    - ChannelHub instance to patch in place.
 * @param {object} config - Config object handed to MediaHandler and AI calls.
 * @param {string} home   - Base directory for MediaHandler working files.
 */
export function addMediaSupport(hub, config, home) {
  const mediaHandler = new MediaHandler(config, home);

  hub._handleWhatsAppMessage = async function(agentId, contactId, message, metadata) {
    const agent = hub.agentManager.get(agentId);
    if (!agent) return;

    // Group messages are dropped anyway — bail out before paying for
    // transcription / vision calls on content we will never answer.
    if (metadata.isGroup) return;

    // Handle voice notes — transcribe first so the agent receives text
    if (metadata.mediaType === 'audio' && metadata._rawMessage) {
      logger.info('hub', `🎤 Voice note from ${contactId} — transcribing...`);
      try {
        const buffer = await hub.whatsappManager.downloadMedia(agentId, metadata._rawMessage);
        if (buffer) {
          const transcript = await mediaHandler.transcribeAudio(buffer, config);
          if (transcript) {
            message = `[🎤 Voice note transcription]: "${transcript}"`;
            metadata.originalType = 'voice';
            metadata.transcript = transcript;
            logger.info('hub', `Transcribed: ${transcript.substring(0, 50)}...`);
          }
        }
      } catch (err) {
        // Best-effort: fall through with the original placeholder message
        logger.error('hub', `Transcription failed: ${err.message}`);
      }
    }

    // Handle images — analyze with vision and inline the description
    if (metadata.mediaType === 'image' && metadata._rawMessage) {
      logger.info('hub', `📸 Image from ${contactId} — analyzing...`);
      try {
        const buffer = await hub.whatsappManager.downloadMedia(agentId, metadata._rawMessage);
        if (buffer) {
          const caption = message.replace('[📸 Image]', '').trim();
          const analysis = await mediaHandler.analyzeImage(buffer, caption || 'What is in this image? Be brief.', config);
          message = caption
            ? `[📸 Image with caption: "${caption}"]\nImage shows: ${analysis}`
            : `[📸 Image]\nImage shows: ${analysis}`;
          metadata.originalType = 'image';
          metadata.imageAnalysis = analysis;
        }
      } catch (err) {
        // Best-effort: agent still sees the original placeholder message
        logger.error('hub', `Image analysis failed: ${err.message}`);
      }
    }

    // Process message through agent (original flow)
    const result = await agent.processMessage(contactId, message, metadata);

    // Send reaction (best-effort; failures are non-fatal)
    if (result.reaction && metadata.messageId) {
      try {
        await hub.whatsappManager.sendReaction(agentId, contactId, metadata.messageId, result.reaction);
      } catch {}
    }

    // Decide: send as voice note or text?
    const behavior = agent.behavior || {};
    const shouldVoice = _shouldSendVoice(behavior, result.messages, metadata);

    if (shouldVoice && result.messages.length > 0) {
      // Send as voice note
      const fullText = result.messages.join('. ');
      try {
        // Shallow copy so we never mutate the agent's persistent behavior config
        const voiceConfig = { ...(behavior.voice || {}) };
        // Auto-select voice by detected language when none is configured
        if (!voiceConfig.name) {
          const { BehaviorEngine } = await import('../behavior/engine.js');
          const be = new BehaviorEngine();
          const lang = be.detectLanguage(fullText);
          voiceConfig.name = lang === 'ar' ? 'ar-SA-ZariyahNeural' : 'en-US-AriaNeural';
        }
        const audioBuffer = await mediaHandler.textToSpeech(fullText, voiceConfig);
        if (audioBuffer) {
          await hub.whatsappManager.sendVoiceNote(agentId, contactId, audioBuffer);
          logger.info('hub', `🎤 Sent voice reply to ${contactId}`);
        } else {
          // TTS produced nothing — fall back to text
          await hub.whatsappManager.sendMessages(agentId, contactId, result.messages);
        }
      } catch (err) {
        logger.warn('hub', `Voice send failed, falling back to text: ${err.message}`);
        await hub.whatsappManager.sendMessages(agentId, contactId, result.messages);
      }
    } else if (result.messages.length > 0) {
      await hub.whatsappManager.sendMessages(agentId, contactId, result.messages);
    }

    // Handoff notification
    if (result.handoff) {
      await hub._notifyHandoff(agent, contactId, result.handoff, message);
    }

    // Webhooks
    await hub._fireWebhooks('message', {
      agentId, contactId, incomingMessage: message, replies: result.messages, usage: result.usage,
    });
  };
}
|
|
113
|
+
|
|
114
|
+
/**
 * Decide whether the reply should be delivered as a voice note.
 *
 * @param {object} behavior - Agent behavior config; reads `behavior.voice`.
 * @param {string[]} messages - Outgoing reply chunks.
 * @param {object} metadata - Incoming-message metadata; reads `originalType`.
 * @returns {boolean} true when the reply should be synthesized as audio.
 */
function _shouldSendVoice(behavior, messages, metadata) {
  const voice = behavior.voice || {};
  if (!voice.enabled) return false;

  // Modes: 'never' | 'sometimes' | 'prefer' (unset defaults to 'never')
  const mode = voice.sendVoice || 'never';
  if (mode === 'never') return false;
  if (mode === 'prefer') return true;

  // 'sometimes': mirror the customer — they sent voice, we answer in voice
  if (metadata.originalType === 'voice') return true;

  // Optionally go voice when the combined reply is long
  const replyLength = messages.join(' ').length;
  return Boolean(voice.voiceForLong && replyLength > 500);
}
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* 🦑 Channel Hub
|
|
3
|
+
* Routes messages between channels and agents
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { logger } from '../core/logger.js';
|
|
7
|
+
|
|
8
|
+
export class ChannelHub {
  /**
   * @param {object} agentManager    - Resolves agent instances by id (`get`).
   * @param {object} whatsappManager - WhatsApp transport (send/react APIs).
   * @param {object} storage         - Persistence layer (`getWebhooks`).
   */
  constructor(agentManager, whatsappManager, storage) {
    this.agentManager = agentManager;
    this.whatsappManager = whatsappManager;
    this.storage = storage;

    // Wire up WhatsApp message handler
    this.whatsappManager.onMessage = this._handleWhatsAppMessage.bind(this);
  }

  /**
   * Route an incoming WhatsApp message through its agent and deliver the
   * reply: reaction first, then text messages, then handoff + webhooks.
   *
   * @param {string} agentId   - Id of the receiving agent.
   * @param {string} contactId - WhatsApp contact the message came from.
   * @param {string} message   - Message text.
   * @param {object} metadata  - Reads `isGroup`, `messageId`, `emotion`.
   */
  async _handleWhatsAppMessage(agentId, contactId, message, metadata) {
    const agent = this.agentManager.get(agentId);
    if (!agent) {
      logger.warn('hub', `No agent found for ${agentId}`);
      return;
    }

    // Skip group messages unless mentioned (for now)
    if (metadata.isGroup) {
      // TODO: implement group mention detection
      return;
    }

    // Process message through agent
    const result = await agent.processMessage(contactId, message, metadata);

    // Send reaction first (if any) — best-effort, never fatal
    if (result.reaction && metadata.messageId) {
      try {
        await this.whatsappManager.sendReaction(agentId, contactId, metadata.messageId, result.reaction);
      } catch (err) {
        logger.warn('hub', `Failed to send reaction: ${err.message}`);
      }
    }

    // Send reply messages with human-like delays
    if (result.messages.length > 0) {
      try {
        await this.whatsappManager.sendMessages(agentId, contactId, result.messages);
      } catch (err) {
        logger.error('hub', `Failed to send reply: ${err.message}`);
      }
    }

    // Handle handoff notification
    if (result.handoff) {
      await this._notifyHandoff(agent, contactId, result.handoff, message);
    }

    // Fire webhooks
    await this._fireWebhooks('message', {
      agentId,
      contactId,
      incomingMessage: message,
      replies: result.messages,
      emotion: metadata.emotion,
      usage: result.usage,
    });
  }

  /**
   * Log a human-handoff request.
   *
   * @param {object} agent       - Agent that requested the handoff.
   * @param {string} contactId   - Contact involved.
   * @param {string} reason      - Reason reported by the agent.
   * @param {string} lastMessage - Last incoming message (currently unused).
   */
  async _notifyHandoff(agent, contactId, reason, lastMessage) {
    // TODO: notify agent owner via WhatsApp/push notification
    logger.info('hub', `⚠️ HANDOFF: Agent "${agent.name}" needs help with ${contactId}. Reason: ${reason}`);
  }

  /**
   * POST an event payload to every registered webhook subscribed to it
   * (event name match or '*' wildcard). Delivery is best-effort: individual
   * failures are logged and never thrown.
   *
   * @param {string} event - Event name, e.g. 'message'.
   * @param {object} data  - JSON-serializable payload.
   */
  async _fireWebhooks(event, data) {
    const webhooks = await this.storage.getWebhooks();
    for (const wh of webhooks) {
      if (wh.events.includes('*') || wh.events.includes(event)) {
        try {
          const res = await fetch(wh.url, {
            method: 'POST',
            headers: { 'content-type': 'application/json' },
            body: JSON.stringify({ event, data, timestamp: new Date().toISOString() }),
          });
          // fetch only rejects on network errors — surface HTTP-level failures too
          if (!res.ok) {
            logger.warn('hub', `Webhook ${wh.id} failed: HTTP ${res.status}`);
          }
        } catch (err) {
          logger.warn('hub', `Webhook ${wh.id} failed: ${err.message}`);
        }
      }
    }
  }
}
|