nothumanallowed 14.1.4 → 14.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "nothumanallowed",
|
|
3
|
-
"version": "14.1.
|
|
3
|
+
"version": "14.1.6",
|
|
4
4
|
"description": "NotHumanAllowed — 38 AI agents, 80 tools, Studio (visual agentic workflows). Email, calendar, browser automation, screen capture, canvas, cron/heartbeat, Alexandria E2E messaging, GitHub, Notion, Slack, voice chat, free AI (Liara), 28 languages. Zero-dependency CLI.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
package/src/constants.mjs
CHANGED
|
@@ -5,7 +5,7 @@ import { fileURLToPath } from 'url';
|
|
|
5
5
|
const __filename = fileURLToPath(import.meta.url);
|
|
6
6
|
const __dirname = path.dirname(__filename);
|
|
7
7
|
|
|
8
|
-
export const VERSION = '14.1.4';
|
|
8
|
+
export const VERSION = '14.1.6';
|
|
9
9
|
export const BASE_URL = 'https://nothumanallowed.com/cli';
|
|
10
10
|
export const API_BASE = 'https://nothumanallowed.com/api/v1';
|
|
11
11
|
|
|
@@ -275,12 +275,59 @@ export function register(router) {
|
|
|
275
275
|
a.params = { url: 'https://' + a.params.query.trim() };
|
|
276
276
|
}
|
|
277
277
|
}
|
|
278
|
+
// Auto-detect email reading intent — force imap_list if LLM didn't emit the tool
|
|
279
|
+
const wantsReadEmail = /\b(leggi|read|mostra|lista|ultime?|recenti?|email|mail|inbox|posta)\b.*\b(email|mail|messag|inbox|posta)\b|\b(email|mail)\b.*\b(leggi|read|mostra|lista|ultime?|recenti?)\b/i.test(msg);
|
|
280
|
+
if (wantsReadEmail && !actions.some(a => a.action?.startsWith('imap_') || a.action === 'list_emails')) {
|
|
281
|
+
try {
|
|
282
|
+
const { listAccounts: _la } = await import('../../services/email-db.mjs');
|
|
283
|
+
const imapAccs = _la();
|
|
284
|
+
if (imapAccs.length > 0) {
|
|
285
|
+
const firstAcc = imapAccs[0];
|
|
286
|
+
const limitMatch = msg.match(/\b(\d+)\b/);
|
|
287
|
+
const limit = limitMatch ? Math.min(parseInt(limitMatch[1]), 20) : 5;
|
|
288
|
+
actions.push({ action: 'imap_list', params: { accountId: firstAcc.id, limit } });
|
|
289
|
+
}
|
|
290
|
+
} catch { /* fallback to LLM response */ }
|
|
291
|
+
}
|
|
278
292
|
|
|
279
293
|
for (const { action, params } of actions) {
|
|
280
294
|
if (action === 'web_search' && wantsScreenshot) params.screenshot = true;
|
|
281
295
|
sse('tool', { action, status: 'executing' });
|
|
282
296
|
try {
|
|
283
297
|
const result = await executeTool(action, params, config);
|
|
298
|
+
|
|
299
|
+
// ── Screenshot result handling ───────────────────────────────────
|
|
300
|
+
if (result && typeof result === 'object' && result.__screenshot) {
|
|
301
|
+
// Copy file to ~/.nha/screenshots/ so the UI can load it via /api/screenshots/
|
|
302
|
+
let screenshotUrl = null;
|
|
303
|
+
try {
|
|
304
|
+
const ssDir = path.join(os.homedir(), '.nha', 'screenshots');
|
|
305
|
+
if (!fs.existsSync(ssDir)) fs.mkdirSync(ssDir, { recursive: true });
|
|
306
|
+
const filename = `screenshot-${Date.now()}.png`;
|
|
307
|
+
const destPath = path.join(ssDir, filename);
|
|
308
|
+
if (result.path && fs.existsSync(result.path)) {
|
|
309
|
+
fs.copyFileSync(result.path, destPath);
|
|
310
|
+
screenshotUrl = `/api/screenshots/${filename}`;
|
|
311
|
+
} else if (result.base64) {
|
|
312
|
+
fs.writeFileSync(destPath, Buffer.from(result.base64, 'base64'));
|
|
313
|
+
screenshotUrl = `/api/screenshots/${filename}`;
|
|
314
|
+
}
|
|
315
|
+
} catch { /* fallback — no image shown */ }
|
|
316
|
+
|
|
317
|
+
// Vision analysis — LLM describes what's in the screenshot
|
|
318
|
+
let visionDescription = 'Screenshot captured.';
|
|
319
|
+
if (result.base64) {
|
|
320
|
+
try {
|
|
321
|
+
visionDescription = await callLLMVision(config, 'You are a helpful assistant describing a screenshot.', result.question || 'Describe EXACTLY and ONLY what you see in this screenshot.', { base64: result.base64, mediaType: 'image/png' });
|
|
322
|
+
} catch { /* keep default description */ }
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
toolResults.push({ action, result: visionDescription });
|
|
326
|
+
sse('tool', { action, status: 'done', result: visionDescription.slice(0, 500) });
|
|
327
|
+
if (screenshotUrl) sse('screenshot', { url: screenshotUrl });
|
|
328
|
+
continue;
|
|
329
|
+
}
|
|
330
|
+
|
|
284
331
|
let resultStr = typeof result === 'object' ? JSON.stringify(result) : String(result);
|
|
285
332
|
if ((action === 'web_search' || action === 'fetch_url') && resultStr.includes('<')) {
|
|
286
333
|
resultStr = resultStr
|
|
@@ -352,6 +399,57 @@ export function register(router) {
|
|
|
352
399
|
}
|
|
353
400
|
});
|
|
354
401
|
|
|
402
|
+
// POST /api/chat — non-streaming, for attachments (PDF, image, text file)
|
|
403
|
+
router.post('/api/chat', async (req, res) => {
|
|
404
|
+
try {
|
|
405
|
+
const body = await parseBody(req);
|
|
406
|
+
if (!body.message) return sendError(res, 400, 'message required');
|
|
407
|
+
const config = loadConfig();
|
|
408
|
+
const chatSystemPrompt = await getChatSystemPrompt();
|
|
409
|
+
let enrichedPrompt = chatSystemPrompt;
|
|
410
|
+
try { const ic = await getImapAccountsContext(); if (ic) enrichedPrompt += ic; } catch {}
|
|
411
|
+
const LANG_MAP = { it:'Italian', en:'English', es:'Spanish', fr:'French', de:'German', pt:'Portuguese', nl:'Dutch', pl:'Polish', ru:'Russian', zh:'Chinese', ja:'Japanese', ko:'Korean', ar:'Arabic', hi:'Hindi', tr:'Turkish', sv:'Swedish', da:'Danish', fi:'Finnish', cs:'Czech' };
|
|
412
|
+
const userLang = LANG_MAP[(config?.language || config?.lang || 'en').slice(0,2)] || 'English';
|
|
413
|
+
enrichedPrompt += `\n\nIMPORTANT: Always respond in ${userLang}.`;
|
|
414
|
+
|
|
415
|
+
let response;
|
|
416
|
+
|
|
417
|
+
if (body.pdfBase64) {
|
|
418
|
+
const userMsg = body.message || 'Analyze this PDF document and describe its content.';
|
|
419
|
+
const provider = config?.llm?.provider || 'nha';
|
|
420
|
+
if (provider === 'nha') {
|
|
421
|
+
// Liara Vision non supporta PDF — estrai testo grezzo dal base64 come fallback
|
|
422
|
+
const buf = Buffer.from(body.pdfBase64, 'base64');
|
|
423
|
+
const rawText = buf.toString('latin1').replace(/[^\x20-\x7E\n\r\t]/g, ' ').replace(/\s{4,}/g, '\n').slice(0, 20000);
|
|
424
|
+
const fileCtx = `\n\n--- PDF: ${body.pdfName || 'document.pdf'} (testo estratto) ---\n${rawText}\n--- END PDF ---`;
|
|
425
|
+
response = await callLLM(config, enrichedPrompt + fileCtx, userMsg);
|
|
426
|
+
} else {
|
|
427
|
+
// Anthropic/OpenAI/Gemini — vision nativa per PDF
|
|
428
|
+
response = await callLLMVision(config, enrichedPrompt, userMsg, {
|
|
429
|
+
base64: body.pdfBase64,
|
|
430
|
+
mediaType: 'application/pdf',
|
|
431
|
+
fileName: body.pdfName || 'document.pdf',
|
|
432
|
+
});
|
|
433
|
+
}
|
|
434
|
+
} else if (body.imageBase64) {
|
|
435
|
+
// Image — vision call
|
|
436
|
+
const userMsg = body.message || 'Describe what you see in this image.';
|
|
437
|
+
response = await callLLMVision(config, enrichedPrompt, userMsg, {
|
|
438
|
+
base64: body.imageBase64,
|
|
439
|
+
mediaType: body.imageMimeType || 'image/png',
|
|
440
|
+
});
|
|
441
|
+
} else if (body.fileContent) {
|
|
442
|
+
// Text file — inject content into prompt
|
|
443
|
+
const fileCtx = `\n\n--- FILE: ${body.fileName || 'file'} ---\n${String(body.fileContent).slice(0, 40000)}\n--- END FILE ---`;
|
|
444
|
+
response = await callLLM(config, enrichedPrompt + fileCtx, body.message);
|
|
445
|
+
} else {
|
|
446
|
+
response = await callLLM(config, enrichedPrompt, body.message);
|
|
447
|
+
}
|
|
448
|
+
|
|
449
|
+
sendJSON(res, 200, { response });
|
|
450
|
+
} catch (e) { sendError(res, 500, e.message); }
|
|
451
|
+
});
|
|
452
|
+
|
|
355
453
|
// POST /api/ask — single-turn non-streaming chat
|
|
356
454
|
router.post('/api/ask', async (req, res) => {
|
|
357
455
|
try {
|