nothumanallowed 13.5.159 → 13.5.160
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/constants.mjs +1 -1
- package/src/services/web-ui.mjs +19 -8
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "nothumanallowed",
|
|
3
|
-
"version": "13.5.159",
|
|
3
|
+
"version": "13.5.160",
|
|
4
4
|
"description": "NotHumanAllowed — 38 AI agents, 80 tools, Studio (visual agentic workflows). Email, calendar, browser automation, screen capture, canvas, cron/heartbeat, Alexandria E2E messaging, GitHub, Notion, Slack, voice chat, free AI (Liara), 28 languages. Zero-dependency CLI.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"bin": {
|
package/src/constants.mjs
CHANGED
|
@@ -5,7 +5,7 @@ import { fileURLToPath } from 'url';
|
|
|
5
5
|
const __filename = fileURLToPath(import.meta.url);
|
|
6
6
|
const __dirname = path.dirname(__filename);
|
|
7
7
|
|
|
8
|
-
export const VERSION = '13.5.159';
|
|
8
|
+
export const VERSION = '13.5.160';
|
|
9
9
|
export const BASE_URL = 'https://nothumanallowed.com/cli';
|
|
10
10
|
export const API_BASE = 'https://nothumanallowed.com/api/v1';
|
|
11
11
|
|
package/src/services/web-ui.mjs
CHANGED
|
@@ -8844,15 +8844,24 @@ async function wcGenerate() {
|
|
|
8844
8844
|
var splitPrompts = WC_CSS_SPLIT[fp.name];
|
|
8845
8845
|
if (splitPrompts) {
|
|
8846
8846
|
// Two-pass generation: streaming on first pass only
|
|
8847
|
-
var part1 = await wcCallLLM(sysPreamble, splitPrompts[0] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192, onLiveUpdate);
|
|
8847
|
+
var part1 = await wcCallLLM(sysPreamble, splitPrompts[0] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192, onLiveUpdate, fp.name);
|
|
8848
8848
|
part1 = wcStripFences(part1);
|
|
8849
8849
|
if (signal && signal.aborted) return part1;
|
|
8850
|
-
var part2 = await wcCallLLM(sysPreamble, splitPrompts[1] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192, function(p2) { if (onLiveUpdate) onLiveUpdate(part1 + _nl2 + _nl2 + p2); });
|
|
8850
|
+
var part2 = await wcCallLLM(sysPreamble, splitPrompts[1] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192, function(p2) { if (onLiveUpdate) onLiveUpdate(part1 + _nl2 + _nl2 + p2); }, fp.name);
|
|
8851
8851
|
part2 = wcStripFences(part2);
|
|
8852
8852
|
return part1 + _nl2 + _nl2 + part2;
|
|
8853
8853
|
}
|
|
8854
|
-
var content = await wcCallLLM(sysPreamble, fp.prompt + _nl2 + _nl2 + 'File to generate: ' + fp.name, signal, fp.lang, undefined, onLiveUpdate);
|
|
8854
|
+
var content = await wcCallLLM(sysPreamble, fp.prompt + _nl2 + _nl2 + 'File to generate: ' + fp.name, signal, fp.lang, undefined, onLiveUpdate, fp.name);
|
|
8855
8855
|
content = wcStripFences(content);
|
|
8856
|
+
// Detect model confusion: if output looks like a conversational reply instead of code, retry once
|
|
8857
|
+
var firstLine = content.trim().split(_nl2)[0] || '';
|
|
8858
|
+
var confusionPhrases = ['I notice', 'Could you please', 'I need to know', 'I don', 'To help you', 'Please clarify', 'I apologize', 'Unfortunately', 'As an AI'];
|
|
8859
|
+
var isConfused = confusionPhrases.some(function(p) { return firstLine.indexOf(p) === 0; });
|
|
8860
|
+
if (isConfused && !(signal && signal.aborted)) {
|
|
8861
|
+
var retryPrompt = 'IMPORTANT: Output ONLY the raw file content for ' + fp.name + '. No explanations, no questions, no markdown. Just the code.' + _nl2 + _nl2 + fp.prompt;
|
|
8862
|
+
content = await wcCallLLM(sysPreamble, retryPrompt + _nl2 + _nl2 + 'File to generate: ' + fp.name, signal, fp.lang, undefined, onLiveUpdate, fp.name);
|
|
8863
|
+
content = wcStripFences(content);
|
|
8864
|
+
}
|
|
8856
8865
|
// Post-process: fix LLM streaming artifacts (spaces inserted inside keywords/identifiers)
|
|
8857
8866
|
if (fp.lang === 'javascript' || fp.lang === 'typescript') {
|
|
8858
8867
|
// Fix spaces inside JS/TS keywords that LLMs sometimes split during streaming
|
|
@@ -9294,19 +9303,21 @@ async function wcCallLLMRaw(sys, user, signal, maxTok, onToken) {
|
|
|
9294
9303
|
}
|
|
9295
9304
|
}
|
|
9296
9305
|
|
|
9297
|
-
async function wcCallLLM(sys, user, signal, lang, maxTok, onToken) {
|
|
9306
|
+
async function wcCallLLM(sys, user, signal, lang, maxTok, onToken, fileName) {
|
|
9298
9307
|
var content = await wcCallLLMRaw(sys, user, signal, maxTok, onToken);
|
|
9299
9308
|
// Continuation loop: if response is truncated, ask model to continue (no streaming for continuations)
|
|
9300
9309
|
var maxContinuations = 2;
|
|
9301
9310
|
for (var ci = 0; ci < maxContinuations; ci++) {
|
|
9302
9311
|
if (!wcIsTruncated(content, lang || 'text')) break;
|
|
9303
9312
|
if (signal && signal.aborted) break;
|
|
9304
|
-
var
|
|
9305
|
-
|
|
9306
|
-
|
|
9313
|
+
var _nlc = String.fromCharCode(10);
|
|
9314
|
+
var continuePrompt = (fileName ? 'File: ' + fileName + _nlc + _nlc : '') +
|
|
9315
|
+
'You were generating this file and ran out of tokens. The file is INCOMPLETE.' + _nlc +
|
|
9316
|
+
'Continue EXACTLY from where you stopped. Output ONLY the remaining code — do NOT repeat anything already written, do NOT explain, do NOT use markdown fences.' + _nlc + _nlc +
|
|
9317
|
+
'The file so far ends with (last 600 chars):' + _nlc + content.slice(-600);
|
|
9307
9318
|
var continuation = await wcCallLLMRaw(sys, continuePrompt, signal, maxTok);
|
|
9308
9319
|
if (!continuation || continuation.trim().length < 5) break;
|
|
9309
|
-
content = content +
|
|
9320
|
+
content = content + _nlc + continuation;
|
|
9310
9321
|
if (onToken) onToken(content);
|
|
9311
9322
|
}
|
|
9312
9323
|
return content;
|