nothumanallowed 13.5.92 → 13.5.93

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "nothumanallowed",
- "version": "13.5.92",
+ "version": "13.5.93",
  "description": "NotHumanAllowed — 38 AI agents, 80 tools, Studio (visual agentic workflows). Email, calendar, browser automation, screen capture, canvas, cron/heartbeat, Alexandria E2E messaging, GitHub, Notion, Slack, voice chat, free AI (Liara), 28 languages. Zero-dependency CLI.",
  "type": "module",
  "bin": {
@@ -8015,10 +8015,10 @@ async function wcGenerate() {
  var splitPrompts = WC_CSS_SPLIT[fp.name];
  if (splitPrompts) {
  // Two-pass generation: call LLM twice and concatenate
- var part1 = await wcCallLLM(sysPreamble, splitPrompts[0] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang);
+ var part1 = await wcCallLLM(sysPreamble, splitPrompts[0] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192);
  part1 = wcStripFences(part1);
  if (signal && signal.aborted) return part1;
- var part2 = await wcCallLLM(sysPreamble, splitPrompts[1] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang);
+ var part2 = await wcCallLLM(sysPreamble, splitPrompts[1] + _nl2 + _nl2 + 'File: ' + fp.name, signal, fp.lang, 8192);
  part2 = wcStripFences(part2);
  return part1 + _nl2 + _nl2 + part2;
  }
@@ -8122,7 +8122,8 @@ async function wcAutoRepair(filePlan, sysPreamble) {
  if (filePlan) filePlan.forEach(function(fp){ planMap[fp.name] = fp; });

  var _nl3 = String.fromCharCode(10);
- var sysBase = sysPreamble || ('You are an expert full-stack engineer. Output ONLY the complete corrected file content. No explanations, no markdown fences.');
+ // Use compact system prompt for repair to avoid exceeding Liara context window
+ var sysBase = 'You are an expert full-stack engineer. Output ONLY the complete corrected file content. No explanations, no markdown fences, no preamble. Raw file content only.';

  for (var ri = 0; ri < toFix.length; ri++) {
  var broken = toFix[ri];
@@ -8148,7 +8149,7 @@ async function wcAutoRepair(filePlan, sysPreamble) {
  (broken.content.length > 800 ? broken.content.slice(0, 400) + _nl3 + '...' + _nl3 + broken.content.slice(-400) : broken.content) + _nl3 + _nl3 +
  'Output the COMPLETE corrected file from the beginning.';
  }
- var fixed = await wcCallLLM(fixSys, fixUser, null, broken.lang || plan && plan.lang);
+ var fixed = await wcCallLLM(fixSys, fixUser, null, broken.lang || plan && plan.lang, 8192);
  var _fence3 = String.fromCharCode(96,96,96);
  var fixLines = fixed.split(_nl3);
  if (fixLines.length > 0 && fixLines[0].indexOf(_fence3) === 0) fixLines.shift();
@@ -8258,11 +8259,11 @@ function wcIsTruncated(content, lang) {
  return false;
  }

- async function wcCallLLMRaw(sys, user, signal) {
+ async function wcCallLLMRaw(sys, user, signal, maxTok) {
  var fetchOpts = {
  method: 'POST',
  headers: {'Content-Type':'application/json'},
- body: JSON.stringify({system: sys, user: user, max_tokens: 16384})
+ body: JSON.stringify({system: sys, user: user, max_tokens: maxTok || 16384})
  };
  if (signal) fetchOpts.signal = signal;
  for (var attempt = 0; attempt < 3; attempt++) {
@@ -8290,8 +8291,8 @@ async function wcCallLLMRaw(sys, user, signal) {
  }
  }

- async function wcCallLLM(sys, user, signal, lang) {
- var content = await wcCallLLMRaw(sys, user, signal);
+ async function wcCallLLM(sys, user, signal, lang, maxTok) {
+ var content = await wcCallLLMRaw(sys, user, signal, maxTok);
  // Continuation loop: if response is truncated, ask model to continue
  var maxContinuations = 2;
  for (var ci = 0; ci < maxContinuations; ci++) {
@@ -8300,7 +8301,7 @@ async function wcCallLLM(sys, user, signal, lang) {
  var continuePrompt = 'Continue generating the file EXACTLY from where you stopped. Do not repeat anything already written. Output ONLY the remaining code, starting from the next character after where you stopped.' +
  String.fromCharCode(10) + String.fromCharCode(10) + 'The file so far ends with:' +
  String.fromCharCode(10) + content.slice(-300);
- var continuation = await wcCallLLMRaw(sys, continuePrompt, signal);
+ var continuation = await wcCallLLMRaw(sys, continuePrompt, signal, maxTok);
  if (!continuation || continuation.trim().length < 5) break;
  content = content + String.fromCharCode(10) + continuation;
  }
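
A minimal usage sketch of the continuation-aware call path, with its optional per-request token cap: the file name 'styles.css', the 'css' language tag, and the literal '\n' (in place of the _nl2 helper) are assumptions for illustration only; maxTok falls back to 16384 inside wcCallLLMRaw when omitted.

    // Two-pass generation with a per-call cap of 8192 tokens (fifth argument).
    // The AbortSignal argument is null here, as in the repair path.
    var first = await wcCallLLM(sysPreamble, splitPrompts[0] + '\n\n' + 'File: styles.css', null, 'css', 8192);
    var second = await wcCallLLM(sysPreamble, splitPrompts[1] + '\n\n' + 'File: styles.css', null, 'css', 8192);
    // Concatenate the two halves, as wcGenerate does with _nl2.
    var full = first + '\n\n' + second;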