@hasna/terminal 1.3.1 → 1.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/cli.js +13 -11
  2. package/package.json +1 -1
  3. package/src/cli.tsx +13 -10
package/dist/cli.js CHANGED
@@ -462,18 +462,11 @@ else if (args.length > 0) {
462
462
  console.log(formatWatchResult(result));
463
463
  process.exit(0);
464
464
  }
465
- // Lazy mode
466
- if (shouldBeLazy(clean, actualCmd)) {
467
- const lazy = toLazy(clean, actualCmd);
468
- const saved = rawTokens - estimateTokens(JSON.stringify(lazy));
469
- if (saved > 0)
470
- recordSaving("compressed", saved);
471
- console.log(JSON.stringify(lazy, null, 2));
472
- process.exit(0);
473
- }
474
- // AI answer framing — ALWAYS use in NL mode (even for small output)
475
- // The AI needs to ANSWER the question, not just pass through data
465
+ // Frame-first pipeline: AI answers the question, lazy is fallback
466
+ // For question-type prompts, answer framing runs BEFORE lazy mode
467
+ const isQuestion = /^(what|which|how|is|are|does|do|can|should|where|who|why|am|was|were|has|have|will)\b/i.test(prompt) || prompt.includes("?");
476
468
  if (clean.length > 10) {
469
+ // Try AI answer framing first (especially for questions)
477
470
  const processed = await processOutput(actualCmd, clean, prompt);
478
471
  if (processed.aiProcessed) {
479
472
  if (processed.tokensSaved > 0)
@@ -484,6 +477,15 @@ else if (args.length > 0) {
484
477
  process.exit(0);
485
478
  }
486
479
  }
480
+ // Lazy mode — fallback when AI framing didn't run or failed
481
+ if (shouldBeLazy(clean, actualCmd)) {
482
+ const lazy = toLazy(clean, actualCmd);
483
+ const saved = rawTokens - estimateTokens(JSON.stringify(lazy));
484
+ if (saved > 0)
485
+ recordSaving("compressed", saved);
486
+ console.log(JSON.stringify(lazy, null, 2));
487
+ process.exit(0);
488
+ }
487
489
  // Fallback: AI unavailable — pass through clean
488
490
  console.log(clean);
489
491
  const saved = rawTokens - estimateTokens(clean);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hasna/terminal",
3
- "version": "1.3.1",
3
+ "version": "1.3.2",
4
4
  "description": "Smart terminal wrapper for AI agents and humans — structured output, token compression, MCP server, natural language",
5
5
  "type": "module",
6
6
  "bin": {
package/src/cli.tsx CHANGED
@@ -446,18 +446,12 @@ else if (args.length > 0) {
446
446
  process.exit(0);
447
447
  }
448
448
 
449
- // Lazy mode
450
- if (shouldBeLazy(clean, actualCmd)) {
451
- const lazy = toLazy(clean, actualCmd);
452
- const saved = rawTokens - estimateTokens(JSON.stringify(lazy));
453
- if (saved > 0) recordSaving("compressed", saved);
454
- console.log(JSON.stringify(lazy, null, 2));
455
- process.exit(0);
456
- }
449
+ // Frame-first pipeline: AI answers the question, lazy is fallback
450
+ // For question-type prompts, answer framing runs BEFORE lazy mode
451
+ const isQuestion = /^(what|which|how|is|are|does|do|can|should|where|who|why|am|was|were|has|have|will)\b/i.test(prompt) || prompt.includes("?");
457
452
 
458
- // AI answer framing — ALWAYS use in NL mode (even for small output)
459
- // The AI needs to ANSWER the question, not just pass through data
460
453
  if (clean.length > 10) {
454
+ // Try AI answer framing first (especially for questions)
461
455
  const processed = await processOutput(actualCmd, clean, prompt);
462
456
  if (processed.aiProcessed) {
463
457
  if (processed.tokensSaved > 0) recordSaving("compressed", processed.tokensSaved);
@@ -467,6 +461,15 @@ else if (args.length > 0) {
467
461
  }
468
462
  }
469
463
 
464
+ // Lazy mode — fallback when AI framing didn't run or failed
465
+ if (shouldBeLazy(clean, actualCmd)) {
466
+ const lazy = toLazy(clean, actualCmd);
467
+ const saved = rawTokens - estimateTokens(JSON.stringify(lazy));
468
+ if (saved > 0) recordSaving("compressed", saved);
469
+ console.log(JSON.stringify(lazy, null, 2));
470
+ process.exit(0);
471
+ }
472
+
470
473
  // Fallback: AI unavailable — pass through clean
471
474
  console.log(clean);
472
475
  const saved = rawTokens - estimateTokens(clean);