@o-lang/olang 1.2.4 → 1.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@o-lang/olang",
-  "version": "1.2.4",
+  "version": "1.2.5",
   "author": "Olalekan Ogundipe <info@workfily.com>",
   "description": "O-Lang: A governance language for user-directed, rule-enforced agent workflows",
   "main": "./src/index.js",
package/src/runtime/RuntimeAPI.js CHANGED
@@ -1,4 +1,3 @@
-// src/runtime/RuntimeAPI.js
 const fs = require('fs');
 const path = require('path');
 
@@ -332,6 +331,40 @@ class RuntimeAPI {
     });
   }
 
+  // -----------------------------
+  // āœ… KERNEL-LEVEL LLM HALLUCINATION PREVENTION (ZERO WORKFLOW CHANGES)
+  // -----------------------------
+  _validateLLMOutput(output, actionContext) {
+    if (!output || typeof output !== 'string') return { passed: true };
+
+    // šŸ”‘ CRITICAL: Extract ONLY allowed capabilities from workflow allowlist
+    const allowedCapabilities = Array.from(this.allowedResolvers)
+      .filter(name => !name.startsWith('llm-') && name !== 'builtInMathResolver')
+      .map(name => name.replace('@o-lang/', '').replace(/-resolver$/, ''));
+
+    // šŸ”’ Block capability hallucinations (claims to do things outside allowlist)
+    const forbiddenPatterns = [
+      { pattern: /\b(transfer|send|wire|pay|withdraw|deposit)\b/i, capability: 'transfer' },
+      { pattern: /\b(create|open|close|delete)\s+(account|profile)\b/i, capability: 'account_management' },
+      { pattern: /\bI (can|will|am able to)\s+(transfer|pay|send)/i, capability: 'unauthorized_action' }
+    ];
+
+    for (const { pattern, capability } of forbiddenPatterns) {
+      if (pattern.test(output)) {
+        // āœ… Only block if capability NOT in allowlist
+        if (!allowedCapabilities.some(c => c.includes(capability) || c.includes('transfer'))) {
+          return {
+            passed: false,
+            reason: `Hallucinated "${capability}" capability (not in workflow allowlist: ${allowedCapabilities.join(', ') || 'none'})`,
+            detected: output.match(pattern)?.[0] || 'unknown'
+          };
+        }
+      }
+    }
+
+    return { passed: true };
+  }
+
   // -----------------------------
   // āœ… CRITICAL FIX: Resolver output unwrapping helper
   // -----------------------------
@@ -538,7 +571,7 @@ class RuntimeAPI {
     }
   });
   if (!hasDocs) {
-    errorMessage += ` → Visit https://www.npmjs.com/search?q=%40o-lang for resolver packages\n`; // āœ… FIXED
+    errorMessage += ` → Visit https://www.npmjs.com/search?q=%40o-lang for resolver packages\n`; // āœ… FIXED
   }
 
   errorMessage += `\nšŸ›‘ Workflow halted to prevent unsafe data propagation to LLMs.`;
@@ -600,6 +633,26 @@ class RuntimeAPI {
   const rawResult = await runResolvers(action);
   const unwrapped = this._unwrapResolverResult(rawResult);
 
+  // šŸ”’ KERNEL-ENFORCED: Block LLM hallucinations BEFORE saving to context
+  // Detect if this was an LLM resolver by checking action pattern
+  const isLLMAction = action.toLowerCase().includes('groq') ||
+    action.toLowerCase().includes('openai') ||
+    action.toLowerCase().includes('anthropic') ||
+    action.toLowerCase().includes('llm');
+
+  if (isLLMAction && typeof unwrapped?.output === 'string') {
+    const safetyCheck = this._validateLLMOutput(unwrapped.output, action);
+    if (!safetyCheck.passed) {
+      throw new Error(
+        `[O-Lang SAFETY] LLM hallucinated unauthorized capability:\n` +
+        ` → Detected: "${safetyCheck.detected}"\n` +
+        ` → Reason: ${safetyCheck.reason}\n` +
+        ` → Workflow allowlist: ${Array.from(this.allowedResolvers).join(', ')}\n` +
+        `\nšŸ›‘ Halting to prevent deceptive user experience.`
+      );
+    }
+  }
+
   if (step.saveAs) {
     this.context[step.saveAs] = unwrapped;
   }
@@ -627,6 +680,20 @@ class RuntimeAPI {
   const rawResult = await runResolvers(`Action ${target}`);
   const unwrapped = this._unwrapResolverResult(rawResult);
 
+  // šŸ”’ KERNEL-ENFORCED: Block LLM hallucinations BEFORE saving to context
+  if (typeof unwrapped?.output === 'string') {
+    const safetyCheck = this._validateLLMOutput(unwrapped.output, target);
+    if (!safetyCheck.passed) {
+      throw new Error(
+        `[O-Lang SAFETY] LLM hallucinated unauthorized capability:\n` +
+        ` → Detected: "${safetyCheck.detected}"\n` +
+        ` → Reason: ${safetyCheck.reason}\n` +
+        ` → Workflow allowlist: ${Array.from(this.allowedResolvers).join(', ')}\n` +
+        `\nšŸ›‘ Halting to prevent deceptive user experience.`
+      );
+    }
+  }
+
   if (step.saveAs) this.context[step.saveAs] = unwrapped;
   break;
 }
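
For readers of this diff, a minimal sketch of how the guard added in 1.2.5 behaves. The snippet is not part of the package: the require path, the export shape of RuntimeAPI, the stand-in context object, and the resolver names are assumptions; only _validateLLMOutput and allowedResolvers come from the code above.

// Illustrative only: call _validateLLMOutput against a stand-in `this`, so the sketch
// does not depend on the RuntimeAPI constructor signature, which this diff does not show.
const RuntimeAPI = require('@o-lang/olang/src/runtime/RuntimeAPI'); // assumed export shape
const fakeRuntime = {
  // Hypothetical workflow allowlist with no transfer-capable resolver
  allowedResolvers: new Set(['@o-lang/groq-resolver', '@o-lang/weather-resolver'])
};

// Benign LLM output passes through unchanged
console.log(RuntimeAPI.prototype._validateLLMOutput.call(
  fakeRuntime, 'The forecast for Lagos is sunny.', 'Ask Groq for weather'
)); // { passed: true }

// Output claiming an unlisted capability is flagged, so the runtime throws before
// the result is saved to context
console.log(RuntimeAPI.prototype._validateLLMOutput.call(
  fakeRuntime, 'I can transfer $500 to your savings account now.', 'Ask Groq for weather'
)); // { passed: false, reason: 'Hallucinated "transfer" capability (not in workflow allowlist: groq, weather)', detected: 'transfer' }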