trace.ai-cli 1.1.4 → 1.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli/traceAI.js CHANGED
@@ -411,7 +411,7 @@ class TraceAI {
       async () => {
         const context = this.contexts.map(ctx => ctx.content).join('\n\n');
         const result = await processWithAI(input, context);
-        this.displayResult('Trace.Ai Response', result);
+        this.displayResult('Trace.Ai Response', result.text || result);
       }
     );
   }
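The change above makes displayResult tolerant of processWithAI returning either a plain string or an object carrying a text field. A minimal sketch of that unwrapping pattern, as a standalone illustration (the helper name is hypothetical and not part of the package):

// Accept either a plain string or an object shaped like { text: '...' }.
function unwrapResult(result) {
  // Prefer result.text when it exists and is non-empty; otherwise fall back to result itself.
  return (result && result.text) || result;
}

console.log(unwrapResult('plain string'));             // → 'plain string'
console.log(unwrapResult({ text: 'wrapped string' })); // → 'wrapped string'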
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "trace.ai-cli",
-  "version": "1.1.4",
+  "version": "1.1.5",
   "description": "A powerful AI-powered CLI tool",
   "main": "index.js",
   "bin": {
@@ -92,16 +92,6 @@ async function determineSystemInfoQuery(prompt, basicInfo) {
   }
 }
 
-/**
- * Check if a prompt is related to system information
- * @param {string} prompt - The user's prompt
- * @returns {boolean} True if the prompt is related to system information
- */
-function isSystemInfoQuery(prompt) {
-  // Always return true for /system commands - we'll let the AI determine relevance
-  return true;
-}
-
 /**
  * Format system information into a readable response
  * @param {Object} sysInfo - System information object
@@ -503,7 +493,7 @@ function formatSystemInfoResponse(sysInfo, prompt) {
 async function processWithAI(prompt, context = '') {
   try {
     // Check if the prompt is related to system information
-    if (isSystemInfoQuery(prompt)) {
+    if (prompt.toLowerCase().startsWith('get system information')) {
       try {
         // First get basic system info to provide context
         const basicInfo = await getSystemInfo('basic');
@@ -521,7 +511,7 @@ async function processWithAI(prompt, context = '') {
     }
 
     // Regular AI processing
-    const models = ['kimi', 'mvrk', 'gma3', 'dsv3', 'qw32b', 'ms24b', 'll70b', 'qw3', 'mp4', 'nlm3'];
+    const models = ['kimi', 'mvrk', 'gma3', 'dsv3', 'qw32b', 'ms24b', 'll70b', 'qw3', 'nlm3'];
 
     const modelRequests = models.map(model =>
       fetch('https://traceai.dukeindustries7.workers.dev/', {
@@ -566,6 +556,5 @@ async function processWithAI(prompt, context = '') {
 module.exports = {
   processWithAI,
   determineSystemInfoQuery,
-  isSystemInfoQuery,
   formatSystemInfoResponse
 };
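Taken together, the last three hunks drop the always-true isSystemInfoQuery helper (and its export) and gate system-information handling on a literal prefix check inside processWithAI. A minimal standalone sketch of the new predicate's behavior, where the wrapper name is hypothetical and only the string test is taken from the diff:

// The old isSystemInfoQuery(prompt) always returned true; the new check is a literal prefix test.
function wantsSystemInfo(prompt) {
  return prompt.toLowerCase().startsWith('get system information');
}

console.log(wantsSystemInfo('Get system information for my machine')); // true
console.log(wantsSystemInfo('What is my CPU model?'));                 // false

Prompts that fail this test fall through to the regular multi-model request fan-out shown in the preceding hunk.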