sam-coder-cli 1.0.59 → 1.0.60

Files changed (2)
  1. package/bin/agi-cli.js +228 -2
  2. package/package.json +1 -1
package/bin/agi-cli.js CHANGED
@@ -17,6 +17,7 @@ const CONFIG_PATH = path.join(os.homedir(), '.sam-coder-config.json');
  let OPENROUTER_API_KEY;
  let MODEL = 'deepseek/deepseek-chat-v3-0324:free';
  let API_BASE_URL = 'https://openrouter.ai/api/v1';
+ let SHOW_THOUGHTS = false; // Optional: reveal <think> content in console

  // Tool/Function definitions for the AI
  const tools = [
@@ -619,6 +620,143 @@ function extractJsonFromMarkdown(text) {
    return null;
  }

+ // Extract and strip <think>...</think> blocks from model output
+ function splitThinking(text) {
+   if (!text || typeof text !== 'string') {
+     return { thought: '', content: text || '' };
+   }
+   const thinkRegex = /<think>[\s\S]*?<\/think>/gi;
+   let combinedThoughts = [];
+   let match;
+   // Collect all thoughts
+   const singleThinkRegex = /<think>([\s\S]*?)<\/think>/i;
+   let remaining = text;
+   while ((match = remaining.match(singleThinkRegex))) {
+     combinedThoughts.push((match[1] || '').trim());
+     remaining = remaining.replace(singleThinkRegex, '');
+   }
+   const visible = remaining.replace(thinkRegex, '').trim();
+   return { thought: combinedThoughts.join('\n\n').trim(), content: visible };
+ }
+
+ // Try to recover tool/function calls embedded in assistant text for thinking models
+ function parseInlineToolCalls(text) {
+   if (!text || typeof text !== 'string') return null;
+
+   const candidates = [];
+
+   // 1) JSON code blocks
+   const codeBlockRegex = /```(?:json)?\s*([\s\S]*?)\s*```/gi;
+   let m;
+   while ((m = codeBlockRegex.exec(text)) !== null) {
+     const block = (m[1] || '').trim();
+     if (block) candidates.push(block);
+   }
+
+   // 2) <tool_call>...</tool_call>
+   const toolTagRegex = /<tool_call>([\s\S]*?)<\/tool_call>/gi;
+   while ((m = toolTagRegex.exec(text)) !== null) {
+     const inner = (m[1] || '').trim();
+     if (inner) candidates.push(inner);
+   }
+   // 2b) <function_call>...</function_call>
+   const fnTagRegex = /<function_call>([\s\S]*?)<\/function_call>/gi;
+   while ((m = fnTagRegex.exec(text)) !== null) {
+     const inner = (m[1] || '').trim();
+     if (inner) candidates.push(inner);
+   }
+
+   // 3) General JSON-looking substrings as last resort
+   const braceRegex = /\{[\s\S]*?\}/g;
+   const braceMatches = text.match(braceRegex) || [];
+   braceMatches.forEach(snippet => candidates.push(snippet));
+
+   const toolCalls = [];
+
+   for (const candidate of candidates) {
+     try {
+       const obj = JSON.parse(candidate);
+       // OpenAI-style single function_call
+       if (obj && obj.function_call && obj.function_call.name) {
+         const args = obj.function_call.arguments ?? {};
+         toolCalls.push({
+           id: `inline-${toolCalls.length + 1}`,
+           type: 'function',
+           function: {
+             name: obj.function_call.name,
+             arguments: typeof args === 'string' ? args : JSON.stringify(args)
+           }
+         });
+         continue;
+       }
+       // Anthropic-like tool_use
+       if (obj && obj.tool_call && obj.tool_call.name) {
+         const args = obj.tool_call.arguments ?? {};
+         toolCalls.push({
+           id: `inline-${toolCalls.length + 1}`,
+           type: 'function',
+           function: {
+             name: obj.tool_call.name,
+             arguments: typeof args === 'string' ? args : JSON.stringify(args)
+           }
+         });
+         continue;
+       }
+       // Array of tool_calls
+       if (Array.isArray(obj?.tool_calls)) {
+         obj.tool_calls.forEach((tc) => {
+           if (tc?.function?.name) {
+             const args = tc.function.arguments ?? {};
+             toolCalls.push({
+               id: tc.id || `inline-${toolCalls.length + 1}`,
+               type: 'function',
+               function: {
+                 name: tc.function.name,
+                 arguments: typeof args === 'string' ? args : JSON.stringify(args)
+               }
+             });
+           }
+         });
+         if (toolCalls.length) continue;
+       }
+       // Direct function structure
+       if (obj?.name && (obj.arguments !== undefined || obj.args !== undefined)) {
+         const args = obj.arguments ?? obj.args ?? {};
+         toolCalls.push({
+           id: `inline-${toolCalls.length + 1}`,
+           type: 'function',
+           function: {
+             name: obj.name,
+             arguments: typeof args === 'string' ? args : JSON.stringify(args)
+           }
+         });
+         continue;
+       }
+     } catch (_) {
+       // ignore parse failures
+     }
+   }
+
+   return toolCalls.length ? toolCalls : null;
+ }
+
+ // Normalize single function_call to tool_calls array if present
+ function normalizeToolCallsFromMessage(message) {
+   if (!message || typeof message !== 'object') return message;
+   if (!message.tool_calls && message.function_call && message.function_call.name) {
+     const args = message.function_call.arguments ?? {};
+     message.tool_calls = [{
+       id: 'fc-1',
+       type: 'function',
+       function: {
+         name: message.function_call.name,
+         arguments: typeof args === 'string' ? args : JSON.stringify(args)
+       }
+     }];
+   }
+   return message;
+ }
+
  // Call OpenRouter API with tool calling
  async function callOpenRouter(messages, currentModel, useJson = false) {
    const apiKey = OPENROUTER_API_KEY;
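To make the two new helpers concrete, here is a rough, hypothetical sketch of how they behave on a thinking-model reply that wraps its reasoning in <think> tags and embeds a tool call in a <tool_call> tag. The reply text and the list_files tool name are invented for illustration, and the snippet assumes it runs alongside the helpers defined above (they are not exported):

```js
// Hypothetical input (not from the package): a reply with a <think> block and an inline tool call.
const reply = [
  '<think>The user wants a directory listing, so a tool call is needed.</think>',
  'Running the tool now:',
  '<tool_call>{"name": "list_files", "arguments": {"path": "."}}</tool_call>' // "list_files" is an invented tool name
].join('\n');

const { thought, content } = splitThinking(reply);
// thought  -> 'The user wants a directory listing, so a tool call is needed.'
// content  -> the reply with the <think>...</think> block stripped out

const recovered = parseInlineToolCalls(content);
// recovered -> [{ id: 'inline-1', type: 'function',
//                 function: { name: 'list_files', arguments: '{"path":"."}' } }]
```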
@@ -675,8 +813,24 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
    try {
      const response = await callOpenRouter(messages, currentModel);
      const assistantMessage = response.choices[0].message;
+     // Handle thinking tags and optionally display them
+     if (assistantMessage && typeof assistantMessage.content === 'string') {
+       const { thought, content } = splitThinking(assistantMessage.content);
+       if (thought && SHOW_THOUGHTS) {
+         ui.showInfo(`Thinking:\n${thought}`);
+       }
+       assistantMessage.content = content;
+     }
+     normalizeToolCallsFromMessage(assistantMessage);
      messages.push(assistantMessage);

+     // Try inline recovery for thinking models that embed tool calls inside content
+     if (!assistantMessage.tool_calls && assistantMessage.content) {
+       const recovered = parseInlineToolCalls(assistantMessage.content);
+       if (recovered && recovered.length) {
+         assistantMessage.tool_calls = recovered;
+       }
+     }
      if (assistantMessage.tool_calls) {
        const toolResults = await handleToolCalls(assistantMessage.tool_calls, messages);
        messages.push(...toolResults);
@@ -684,6 +838,14 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
        ui.startThinking();
        const finalResponseObj = await callOpenRouter(messages, currentModel);
        const finalAssistantMessage = finalResponseObj.choices[0].message;
+       if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
+         const { thought, content } = splitThinking(finalAssistantMessage.content);
+         if (thought && SHOW_THOUGHTS) {
+           ui.showInfo(`Thinking:\n${thought}`);
+         }
+         finalAssistantMessage.content = content;
+       }
+       normalizeToolCallsFromMessage(finalAssistantMessage);
        messages.push(finalAssistantMessage);
        ui.stopThinking();

@@ -692,6 +854,31 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
          conversation: messages
        };
      } else {
+       // Fallback: if no tool_calls were returned, try to parse a JSON action from content (thinking models may embed later)
+       const fallbackAction = extractJsonFromMarkdown(assistantMessage.content);
+       if (fallbackAction && fallbackAction.type) {
+         try {
+           const result = await executeAction(fallbackAction);
+           messages.push({ role: 'user', content: `Action result (${fallbackAction.type}): ${result}` });
+           ui.startThinking();
+           const finalResponseObj = await callOpenRouter(messages, currentModel);
+           const finalAssistantMessage = finalResponseObj.choices[0].message;
+           if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
+             const { thought, content } = splitThinking(finalAssistantMessage.content);
+             if (thought && SHOW_THOUGHTS) {
+               ui.showInfo(`Thinking:\n${thought}`);
+             }
+             finalAssistantMessage.content = content;
+           }
+           normalizeToolCallsFromMessage(finalAssistantMessage);
+           messages.push(finalAssistantMessage);
+           ui.stopThinking();
+           return { response: finalAssistantMessage.content, conversation: messages };
+         } catch (e) {
+           ui.stopThinking();
+           // If fallback execution fails, just return original assistant content
+         }
+       }
        ui.stopThinking();
        return {
          response: assistantMessage.content,
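As a quick, hypothetical illustration of what normalizeToolCallsFromMessage contributes to this flow (the read_file tool name and arguments are invented; real names would come from the tools array defined near the top of the file):

```js
// Hypothetical legacy-style reply: some models return function_call instead of tool_calls.
const assistantMessage = {
  role: 'assistant',
  content: '',
  function_call: { name: 'read_file', arguments: '{"path":"README.md"}' } // invented tool name/args
};

normalizeToolCallsFromMessage(assistantMessage);
// assistantMessage.tool_calls is now:
// [{ id: 'fc-1', type: 'function',
//    function: { name: 'read_file', arguments: '{"path":"README.md"}' } }]
// so the existing `if (assistantMessage.tool_calls)` branch handles it unchanged.
```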
@@ -818,6 +1005,14 @@ async function processQuery(query, conversation = [], currentModel) {
    while (actionCount < MAX_ACTIONS) {
      const responseObj = await callOpenRouter(messages, currentModel, true);
      const assistantMessage = responseObj.choices[0].message;
+     if (assistantMessage && typeof assistantMessage.content === 'string') {
+       const { thought, content } = splitThinking(assistantMessage.content);
+       if (thought && SHOW_THOUGHTS) {
+         ui.showInfo(`Thinking:\n${thought}`);
+       }
+       assistantMessage.content = content;
+     }
+     normalizeToolCallsFromMessage(assistantMessage);
      messages.push(assistantMessage);

      const actionData = extractJsonFromMarkdown(assistantMessage.content);
@@ -869,8 +1064,17 @@ async function processQuery(query, conversation = [], currentModel) {
      messages.push(finalMsg);

      const finalResponseObj = await callOpenRouter(messages, currentModel, true);
-     finalResponse = finalResponseObj.choices[0].message.content;
-     messages.push(finalResponseObj.choices[0].message);
+     const finalAssistantMessage = finalResponseObj.choices[0].message;
+     if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
+       const { thought, content } = splitThinking(finalAssistantMessage.content);
+       if (thought && SHOW_THOUGHTS) {
+         ui.showInfo(`Thinking:\n${thought}`);
+       }
+       finalResponse = content;
+     } else {
+       finalResponse = finalResponseObj.choices[0].message.content;
+     }
+     messages.push(finalAssistantMessage);
    }

    ui.stopThinking();
@@ -920,6 +1124,25 @@ async function chat(rl, useToolCalling, initialModel) {
        return;
      }

+     if (input.toLowerCase().startsWith('/thoughts')) {
+       const parts = input.trim().split(/\s+/);
+       const arg = parts[1] ? parts[1].toLowerCase() : '';
+       if (arg !== 'on' && arg !== 'off') {
+         const state = SHOW_THOUGHTS ? 'on' : 'off';
+         ui.showInfo(`Usage: /thoughts on|off (currently ${state})`);
+         rl.prompt();
+         return;
+       }
+       const enable = arg === 'on';
+       SHOW_THOUGHTS = enable;
+       let config = await readConfig() || {};
+       config.showThoughts = enable;
+       await writeConfig(config);
+       ui.showResponse(`Hidden thoughts ${enable ? 'enabled' : 'disabled'}.`);
+       rl.prompt();
+       return;
+     }
+
      if (input.toLowerCase() === '/default-model') {
        currentModel = 'deepseek/deepseek-chat-v3-0324:free';
        let config = await readConfig() || {};
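For reference, a rough sketch of the new command in a chat session; prompt and output formatting are approximate, since the exact rendering depends on the ui module, and the choice persists via the showThoughts key in the config file:

```
> /thoughts
Usage: /thoughts on|off (currently off)
> /thoughts on
Hidden thoughts enabled.
```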
@@ -1039,6 +1262,9 @@ async function start() {
      console.log(`🚀 Using Pro Plan custom endpoint: ${API_BASE_URL}`);
    }

+   // Optional: reveal <think> thoughts if enabled in config or env
+   SHOW_THOUGHTS = (typeof config.showThoughts === 'boolean') ? config.showThoughts : (process.env.SHOW_THOUGHTS === '1');
+
    // Check if animation should be shown (can be disabled via config)
    const showAnimation = config.showAnimation !== false; // Default to true

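A minimal sketch of the precedence the added line implements, assuming a config file at ~/.sam-coder-config.json (the path set by CONFIG_PATH at the top of the file): a boolean showThoughts key wins outright, and the SHOW_THOUGHTS=1 environment variable applies only when that key is absent or not a boolean.

```js
// Hypothetical values for illustration only.
const config = { showThoughts: false };   // e.g. loaded from ~/.sam-coder-config.json
process.env.SHOW_THOUGHTS = '1';          // would enable thoughts only if the config key were absent

const SHOW_THOUGHTS = (typeof config.showThoughts === 'boolean')
  ? config.showThoughts
  : (process.env.SHOW_THOUGHTS === '1');
// SHOW_THOUGHTS -> false here: the boolean config value takes precedence over the env variable.
```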
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "sam-coder-cli",
-   "version": "1.0.59",
+   "version": "1.0.60",
    "description": "SAM-CODER: An animated command-line AI assistant with agency capabilities.",
    "main": "bin/agi-cli.js",
    "bin": {