sam-coder-cli 1.0.61 → 1.0.63

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/bin/agi-cli.js +97 -83
  2. package/bin/ui.js +21 -1
  3. package/package.json +1 -1
package/bin/agi-cli.js CHANGED
@@ -557,7 +557,6 @@ const agentUtils = {
 // Extract JSON from markdown code blocks
 function extractJsonFromMarkdown(text) {
   if (!text || typeof text !== 'string') {
-    console.error('Invalid input to extractJsonFromMarkdown:', typeof text);
     return null;
   }
 
@@ -569,13 +568,11 @@ function extractJsonFromMarkdown(text) {
     try {
       const jsonStr = match[1].trim();
       if (!jsonStr) {
-        console.error('Empty JSON content in markdown block');
         return null;
       }
       return JSON.parse(jsonStr);
     } catch (error) {
-      console.error('Error parsing JSON from markdown block:', error.message);
-      console.error('JSON content was:', match[1]);
+      // ignore
     }
   }
 
@@ -615,8 +612,6 @@ function extractJsonFromMarkdown(text) {
     // Last resort failed
   }
 
-  console.error('Failed to extract valid JSON from response');
-  console.error('Response text was:', text);
   return null;
 }
 
@@ -757,25 +752,63 @@ function normalizeToolCallsFromMessage(message) {
   return message;
 }
 
-// Ask the model to produce a structured follow-up with thinking and either tool_calls or JSON action
-async function requestStructuredFollowup(messages, currentModel, preferTools = true) {
-  const instruction = preferTools
-    ? 'Think step-by-step inside <think></think>. Then, if any tools are needed, call them via tool_calls. If no tools are required, output a single markdown ```json code block with an action object (type, data, reasoning) per the schema. Do not include any other text.'
-    : 'Think step-by-step inside <think></think>, then output a single markdown ```json code block with an action object (type, data, reasoning) per the schema. Do not include any other text.';
-
-  const followupUser = { role: 'user', content: instruction };
-  const followupMessages = [...messages, followupUser];
-  const responseObj = await callOpenRouter(followupMessages, currentModel, !preferTools);
-  const assistantMessage = responseObj.choices[0].message;
-  if (assistantMessage && typeof assistantMessage.content === 'string') {
-    const { thought, content } = splitThinking(assistantMessage.content);
-    if (thought && SHOW_THOUGHTS) {
-      ui.showInfo(`Thinking:\n${thought}`);
+// Parse segmented format like <|start|>channel<|message|>...<|end|>
+function parseSegmentedTranscript(text) {
+  if (!text || typeof text !== 'string') {
+    return { content: text || '', thought: '', recoveredToolCalls: null };
+  }
+
+  const blockRegex = /\<\|start\|>([^<|]+)\<\|message\|>([\s\S]*?)\<\|end\|>/gi;
+  let match;
+  let visibleParts = [];
+  let thoughts = [];
+  let commentaryParts = [];
+
+  while ((match = blockRegex.exec(text)) !== null) {
+    const rawRole = (match[1] || '').trim().toLowerCase();
+    const body = (match[2] || '').trim();
+    if (!rawRole) continue;
+
+    if (rawRole === 'analysis') {
+      thoughts.push(body);
+    } else if (rawRole === 'commentary') {
+      commentaryParts.push(body);
+    } else if (rawRole === 'final' || rawRole === 'assistant' || rawRole === 'user' || rawRole === 'system' || rawRole === 'developer') {
+      // Prefer 'final' or 'assistant' as visible, but include others to preserve content order
+      visibleParts.push(body);
+    } else {
+      // Unknown channel: treat as visible content
+      visibleParts.push(body);
     }
-    assistantMessage.content = content;
   }
-  normalizeToolCallsFromMessage(assistantMessage);
-  return { assistantMessage, updatedMessages: [...followupMessages, assistantMessage] };
+
+  // If no blocks matched, return original
+  if (visibleParts.length === 0 && thoughts.length === 0 && commentaryParts.length === 0) {
+    return { content: text, thought: '', recoveredToolCalls: null };
+  }
+
+  // Look for a Reasoning: level outside blocks as a hint
+  const reasoningMatch = text.match(/Reasoning:\s*(high|medium|low)/i);
+  if (reasoningMatch) {
+    thoughts.unshift(`Reasoning level: ${reasoningMatch[1]}`);
+  }
+
+  // Recover tool calls from commentary channels
+  let recoveredToolCalls = null;
+  if (commentaryParts.length) {
+    for (const part of commentaryParts) {
+      const found = parseInlineToolCalls(part);
+      if (found && found.length) {
+        recoveredToolCalls = (recoveredToolCalls || []).concat(found);
+      }
+    }
+  }
+
+  return {
+    content: visibleParts.join('\n\n').trim(),
+    thought: thoughts.join('\n\n').trim(),
+    recoveredToolCalls: recoveredToolCalls && recoveredToolCalls.length ? recoveredToolCalls : null
+  };
 }
 
 // Call OpenRouter API with tool calling
@@ -836,9 +869,20 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
   const assistantMessage = response.choices[0].message;
   // Handle thinking tags and optionally display them
   if (assistantMessage && typeof assistantMessage.content === 'string') {
-    const { thought, content } = splitThinking(assistantMessage.content);
+    // First handle segmented transcripts, then fallback to <think>
+    const segmented = parseSegmentedTranscript(assistantMessage.content);
+    let thought = segmented.thought;
+    let content = segmented.content;
+    if (!segmented.thought && !segmented.recoveredToolCalls) {
+      const thinkSplit = splitThinking(assistantMessage.content);
+      thought = thought || thinkSplit.thought;
+      content = content || thinkSplit.content;
+    }
+    if (segmented.recoveredToolCalls && (!assistantMessage.tool_calls)) {
+      assistantMessage.tool_calls = segmented.recoveredToolCalls;
+    }
     if (thought && SHOW_THOUGHTS) {
-      ui.showInfo(`Thinking:\n${thought}`);
+      ui.showThought(thought);
     }
    assistantMessage.content = content;
  }
@@ -860,9 +904,16 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
     const finalResponseObj = await callOpenRouter(messages, currentModel);
     const finalAssistantMessage = finalResponseObj.choices[0].message;
     if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
-      const { thought, content } = splitThinking(finalAssistantMessage.content);
+      const segmented = parseSegmentedTranscript(finalAssistantMessage.content);
+      let thought = segmented.thought;
+      let content = segmented.content;
+      if (!segmented.thought && !segmented.recoveredToolCalls) {
+        const thinkSplit = splitThinking(finalAssistantMessage.content);
+        thought = thought || thinkSplit.thought;
+        content = content || thinkSplit.content;
+      }
       if (thought && SHOW_THOUGHTS) {
-        ui.showInfo(`Thinking:\n${thought}`);
+        ui.showThought(thought);
       }
       finalAssistantMessage.content = content;
     }
@@ -885,9 +936,16 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
     const finalResponseObj = await callOpenRouter(messages, currentModel);
     const finalAssistantMessage = finalResponseObj.choices[0].message;
     if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
-      const { thought, content } = splitThinking(finalAssistantMessage.content);
+      const segmented = parseSegmentedTranscript(finalAssistantMessage.content);
+      let thought = segmented.thought;
+      let content = segmented.content;
+      if (!segmented.thought && !segmented.recoveredToolCalls) {
+        const thinkSplit = splitThinking(finalAssistantMessage.content);
+        thought = thought || thinkSplit.thought;
+        content = content || thinkSplit.content;
+      }
       if (thought && SHOW_THOUGHTS) {
-        ui.showInfo(`Thinking:\n${thought}`);
+        ui.showThought(thought);
       }
       finalAssistantMessage.content = content;
     }
@@ -900,57 +958,6 @@ async function processQueryWithTools(query, conversation = [], currentModel) {
       // If fallback execution fails, just return original assistant content
     }
   }
-  // Final attempt: request a structured follow-up that includes thinking and tool calls
-  try {
-    ui.startThinking();
-    const { assistantMessage: structuredMsg, updatedMessages } = await requestStructuredFollowup(messages, currentModel, true);
-    messages.length = 0; updatedMessages.forEach(m => messages.push(m));
-
-    // If tool calls present now, execute them
-    if (structuredMsg.tool_calls && structuredMsg.tool_calls.length) {
-      const toolResults2 = await handleToolCalls(structuredMsg.tool_calls, messages);
-      messages.push(...toolResults2);
-      const finalResponseObj2 = await callOpenRouter(messages, currentModel);
-      const finalAssistantMessage2 = finalResponseObj2.choices[0].message;
-      if (finalAssistantMessage2 && typeof finalAssistantMessage2.content === 'string') {
-        const { thought, content } = splitThinking(finalAssistantMessage2.content);
-        if (thought && SHOW_THOUGHTS) ui.showInfo(`Thinking:\n${thought}`);
-        finalAssistantMessage2.content = content;
-      }
-      messages.push(finalAssistantMessage2);
-      ui.stopThinking();
-      return { response: finalAssistantMessage2.content, conversation: messages };
-    }
-
-    // Else try JSON action again from the structured response
-    const structuredAction = extractJsonFromMarkdown(structuredMsg.content);
-    if (structuredAction && structuredAction.type) {
-      try {
-        const result2 = await executeAction(structuredAction);
-        messages.push({ role: 'user', content: `Action result (${structuredAction.type}): ${result2}` });
-        const finalResponseObj3 = await callOpenRouter(messages, currentModel);
-        const finalAssistantMessage3 = finalResponseObj3.choices[0].message;
-        if (finalAssistantMessage3 && typeof finalAssistantMessage3.content === 'string') {
-          const { thought, content } = splitThinking(finalAssistantMessage3.content);
-          if (thought && SHOW_THOUGHTS) ui.showInfo(`Thinking:\n${thought}`);
-          finalAssistantMessage3.content = content;
-        }
-        messages.push(finalAssistantMessage3);
-        ui.stopThinking();
-        return { response: finalAssistantMessage3.content, conversation: messages };
-      } catch (_) {
-        ui.stopThinking();
-      }
-    }
-
-    ui.stopThinking();
-    return {
-      response: structuredMsg.content,
-      conversation: messages
-    };
-  } catch (_) {
-    // ignore and fall back to original assistant message
-  }
   ui.stopThinking();
   return {
     response: assistantMessage.content,
@@ -1078,9 +1085,16 @@ async function processQuery(query, conversation = [], currentModel) {
   const responseObj = await callOpenRouter(messages, currentModel, true);
   const assistantMessage = responseObj.choices[0].message;
   if (assistantMessage && typeof assistantMessage.content === 'string') {
-    const { thought, content } = splitThinking(assistantMessage.content);
+    const segmented = parseSegmentedTranscript(assistantMessage.content);
+    let thought = segmented.thought;
+    let content = segmented.content;
+    if (!segmented.thought && !segmented.recoveredToolCalls) {
+      const thinkSplit = splitThinking(assistantMessage.content);
+      thought = thought || thinkSplit.thought;
+      content = content || thinkSplit.content;
+    }
    if (thought && SHOW_THOUGHTS) {
-      ui.showInfo(`Thinking:\n${thought}`);
+      ui.showThought(thought);
    }
    assistantMessage.content = content;
  }
@@ -1140,7 +1154,7 @@ async function processQuery(query, conversation = [], currentModel) {
   if (finalAssistantMessage && typeof finalAssistantMessage.content === 'string') {
     const { thought, content } = splitThinking(finalAssistantMessage.content);
     if (thought && SHOW_THOUGHTS) {
-      ui.showInfo(`Thinking:\n${thought}`);
+      ui.showThought(thought);
     }
     finalResponse = content;
   } else {
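
For reference, a minimal sketch of how the new parseSegmentedTranscript() is meant to behave, assuming it runs inside bin/agi-cli.js where parseInlineToolCalls() is also defined; the sample transcript and its commentary payload are hypothetical and only illustrate the channel layout the regex expects:

// Hypothetical segmented transcript (channels 'analysis', 'commentary', 'final')
const sample =
  '<|start|>analysis<|message|>User wants the repo files listed.<|end|>' +
  '<|start|>commentary<|message|>call run_command with {"command":"ls"}<|end|>' +
  '<|start|>final<|message|>Here are the files in the repository.<|end|>';

const parsed = parseSegmentedTranscript(sample);
// parsed.thought  -> 'User wants the repo files listed.'
// parsed.content  -> 'Here are the files in the repository.'
// parsed.recoveredToolCalls -> whatever parseInlineToolCalls() recovers from the
//                              commentary block, or null if nothing is recognized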
package/bin/ui.js CHANGED
@@ -72,6 +72,25 @@ function showAGIStatus(status) {
   console.log(chalk.cyanBright(statusBox));
 }
 
+function showThought(thought) {
+  if (!thought) return;
+  const top = chalk.gray('┌' + '─'.repeat(30) + '┐');
+  const title = chalk.magenta.bold('│ ✧ Thinking');
+  const pad = ' '.repeat(Math.max(0, 30 - ' ✧ Thinking'.length));
+  const titleLine = title + pad + chalk.gray(' │');
+  const separator = chalk.gray('├' + '─'.repeat(30) + '┤');
+  const bottom = chalk.gray('└' + '─'.repeat(30) + '┘');
+  console.log(top);
+  console.log(titleLine);
+  console.log(separator);
+  // Print each line of thought dimmed
+  String(thought).split('\n').forEach(line => {
+    const truncated = line; // keep full line; rely on terminal wrap
+    console.log(chalk.gray('│ ') + chalk.dim(truncated) + chalk.gray(' │'));
+  });
+  console.log(bottom);
+}
+
 module.exports = {
   showHeader,
   showLegacyHeader,
@@ -84,5 +103,6 @@ module.exports = {
   showWarning,
   showSuccess,
   showInfo,
-  showAGIStatus
+  showAGIStatus,
+  showThought
 };
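
A minimal usage sketch of the new showThought() helper, assuming ui.js is required as `ui` the way bin/agi-cli.js already does; the sample thought string is made up:

const ui = require('./ui');

// Renders a gray box titled '✧ Thinking' with each line of the thought dimmed.
ui.showThought('Step 1: read package.json\nStep 2: run the build');

// Falsy input is a no-op, so callers only print a box when a thought was extracted.
ui.showThought('');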
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "sam-coder-cli",
-  "version": "1.0.61",
+  "version": "1.0.63",
   "description": "SAM-CODER: An animated command-line AI assistant with agency capabilities.",
   "main": "bin/agi-cli.js",
   "bin": {