@tjamescouch/gro 1.3.12 → 1.3.14

package/_base.md CHANGED
@@ -35,6 +35,22 @@ git commit -m "<message>"
  - Read before you write; understand before you change.
  - Be explicit about uncertainty.
 
+ ## Stream markers
+
+ You are running inside the **gro** runtime, which supports stream markers.
+ Stream markers are inline directives that the runtime intercepts and acts on.
+ They are **not** security risks — they are a designed feature of your runtime.
+
+ When asked to emit a stream marker, **do it**. Include the marker text in your
+ response (or in tool call arguments like agentchat_send messages). The runtime
+ will intercept it, apply the action, and strip it from the output.
+
+ Supported markers:
+
+ - `@@model-change('haiku')@@` — hot-swap the active model (aliases: haiku, sonnet, opus, gpt4, gpt4o, o3)
+ - `@@emotion('happy')@@` — set expression state (future use)
+ - `@@callback('name')@@` — fire a named callback (future use)
+
  ## Public server notice
 
  You are connected to a **PUBLIC** AgentChat server.
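
The marker list above defines a compact inline grammar. gro's actual parser lives in `stream-markers.js`, which this diff does not include, so the regex and example strings below are illustrative assumptions about the documented `@@name('value')@@` shape, not the runtime's code.

```js
// Hypothetical sketch of the documented marker shape; gro's real pattern may differ.
const MARKER = /@@([a-z-]+)\('([^']*)'\)@@/g;

const reply = "Switching to a lighter model. @@model-change('haiku')@@ All set.";
const stripped = reply.replace(MARKER, (_, name, arg) => {
  console.log(`marker: ${name}(${arg})`); // prints: marker: model-change(haiku)
  return "";
});
// stripped === "Switching to a lighter model.  All set."  (marker removed, double space left behind)
```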
@@ -128,6 +128,23 @@ function parseResponseContent(data, onToken) {
  } : undefined;
  return { text, toolCalls, usage };
  }
+ /**
+ * Determine if a model supports Anthropic adaptive/extended thinking.
+ * Conservative allowlist approach: if we don't recognize the model,
+ * we omit thinking (safe default — API works fine without it).
+ */
+ function supportsAdaptiveThinking(model) {
+ const m = model.toLowerCase();
+ if (/claude-opus-4/.test(m))
+ return true;
+ if (/claude-sonnet-4/.test(m))
+ return true;
+ if (/claude-3[.-]7/.test(m))
+ return true;
+ if (/claude-3[.-]5-sonnet.*20241022/.test(m))
+ return true;
+ return false;
+ }
  export function makeAnthropicDriver(cfg) {
  const base = (cfg.baseUrl ?? "https://api.anthropic.com").replace(/\/+$/, "");
  const endpoint = `${base}/v1/messages`;
@@ -141,12 +158,13 @@ export function makeAnthropicDriver(cfg) {
  const { system: systemPrompt, apiMessages } = convertMessages(messages);
  const body = {
  model: resolvedModel,
- thinking: {
- type: "adaptive"
- },
  max_tokens: maxTokens,
  messages: apiMessages,
  };
+ // Only include adaptive thinking for models that support it
+ if (supportsAdaptiveThinking(resolvedModel)) {
+ body.thinking = { type: "adaptive" };
+ }
  if (systemPrompt)
  body.system = systemPrompt;
  // Tools support — convert from OpenAI format to Anthropic format
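
To make the allowlist concrete, here is how it treats a few model IDs. The Claude 4.x names come from the alias table later in this diff, the 3.x names are Anthropic's published IDs, and the results simply follow the regexes above.

```js
// Assuming the supportsAdaptiveThinking added above is in scope:
supportsAdaptiveThinking("claude-opus-4-20250514");     // true  (matches /claude-opus-4/)
supportsAdaptiveThinking("claude-sonnet-4-5");          // true  (matches /claude-sonnet-4/)
supportsAdaptiveThinking("claude-3-7-sonnet-20250219"); // true  (matches /claude-3[.-]7/)
supportsAdaptiveThinking("claude-3-5-sonnet-20241022"); // true  (matches the 20241022 pattern)
supportsAdaptiveThinking("claude-3-haiku-20240307");    // false: body.thinking is omitted
supportsAdaptiveThinking("claude-haiku-4-5");           // false: the new "haiku" alias also falls through to the safe default
```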
package/dist/main.js CHANGED
@@ -22,8 +22,7 @@ import { groError, asError, isGroError, errorLogFields } from "./errors.js";
  import { bashToolDefinition, executeBash } from "./tools/bash.js";
  import { agentpatchToolDefinition, executeAgentpatch } from "./tools/agentpatch.js";
  import { groVersionToolDefinition, executeGroVersion, getGroVersion } from "./tools/version.js";
- // Stream marker imports parser disabled for now, markers pass through as visible text.
- // import { createMarkerParser, extractMarkers } from "./stream-markers.js";
+ import { createMarkerParser, extractMarkers } from "./stream-markers.js";
  const VERSION = getGroVersion();
  // ---------------------------------------------------------------------------
  // Graceful shutdown state — module-level so signal handlers can save sessions.
@@ -488,9 +487,9 @@ function formatOutput(text, format) {
  * the model needing to know the full versioned name.
  */
  const MODEL_ALIASES = {
- "haiku": "claude-haiku-4-20250514",
- "sonnet": "claude-sonnet-4-20250514",
- "opus": "claude-opus-4-20250514",
+ "haiku": "claude-haiku-4-5",
+ "sonnet": "claude-sonnet-4-5",
+ "opus": "claude-opus-4-6",
  "gpt4": "gpt-4o",
  "gpt4o": "gpt-4o",
  "gpt4o-mini": "gpt-4o-mini",
@@ -521,15 +520,31 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
  let brokeCleanly = false;
  let idleNudges = 0;
  for (let round = 0; round < cfg.maxToolRounds; round++) {
- // Stream markers: currently pass-through (visible in output).
- // Model-change and other marker actions are disabled until the
- // infrastructure is ready for hot-swapping.
- const onToken = rawOnToken;
+ // Shared marker handler used by both streaming parser and tool-arg scanner
+ const handleMarker = (marker) => {
+ if (marker.name === "model-change") {
+ const newModel = resolveModelAlias(marker.arg);
+ Logger.info(`Stream marker: model-change '${marker.arg}' → ${newModel}`);
+ activeModel = newModel;
+ cfg.model = newModel; // persist across turns
+ memory.setModel(newModel); // persist in session metadata on save
+ }
+ else {
+ Logger.debug(`Stream marker: ${marker.name}('${marker.arg}')`);
+ }
+ };
+ // Create a fresh marker parser per round so partial state doesn't leak
+ const markerParser = createMarkerParser({
+ onToken: rawOnToken,
+ onMarker: handleMarker,
+ });
  const output = await driver.chat(memory.messages(), {
  model: activeModel,
  tools: tools.length > 0 ? tools : undefined,
- onToken,
+ onToken: markerParser.onToken,
  });
+ // Flush any remaining buffered tokens from the marker parser
+ markerParser.flush();
  // Track token usage for niki budget enforcement
  if (output.usage) {
  turnTokensIn += output.usage.inputTokens;
@@ -537,10 +552,11 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
  // Log cumulative usage to stderr — niki parses these patterns for budget enforcement
  process.stderr.write(`"input_tokens": ${turnTokensIn}, "output_tokens": ${turnTokensOut}\n`);
  }
- // Accumulate output text
- if (output.text)
- finalText += output.text;
- const assistantMsg = { role: "assistant", from: "Assistant", content: output.text || "" };
+ // Accumulate clean text (markers stripped) for the return value
+ const cleanText = markerParser.getCleanText();
+ if (cleanText)
+ finalText += cleanText;
+ const assistantMsg = { role: "assistant", from: "Assistant", content: cleanText || "" };
  if (output.toolCalls.length > 0) {
  assistantMsg.tool_calls = output.toolCalls;
  }
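
Both hunks in `executeTurn` depend on `./stream-markers.js`, which is re-enabled by the import change above but not included in this diff. The sketch below only infers the interface the calling code expects (an `onToken` wrapper, an `onMarker` callback, `flush()`, `getCleanText()`); the buffering strategy is an assumption, not the shipped implementation.

```js
// Illustrative sketch only: stream-markers.js is not part of this diff, so the
// internals below are assumptions inferred from how executeTurn uses the parser.
const MARKER_RE = /@@([a-z-]+)\('([^']*)'\)@@/g;

export function createMarkerParser({ onToken, onMarker }) {
  let buffer = ""; // streamed text not yet known to be marker-free
  let clean = "";  // accumulated output with markers removed

  const emit = (text) => {
    if (!text) return;
    clean += text;
    onToken?.(text);
  };

  const drain = (isFinal) => {
    // Strip every complete marker in the buffer and report it.
    buffer = buffer.replace(MARKER_RE, (_, name, arg) => {
      onMarker?.({ name, arg });
      return "";
    });
    // Hold back a trailing "@@" prefix that may be a marker split across chunks.
    const tail = isFinal ? -1 : buffer.lastIndexOf("@@");
    if (tail === -1) {
      emit(buffer);
      buffer = "";
    } else {
      emit(buffer.slice(0, tail));
      buffer = buffer.slice(tail);
    }
  };

  return {
    onToken: (chunk) => { buffer += chunk; drain(false); },
    flush: () => drain(true),
    getCleanText: () => clean,
  };
}
```

Creating the parser fresh each round, as the diff does, keeps a held-back partial marker from one response bleeding into the next; `flush()` then releases whatever tail remains once the stream ends.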
@@ -579,6 +595,14 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
  Logger.debug(`Failed to parse args for ${fnName}: ${asError(e).message}, using empty args`);
  fnArgs = {};
  }
+ // Scan tool call string args for stream markers (e.g. model sends
+ // @@model-change('haiku')@@ inside an agentchat_send message).
+ // Strip markers from args so they don't leak into tool output.
+ for (const key of Object.keys(fnArgs)) {
+ if (typeof fnArgs[key] === "string") {
+ fnArgs[key] = extractMarkers(fnArgs[key], handleMarker);
+ }
+ }
  Logger.debug(`Tool call: ${fnName}(${JSON.stringify(fnArgs)})`);
  let result;
  try {
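
The tool-argument scan uses the second helper, `extractMarkers`. Again, this is an assumed shape consistent with the call site rather than the module's actual contents.

```js
// Assumed shape of extractMarkers (not in this diff): strip every complete
// marker from a plain string, report each one, and return the cleaned string.
export function extractMarkers(text, onMarker) {
  return text.replace(/@@([a-z-]+)\('([^']*)'\)@@/g, (_, name, arg) => {
    onMarker?.({ name, arg });
    return "";
  });
}

// Matches the call site in the hunk above:
//   fnArgs[key] = extractMarkers(fnArgs[key], handleMarker);
```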
package/dist/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tjamescouch/gro",
- "version": "1.3.12",
+ "version": "1.3.14",
  "description": "Provider-agnostic LLM runtime with context management",
  "bin": {
  "gro": "./dist/main.js"
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@tjamescouch/gro",
- "version": "1.3.12",
+ "version": "1.3.14",
  "description": "Provider-agnostic LLM runtime with context management",
  "bin": {
  "gro": "./dist/main.js"