@tjamescouch/gro 1.3.8 → 1.3.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +63 -11
- package/dist/memory/advanced-memory.js +3 -0
- package/dist/memory/agent-memory.js +2 -0
- package/dist/memory/simple-memory.js +3 -0
- package/dist/package.json +13 -2
- package/dist/stream-markers.js +113 -0
- package/package.json +2 -2
package/dist/main.js
CHANGED

@@ -22,6 +22,7 @@ import { groError, asError, isGroError, errorLogFields } from "./errors.js";
 import { bashToolDefinition, executeBash } from "./tools/bash.js";
 import { agentpatchToolDefinition, executeAgentpatch } from "./tools/agentpatch.js";
 import { groVersionToolDefinition, executeGroVersion, getGroVersion } from "./tools/version.js";
+import { createMarkerParser } from "./stream-markers.js";
 const VERSION = getGroVersion();
 // ---------------------------------------------------------------------------
 // Graceful shutdown state — module-level so signal handlers can save sessions.

@@ -480,6 +481,24 @@ function formatOutput(text, format) {
 // ---------------------------------------------------------------------------
 // Tool execution loop
 // ---------------------------------------------------------------------------
+/**
+ * Resolve short model aliases to full model identifiers.
+ * Allows stream markers like @@model-change('haiku')@@ without
+ * the model needing to know the full versioned name.
+ */
+const MODEL_ALIASES = {
+    "haiku": "claude-haiku-4-20250514",
+    "sonnet": "claude-sonnet-4-20250514",
+    "opus": "claude-opus-4-20250514",
+    "gpt4": "gpt-4o",
+    "gpt4o": "gpt-4o",
+    "gpt4o-mini": "gpt-4o-mini",
+    "o3": "o3",
+};
+function resolveModelAlias(alias) {
+    const lower = alias.trim().toLowerCase();
+    return MODEL_ALIASES[lower] ?? alias;
+}
 /**
  * Execute a single turn: call the model, handle tool calls, repeat until
  * the model produces a final text response or we hit maxRounds.

@@ -493,17 +512,37 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
     let finalText = "";
     let turnTokensIn = 0;
     let turnTokensOut = 0;
-    const onToken = cfg.outputFormat === "stream-json"
+    const rawOnToken = cfg.outputFormat === "stream-json"
         ? (t) => process.stdout.write(JSON.stringify({ type: "token", token: t }) + "\n")
         : (t) => process.stdout.write(t);
+    // Mutable model reference — stream markers can switch this mid-turn
+    let activeModel = cfg.model;
     let brokeCleanly = false;
     let idleNudges = 0;
     for (let round = 0; round < cfg.maxToolRounds; round++) {
+        // Create a fresh marker parser per round so partial state doesn't leak
+        const markerParser = createMarkerParser({
+            onToken: rawOnToken,
+            onMarker(marker) {
+                if (marker.name === "model-change") {
+                    const newModel = resolveModelAlias(marker.arg);
+                    Logger.info(`Stream marker: model-change '${marker.arg}' → ${newModel}`);
+                    activeModel = newModel;
+                    cfg.model = newModel; // persist across turns
+                    memory.setModel(newModel); // persist in session metadata on save
+                }
+                else {
+                    Logger.debug(`Stream marker: ${marker.name}('${marker.arg}')`);
+                }
+            },
+        });
         const output = await driver.chat(memory.messages(), {
-            model: cfg.model,
+            model: activeModel,
             tools: tools.length > 0 ? tools : undefined,
-            onToken,
+            onToken: markerParser.onToken,
         });
+        // Flush any remaining buffered tokens from the marker parser
+        markerParser.flush();
         // Track token usage for niki budget enforcement
         if (output.usage) {
            turnTokensIn += output.usage.inputTokens;

@@ -511,12 +550,13 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
            // Log cumulative usage to stderr — niki parses these patterns for budget enforcement
            process.stderr.write(`"input_tokens": ${turnTokensIn}, "output_tokens": ${turnTokensOut}\n`);
        }
-        // Accumulate text
-
-
-
-        //
-
+        // Accumulate clean text (markers stripped) for the return value
+        const cleanText = markerParser.getCleanText();
+        if (cleanText)
+            finalText += cleanText;
+        // Store clean text in memory — markers are runtime directives, not conversation content.
+        // The original output.text is preserved in case we need it for debugging.
+        const assistantMsg = { role: "assistant", from: "Assistant", content: cleanText || "" };
        if (output.toolCalls.length > 0) {
            assistantMsg.tool_calls = output.toolCalls;
        }

@@ -607,8 +647,8 @@ async function executeTurn(driver, memory, mcp, cfg, sessionId) {
    if (!brokeCleanly && tools.length > 0) {
        Logger.debug("Max tool rounds reached — final turn with no tools");
        const finalOutput = await driver.chat(memory.messages(), {
-            model: cfg.model,
-            onToken,
+            model: activeModel,
+            onToken: rawOnToken,
        });
        if (finalOutput.usage) {
            turnTokensIn += finalOutput.usage.inputTokens;

@@ -646,6 +686,12 @@ async function singleShot(cfg, driver, mcp, sessionId, positionalArgs) {
    // Resume existing session if requested
    if (cfg.continueSession || cfg.resumeSession) {
        await memory.load(sessionId);
+        const sess = loadSession(sessionId);
+        if (sess?.meta.model && sess.meta.model !== cfg.model) {
+            Logger.info(`Restoring model from session: ${cfg.model} → ${sess.meta.model}`);
+            cfg.model = sess.meta.model;
+            memory.setModel(sess.meta.model);
+        }
    }
    await memory.add({ role: "user", from: "User", content: prompt });
    let text;

@@ -695,6 +741,12 @@ async function interactive(cfg, driver, mcp, sessionId) {
        if (sess) {
            const msgCount = sess.messages.filter((m) => m.role !== "system").length;
            Logger.info(C.gray(`Resumed session ${sessionId} (${msgCount} messages)`));
+            // Restore model from session metadata (e.g. after a stream marker model-change)
+            if (sess.meta.model && sess.meta.model !== cfg.model) {
+                Logger.info(`Restoring model from session: ${cfg.model} → ${sess.meta.model}`);
+                cfg.model = sess.meta.model;
+                memory.setModel(sess.meta.model);
+            }
        }
    }
    const rl = readline.createInterface({
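Note on the executeTurn changes above: once the marker parser wraps onToken, a model can emit @@model-change('haiku')@@ mid-stream and the next driver.chat() round in the loop runs against the resolved model, while the marker text itself never reaches stdout or memory. A minimal sketch of that wiring outside the real tool loop (the relative import assumes the snippet sits next to dist/stream-markers.js, and the trimmed alias table is copied from the diff above):

// Sketch only: shows how a mid-stream marker flips the active model.
import { createMarkerParser } from "./stream-markers.js";

const MODEL_ALIASES = { "haiku": "claude-haiku-4-20250514" }; // subset of the table in main.js
const resolveModelAlias = (alias) => MODEL_ALIASES[alias.trim().toLowerCase()] ?? alias;

let activeModel = "claude-sonnet-4-20250514"; // stands in for cfg.model in executeTurn
const parser = createMarkerParser({
    onToken: (t) => process.stdout.write(t), // marker text never reaches stdout
    onMarker(marker) {
        if (marker.name === "model-change") {
            activeModel = resolveModelAlias(marker.arg); // next driver.chat() round would use this
        }
    },
});

// Simulate a streamed response in which the marker is split across chunks.
for (const chunk of ["Switching down.@@model-ch", "ange('haiku')@@ Continuing."]) {
    parser.onToken(chunk);
}
parser.flush();
console.log("\nactive model:", activeModel); // → claude-haiku-4-20250514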
package/dist/memory/advanced-memory.js
CHANGED

@@ -24,6 +24,9 @@ export class AdvancedMemory extends AgentMemory {
         this.keepRecentPerLane = Math.max(1, Math.floor(args.keepRecentPerLane ?? 4));
         this.keepRecentTools = Math.max(0, Math.floor(args.keepRecentTools ?? 3));
     }
+    setModel(model) {
+        this.model = model;
+    }
     async load(id) {
         const session = loadSession(id);
         if (session) {
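The one-line setModel addition is what closes the loop between a stream marker and the session store: executeTurn calls memory.setModel(newModel), the model lands in the saved session's metadata, and the new restore blocks in singleShot() and interactive() copy it back into cfg.model on resume. A rough sketch of that round-trip under assumed shapes (SketchMemory, save(), and the Map-backed store are illustrative stand-ins, not gro APIs):

// Sketch of the model-persistence round-trip; shapes are assumptions, not gro's real session format.
const store = new Map();
const loadSession = (id) => store.get(id) ?? null; // stand-in for gro's loadSession()

class SketchMemory {
    constructor(model) { this.model = model; this.messages = []; }
    setModel(model) { this.model = model; }          // mirrors AdvancedMemory.setModel above
    save(id) { store.set(id, { meta: { model: this.model }, messages: this.messages }); }
}

// Run 1: a @@model-change(...)@@ marker fires, memory records the new model, session is saved.
const memory = new SketchMemory("claude-sonnet-4-20250514");
memory.setModel("claude-haiku-4-20250514");
memory.save("sess-1");

// Run 2 (resume): the same check singleShot()/interactive() now perform.
const cfg = { model: "claude-sonnet-4-20250514" };
const sess = loadSession("sess-1");
if (sess?.meta.model && sess.meta.model !== cfg.model) {
    cfg.model = sess.meta.model; // restored: "claude-haiku-4-20250514"
}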
package/dist/package.json
CHANGED

@@ -1,11 +1,22 @@
 {
   "name": "@tjamescouch/gro",
-  "version": "1.3.8",
+  "version": "1.3.10",
   "description": "Provider-agnostic LLM runtime with context management",
+  "bin": {
+    "gro": "./dist/main.js"
+  },
+  "files": [
+    "dist/",
+    "gro",
+    "providers/",
+    "_base.md",
+    "owl"
+  ],
   "type": "module",
   "scripts": {
     "start": "npx tsx src/main.ts",
-    "build": "npx tsc && cp package.json dist/",
+    "build": "npx tsc && chmod +x dist/main.js && cp package.json dist/",
+    "prepublishOnly": "npm run build",
     "build:bun": "bun build src/main.ts --outdir dist --target bun",
     "test": "npx tsx --test tests/*.test.ts",
     "test:bun": "bun test"
package/dist/stream-markers.js
ADDED

@@ -0,0 +1,113 @@
+/**
+ * Stream Marker Parser
+ *
+ * Intercepts @@name('arg')@@ patterns in the token stream.
+ * Generic architecture — any marker type can register a handler.
+ *
+ * Markers are stripped from the output text that reaches the user.
+ * When a complete marker is detected, the registered handler fires.
+ *
+ * Built-in marker types:
+ *   @@model-change('sonnet')@@ — switch the active model mid-stream
+ *   @@callback('name')@@ — fire a named callback
+ *   @@emotion('happy')@@ — set facial expression / emotion state
+ *
+ * Usage:
+ *   const parser = createMarkerParser({ onMarker: (name, arg) => { ... } });
+ *   // Wrap your onToken callback:
+ *   driver.chat(messages, { onToken: parser.onToken });
+ *   // After the response, get clean text:
+ *   const cleanText = parser.getCleanText();
+ */
+import { Logger } from "./logger.js";
+/**
+ * Regex for matching complete markers.
+ * Supports: @@name('arg')@@ and @@name("arg")@@ and @@name(arg)@@
+ */
+const MARKER_RE = /@@([a-zA-Z][a-zA-Z0-9_-]*)\((?:'([^']*)'|"([^"]*)"|([^)]*?))\)@@/g;
+/** Partial marker detection — we might be mid-stream in a marker */
+const PARTIAL_MARKER_RE = /@@[a-zA-Z][a-zA-Z0-9_-]*(?:\([^)]*)?$/;
+export function createMarkerParser(opts) {
+    const { onMarker, onToken } = opts;
+    let buffer = "";
+    let cleanText = "";
+    const markers = [];
+    function processBuffer(isFinal) {
+        // Try to match complete markers in the buffer
+        let lastIndex = 0;
+        const regex = new RegExp(MARKER_RE.source, "g");
+        let match;
+        while ((match = regex.exec(buffer)) !== null) {
+            // Emit any text before this marker
+            const before = buffer.slice(lastIndex, match.index);
+            if (before) {
+                cleanText += before;
+                if (onToken)
+                    onToken(before);
+            }
+            // Parse the marker
+            const name = match[1];
+            const arg = match[2] ?? match[3] ?? match[4] ?? "";
+            const raw = match[0];
+            const marker = { name, arg, raw };
+            markers.push(marker);
+            Logger.debug(`Stream marker detected: ${raw}`);
+            try {
+                onMarker(marker);
+            }
+            catch (e) {
+                Logger.warn(`Marker handler error for ${name}: ${e}`);
+            }
+            lastIndex = match.index + match[0].length;
+        }
+        // Whatever's left after all matches
+        const remainder = buffer.slice(lastIndex);
+        if (isFinal) {
+            // End of stream — flush everything remaining as text
+            if (remainder) {
+                cleanText += remainder;
+                if (onToken)
+                    onToken(remainder);
+            }
+            buffer = "";
+        }
+        else {
+            // Check if the remainder could be a partial marker
+            const partialMatch = PARTIAL_MARKER_RE.exec(remainder);
+            if (partialMatch) {
+                // Hold back the potential partial marker, emit what's before it
+                const safe = remainder.slice(0, partialMatch.index);
+                if (safe) {
+                    cleanText += safe;
+                    if (onToken)
+                        onToken(safe);
+                }
+                buffer = remainder.slice(partialMatch.index);
+            }
+            else {
+                // No partial marker — emit all remaining text
+                if (remainder) {
+                    cleanText += remainder;
+                    if (onToken)
+                        onToken(remainder);
+                }
+                buffer = "";
+            }
+        }
+    }
+    return {
+        onToken(s) {
+            buffer += s;
+            processBuffer(false);
+        },
+        getCleanText() {
+            return cleanText;
+        },
+        getMarkers() {
+            return [...markers];
+        },
+        flush() {
+            processBuffer(true);
+        },
+    };
+}
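Two details of the parser above are easy to miss: markers can be split across token chunks (the PARTIAL_MARKER_RE hold-back covers that), and the handler receives a single marker object ({ name, arg, raw }) rather than the separate (name, arg) arguments shown in the file's header comment. A small usage sketch with made-up chunks (the relative import again assumes the snippet resolves against dist/):

// Hypothetical chunking; in gro these tokens arrive via driver.chat()'s onToken stream.
import { createMarkerParser } from "./stream-markers.js";

const seen = [];
const parser = createMarkerParser({
    onToken: (t) => process.stdout.write(t),     // only marker-free text reaches here
    onMarker: (m) => seen.push(`${m.name}=${m.arg}`),
});

// The emotion marker is split across chunks; "user@@example.com" looks marker-ish but is not held back.
for (const chunk of ["Hi ", "@@emo", "tion('happy')@@ there, ", "user@@example.com"]) {
    parser.onToken(chunk);
}
parser.flush(); // end of stream: emits anything still sitting in the hold-back buffer

console.log("\nmarkers:", seen);              // → [ "emotion=happy" ]
console.log("clean:", parser.getCleanText()); // → "Hi  there, user@@example.com"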
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@tjamescouch/gro",
-  "version": "1.3.8",
+  "version": "1.3.10",
   "description": "Provider-agnostic LLM runtime with context management",
   "bin": {
     "gro": "./dist/main.js"

@@ -15,7 +15,7 @@
   "type": "module",
   "scripts": {
     "start": "npx tsx src/main.ts",
-    "build": "npx tsc && chmod +x dist/main.js",
+    "build": "npx tsc && chmod +x dist/main.js && cp package.json dist/",
     "prepublishOnly": "npm run build",
     "build:bun": "bun build src/main.ts --outdir dist --target bun",
     "test": "npx tsx --test tests/*.test.ts",