sharkbait 1.0.23 → 1.0.24
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.js +51 -3
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -454,7 +454,8 @@ class AzureOpenAIClient {
|
|
|
454
454
|
input,
|
|
455
455
|
instructions,
|
|
456
456
|
tools: toolsConfig,
|
|
457
|
-
stream: true
|
|
457
|
+
stream: true,
|
|
458
|
+
reasoning: { effort: "medium", summary: "auto" }
|
|
458
459
|
});
|
|
459
460
|
}, {
|
|
460
461
|
maxRetries: 3,
|
|
@@ -516,6 +517,15 @@ class AzureOpenAIClient {
|
|
|
516
517
|
}
|
|
517
518
|
break;
|
|
518
519
|
}
|
|
520
|
+
case "response.reasoning_summary_text.delta": {
|
|
521
|
+
const reasoningDelta = event.delta || "";
|
|
522
|
+
if (reasoningDelta) {
|
|
523
|
+
yield { content: "", reasoning: reasoningDelta, toolCalls: undefined, finishReason: null };
|
|
524
|
+
}
|
|
525
|
+
break;
|
|
526
|
+
}
|
|
527
|
+
case "response.reasoning_summary_text.done":
|
|
528
|
+
break;
|
|
519
529
|
case "response.completed": {
|
|
520
530
|
const resp = event.response;
|
|
521
531
|
if (streamedTextLength === 0 && resp?.output) {
|
|
@@ -3351,6 +3361,9 @@ Revise your approach based on what we've learned.`
|
|
|
3351
3361
|
let toolCalls = [];
|
|
3352
3362
|
try {
|
|
3353
3363
|
for await (const chunk of this.llm.chat([{ role: "system", content: this.systemPrompt }, ...contextMessages], this.tools.getDefinitions())) {
|
|
3364
|
+
if (chunk.reasoning) {
|
|
3365
|
+
yield { type: "reasoning", content: chunk.reasoning };
|
|
3366
|
+
}
|
|
3354
3367
|
if (chunk.content) {
|
|
3355
3368
|
fullContent += chunk.content;
|
|
3356
3369
|
yield { type: "text", content: chunk.content };
|
|
@@ -3671,6 +3684,9 @@ ${modePrompt}`;
|
|
|
3671
3684
|
let toolCalls = [];
|
|
3672
3685
|
try {
|
|
3673
3686
|
for await (const chunk of this.llm.chat(messagesToSend, this.getTools())) {
|
|
3687
|
+
if (chunk.reasoning) {
|
|
3688
|
+
yield { type: "reasoning", content: chunk.reasoning };
|
|
3689
|
+
}
|
|
3674
3690
|
if (chunk.content) {
|
|
3675
3691
|
fullContent += chunk.content;
|
|
3676
3692
|
yield { type: "text", content: chunk.content };
|
|
@@ -8349,6 +8365,8 @@ function App({ contextFiles: initialContextFiles, enableBeads: initialBeadsEnabl
|
|
|
8349
8365
|
const [isExecuting, setIsExecuting] = useState2(false);
|
|
8350
8366
|
const [parallelProgress, setParallelProgress] = useState2(null);
|
|
8351
8367
|
const [thinkingMessage, setThinkingMessage] = useState2(null);
|
|
8368
|
+
const [currentReasoning, setCurrentReasoning] = useState2("");
|
|
8369
|
+
const pendingReasoningRef = useRef("");
|
|
8352
8370
|
const [currentModel, setCurrentModel] = useState2(() => {
|
|
8353
8371
|
const config = loadConfig();
|
|
8354
8372
|
return config.azure.deployment;
|
|
@@ -8362,10 +8380,13 @@ function App({ contextFiles: initialContextFiles, enableBeads: initialBeadsEnabl
|
|
|
8362
8380
|
const workingDir = currentDir;
|
|
8363
8381
|
const flushOutput = useCallback(() => {
|
|
8364
8382
|
const output = pendingOutputRef.current;
|
|
8383
|
+
const reasoning = pendingReasoningRef.current;
|
|
8365
8384
|
const tokens = pendingTokensRef.current;
|
|
8366
8385
|
const cost = pendingCostRef.current;
|
|
8367
8386
|
if (output)
|
|
8368
8387
|
setCurrentOutput(output);
|
|
8388
|
+
if (reasoning)
|
|
8389
|
+
setCurrentReasoning(reasoning);
|
|
8369
8390
|
if (tokens > 0) {
|
|
8370
8391
|
setTokenCount((prev) => prev + tokens);
|
|
8371
8392
|
pendingTokensRef.current = 0;
|
|
@@ -8541,6 +8562,8 @@ function App({ contextFiles: initialContextFiles, enableBeads: initialBeadsEnabl
|
|
|
8541
8562
|
setIsLoading(true);
|
|
8542
8563
|
setIsExecuting(true);
|
|
8543
8564
|
setCurrentOutput("");
|
|
8565
|
+
setCurrentReasoning("");
|
|
8566
|
+
pendingReasoningRef.current = "";
|
|
8544
8567
|
setActiveToolCalls([]);
|
|
8545
8568
|
const inputTokens = estimateTokens(userMessage);
|
|
8546
8569
|
setTokenCount((prev) => prev + inputTokens);
|
|
@@ -8553,6 +8576,12 @@ function App({ contextFiles: initialContextFiles, enableBeads: initialBeadsEnabl
|
|
|
8553
8576
|
break;
|
|
8554
8577
|
}
|
|
8555
8578
|
switch (event.type) {
|
|
8579
|
+
case "reasoning":
|
|
8580
|
+
pendingReasoningRef.current += event.content;
|
|
8581
|
+
if (!outputTimerRef.current) {
|
|
8582
|
+
outputTimerRef.current = setTimeout(flushOutput, 80);
|
|
8583
|
+
}
|
|
8584
|
+
break;
|
|
8556
8585
|
case "text":
|
|
8557
8586
|
assistantContent += event.content;
|
|
8558
8587
|
const chunkTokens = estimateTokens(event.content);
|
|
@@ -8665,6 +8694,8 @@ ${event.consolidated}`,
|
|
|
8665
8694
|
pendingCostRef.current = 0;
|
|
8666
8695
|
}
|
|
8667
8696
|
pendingOutputRef.current = "";
|
|
8697
|
+
pendingReasoningRef.current = "";
|
|
8698
|
+
setCurrentReasoning("");
|
|
8668
8699
|
if (assistantContent.trim()) {
|
|
8669
8700
|
setMessages((prev) => [...prev, {
|
|
8670
8701
|
role: "assistant",
|
|
@@ -8773,6 +8804,23 @@ ${event.consolidated}`,
|
|
|
8773
8804
|
newContent: pendingConfirm.data.newContent
|
|
8774
8805
|
}, undefined, false, undefined, this) : undefined
|
|
8775
8806
|
}, undefined, false, undefined, this),
|
|
8807
|
+
currentReasoning && /* @__PURE__ */ jsxDEV12(Box12, {
|
|
8808
|
+
marginLeft: 3,
|
|
8809
|
+
marginBottom: 0,
|
|
8810
|
+
children: [
|
|
8811
|
+
/* @__PURE__ */ jsxDEV12(Text12, {
|
|
8812
|
+
color: colors.textDim,
|
|
8813
|
+
dimColor: true,
|
|
8814
|
+
children: "\uD83D\uDCAD "
|
|
8815
|
+
}, undefined, false, undefined, this),
|
|
8816
|
+
/* @__PURE__ */ jsxDEV12(Text12, {
|
|
8817
|
+
color: colors.textDim,
|
|
8818
|
+
dimColor: true,
|
|
8819
|
+
wrap: "wrap",
|
|
8820
|
+
children: currentReasoning
|
|
8821
|
+
}, undefined, false, undefined, this)
|
|
8822
|
+
]
|
|
8823
|
+
}, undefined, true, undefined, this),
|
|
8776
8824
|
currentOutput && /* @__PURE__ */ jsxDEV12(MessageView, {
|
|
8777
8825
|
role: "assistant",
|
|
8778
8826
|
content: currentOutput,
|
|
@@ -8839,7 +8887,7 @@ ${event.consolidated}`,
|
|
|
8839
8887
|
}
|
|
8840
8888
|
|
|
8841
8889
|
// src/version.ts
|
|
8842
|
-
var VERSION = "1.0.23";
|
|
8890
|
+
var VERSION = "1.0.24";
|
|
8843
8891
|
|
|
8844
8892
|
// src/agent/start-chat.ts
|
|
8845
8893
|
async function startChat(options = {}) {
|
|
@@ -10123,7 +10171,7 @@ ${"━".repeat(60)}`);
|
|
|
10123
10171
|
}
|
|
10124
10172
|
|
|
10125
10173
|
// src/version.ts
|
|
10126
|
-
var VERSION2 = "1.0.23";
|
|
10174
|
+
var VERSION2 = "1.0.24";
|
|
10127
10175
|
|
|
10128
10176
|
// src/ui/logo.tsx
|
|
10129
10177
|
import { Box as Box14, Text as Text14 } from "ink";
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "sharkbait",
|
|
3
|
-
"version": "1.0.23",
|
|
3
|
+
"version": "1.0.24",
|
|
4
4
|
"description": "AI-powered coding assistant for the command line. Uses OpenAI Responses API (not Chat). Autonomous agents, parallel code reviews, 36 tools.",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"main": "./dist/cli.js",
|