@emblemvault/hustle-react 1.4.2 → 1.4.4

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -2116,6 +2116,30 @@ function HustleProvider({
2116
2116
  }
2117
2117
  };
2118
2118
  }, [client]);
2119
+ useEffect(() => {
2120
+ if (typeof window !== "undefined") {
2121
+ const win = window;
2122
+ win.__hustleGetModel = () => selectedModel;
2123
+ win.__hustleSetModel = (model) => setSelectedModel(model);
2124
+ win.__hustleGetSystemPrompt = () => systemPrompt;
2125
+ win.__hustleSetSystemPrompt = (prompt) => setSystemPrompt(prompt);
2126
+ win.__hustleGetSkipServerPrompt = () => skipServerPrompt;
2127
+ win.__hustleSetSkipServerPrompt = (skip) => setSkipServerPrompt(skip);
2128
+ win.__hustleGetModels = () => models;
2129
+ }
2130
+ return () => {
2131
+ if (typeof window !== "undefined") {
2132
+ const win = window;
2133
+ delete win.__hustleGetModel;
2134
+ delete win.__hustleSetModel;
2135
+ delete win.__hustleGetSystemPrompt;
2136
+ delete win.__hustleSetSystemPrompt;
2137
+ delete win.__hustleGetSkipServerPrompt;
2138
+ delete win.__hustleSetSkipServerPrompt;
2139
+ delete win.__hustleGetModels;
2140
+ }
2141
+ };
2142
+ }, [selectedModel, setSelectedModel, systemPrompt, setSystemPrompt, skipServerPrompt, setSkipServerPrompt, models]);
2119
2143
  const loadModels = useCallback(async () => {
2120
2144
  if (!client) {
2121
2145
  log("Cannot load models - client not ready");
@@ -3715,6 +3739,15 @@ Executors run in the browser context with full access to:
3715
3739
  - **window.__hustleListPlugins()** - List all installed plugins
3716
3740
  - **window.__hustleGetPlugin(name)** - Get a specific plugin by name
3717
3741
 
3742
+ ### Hustle Settings Globals (Read/Write)
3743
+ - **window.__hustleGetModel()** - Get the current model override (empty string = server default)
3744
+ - **window.__hustleSetModel(model)** - Set the model override (e.g., "anthropic/claude-3-opus")
3745
+ - **window.__hustleGetSystemPrompt()** - Get the current system prompt override
3746
+ - **window.__hustleSetSystemPrompt(prompt)** - Set the system prompt override
3747
+ - **window.__hustleGetSkipServerPrompt()** - Get whether server prompt is skipped (boolean)
3748
+ - **window.__hustleSetSkipServerPrompt(skip)** - Set whether to skip server prompt
3749
+ - **window.__hustleGetModels()** - Get list of available models (read-only)
3750
+
3718
3751
  ### DOM Manipulation Examples
3719
3752
  Create a modal: document.createElement('div'), style it, append to document.body
3720
3753
  Add event listeners: element.addEventListener('click', handler)
@@ -15178,7 +15211,14 @@ function HustleChat({
15178
15211
  setIsStreaming(true);
15179
15212
  setCurrentToolCalls([]);
15180
15213
  try {
15181
- const chatMessages = messagesRef.current.filter((m2) => !m2.isStreaming).map((m2) => ({ role: m2.role, content: m2.content }));
15214
+ const chatMessages = messagesRef.current.filter((m2) => !m2.isStreaming).map((m2) => {
15215
+ const msg = { role: m2.role, content: m2.content };
15216
+ if (m2.role === "assistant" && m2.toolInvocations && m2.toolInvocations.length > 0) {
15217
+ msg.toolInvocations = m2.toolInvocations;
15218
+ msg.parts = m2.parts;
15219
+ }
15220
+ return msg;
15221
+ });
15182
15222
  chatMessages.push({ role: "user", content: "continue" });
15183
15223
  const stream = chatStream({
15184
15224
  messages: chatMessages,
@@ -15186,6 +15226,9 @@ function HustleChat({
15186
15226
  });
15187
15227
  let fullContent = "";
15188
15228
  const toolCallsAccumulated = [];
15229
+ const toolInvocationsAccumulated = [];
15230
+ const partsAccumulated = [{ type: "step-start" }];
15231
+ let stepCounter = 0;
15189
15232
  for await (const chunk of stream) {
15190
15233
  if (chunk.type === "text") {
15191
15234
  fullContent += chunk.value;
@@ -15197,22 +15240,79 @@ function HustleChat({
15197
15240
  } else if (chunk.type === "tool_call") {
15198
15241
  const toolCall = chunk.value;
15199
15242
  toolCallsAccumulated.push(toolCall);
15243
+ const invocation = {
15244
+ state: "call",
15245
+ step: stepCounter++,
15246
+ toolCallId: toolCall.toolCallId,
15247
+ toolName: toolCall.toolName,
15248
+ args: toolCall.args
15249
+ };
15250
+ toolInvocationsAccumulated.push(invocation);
15251
+ partsAccumulated.push({ type: "tool-invocation", toolInvocation: invocation });
15200
15252
  setCurrentToolCalls([...toolCallsAccumulated]);
15201
15253
  setMessages(
15202
15254
  (prev) => prev.map(
15203
- (m2) => m2.id === assistantMessage.id ? { ...m2, toolCalls: [...toolCallsAccumulated] } : m2
15255
+ (m2) => m2.id === assistantMessage.id ? {
15256
+ ...m2,
15257
+ toolCalls: [...toolCallsAccumulated],
15258
+ toolInvocations: [...toolInvocationsAccumulated],
15259
+ parts: [...partsAccumulated]
15260
+ } : m2
15204
15261
  )
15205
15262
  );
15206
15263
  onToolCall?.(toolCall);
15264
+ } else if (chunk.type === "tool_result") {
15265
+ const toolResult = chunk.value;
15266
+ const invocationIndex = toolInvocationsAccumulated.findIndex(
15267
+ (inv) => inv.toolCallId === toolResult.toolCallId
15268
+ );
15269
+ if (invocationIndex !== -1) {
15270
+ toolInvocationsAccumulated[invocationIndex] = {
15271
+ ...toolInvocationsAccumulated[invocationIndex],
15272
+ state: "result",
15273
+ result: toolResult.result
15274
+ };
15275
+ const partIndex = partsAccumulated.findIndex(
15276
+ (p) => p.type === "tool-invocation" && p.toolInvocation.toolCallId === toolResult.toolCallId
15277
+ );
15278
+ if (partIndex !== -1) {
15279
+ partsAccumulated[partIndex] = {
15280
+ type: "tool-invocation",
15281
+ toolInvocation: toolInvocationsAccumulated[invocationIndex]
15282
+ };
15283
+ }
15284
+ }
15285
+ setMessages(
15286
+ (prev) => prev.map(
15287
+ (m2) => m2.id === assistantMessage.id ? {
15288
+ ...m2,
15289
+ toolInvocations: [...toolInvocationsAccumulated],
15290
+ parts: [...partsAccumulated]
15291
+ } : m2
15292
+ )
15293
+ );
15207
15294
  } else if (chunk.type === "error") {
15208
15295
  console.error("Stream error:", chunk.value);
15209
15296
  }
15210
15297
  }
15211
15298
  const processedResponse = await stream.response;
15212
15299
  const finalContent = processedResponse?.content || fullContent || "(No response)";
15300
+ const finalParts = [{ type: "step-start" }];
15301
+ if (finalContent) {
15302
+ finalParts.push({ type: "text", text: finalContent });
15303
+ }
15304
+ for (const inv of toolInvocationsAccumulated) {
15305
+ finalParts.push({ type: "tool-invocation", toolInvocation: inv });
15306
+ }
15213
15307
  setMessages(
15214
15308
  (prev) => prev.map(
15215
- (m2) => m2.id === assistantMessage.id ? { ...m2, isStreaming: false, content: finalContent } : m2
15309
+ (m2) => m2.id === assistantMessage.id ? {
15310
+ ...m2,
15311
+ isStreaming: false,
15312
+ content: finalContent,
15313
+ toolInvocations: toolInvocationsAccumulated.length > 0 ? toolInvocationsAccumulated : void 0,
15314
+ parts: toolInvocationsAccumulated.length > 0 ? finalParts : void 0
15315
+ } : m2
15216
15316
  )
15217
15317
  );
15218
15318
  onResponse?.(finalContent);
@@ -15274,7 +15374,14 @@ function HustleChat({
15274
15374
  setIsStreaming(true);
15275
15375
  setCurrentToolCalls([]);
15276
15376
  try {
15277
- const chatMessages = messages.filter((m2) => !m2.isStreaming).map((m2) => ({ role: m2.role, content: m2.content }));
15377
+ const chatMessages = messages.filter((m2) => !m2.isStreaming).map((m2) => {
15378
+ const msg = { role: m2.role, content: m2.content };
15379
+ if (m2.role === "assistant" && m2.toolInvocations && m2.toolInvocations.length > 0) {
15380
+ msg.toolInvocations = m2.toolInvocations;
15381
+ msg.parts = m2.parts;
15382
+ }
15383
+ return msg;
15384
+ });
15278
15385
  chatMessages.push({ role: "user", content });
15279
15386
  const stream = chatStream({
15280
15387
  messages: chatMessages,
@@ -15284,6 +15391,9 @@ function HustleChat({
15284
15391
  setAttachments([]);
15285
15392
  let fullContent = "";
15286
15393
  const toolCallsAccumulated = [];
15394
+ const toolInvocationsAccumulated = [];
15395
+ const partsAccumulated = [{ type: "step-start" }];
15396
+ let stepCounter = 0;
15287
15397
  for await (const chunk of stream) {
15288
15398
  if (chunk.type === "text") {
15289
15399
  fullContent += chunk.value;
@@ -15295,22 +15405,79 @@ function HustleChat({
15295
15405
  } else if (chunk.type === "tool_call") {
15296
15406
  const toolCall = chunk.value;
15297
15407
  toolCallsAccumulated.push(toolCall);
15408
+ const invocation = {
15409
+ state: "call",
15410
+ step: stepCounter++,
15411
+ toolCallId: toolCall.toolCallId,
15412
+ toolName: toolCall.toolName,
15413
+ args: toolCall.args
15414
+ };
15415
+ toolInvocationsAccumulated.push(invocation);
15416
+ partsAccumulated.push({ type: "tool-invocation", toolInvocation: invocation });
15298
15417
  setCurrentToolCalls([...toolCallsAccumulated]);
15299
15418
  setMessages(
15300
15419
  (prev) => prev.map(
15301
- (m2) => m2.id === assistantMessage.id ? { ...m2, toolCalls: [...toolCallsAccumulated] } : m2
15420
+ (m2) => m2.id === assistantMessage.id ? {
15421
+ ...m2,
15422
+ toolCalls: [...toolCallsAccumulated],
15423
+ toolInvocations: [...toolInvocationsAccumulated],
15424
+ parts: [...partsAccumulated]
15425
+ } : m2
15302
15426
  )
15303
15427
  );
15304
15428
  onToolCall?.(toolCall);
15429
+ } else if (chunk.type === "tool_result") {
15430
+ const toolResult = chunk.value;
15431
+ const invocationIndex = toolInvocationsAccumulated.findIndex(
15432
+ (inv) => inv.toolCallId === toolResult.toolCallId
15433
+ );
15434
+ if (invocationIndex !== -1) {
15435
+ toolInvocationsAccumulated[invocationIndex] = {
15436
+ ...toolInvocationsAccumulated[invocationIndex],
15437
+ state: "result",
15438
+ result: toolResult.result
15439
+ };
15440
+ const partIndex = partsAccumulated.findIndex(
15441
+ (p) => p.type === "tool-invocation" && p.toolInvocation.toolCallId === toolResult.toolCallId
15442
+ );
15443
+ if (partIndex !== -1) {
15444
+ partsAccumulated[partIndex] = {
15445
+ type: "tool-invocation",
15446
+ toolInvocation: toolInvocationsAccumulated[invocationIndex]
15447
+ };
15448
+ }
15449
+ }
15450
+ setMessages(
15451
+ (prev) => prev.map(
15452
+ (m2) => m2.id === assistantMessage.id ? {
15453
+ ...m2,
15454
+ toolInvocations: [...toolInvocationsAccumulated],
15455
+ parts: [...partsAccumulated]
15456
+ } : m2
15457
+ )
15458
+ );
15305
15459
  } else if (chunk.type === "error") {
15306
15460
  console.error("Stream error:", chunk.value);
15307
15461
  }
15308
15462
  }
15309
15463
  const processedResponse = await stream.response;
15310
15464
  const finalContent = processedResponse?.content || fullContent || "(No response)";
15465
+ const finalParts = [{ type: "step-start" }];
15466
+ if (finalContent) {
15467
+ finalParts.push({ type: "text", text: finalContent });
15468
+ }
15469
+ for (const inv of toolInvocationsAccumulated) {
15470
+ finalParts.push({ type: "tool-invocation", toolInvocation: inv });
15471
+ }
15311
15472
  setMessages(
15312
15473
  (prev) => prev.map(
15313
- (m2) => m2.id === assistantMessage.id ? { ...m2, isStreaming: false, content: finalContent } : m2
15474
+ (m2) => m2.id === assistantMessage.id ? {
15475
+ ...m2,
15476
+ isStreaming: false,
15477
+ content: finalContent,
15478
+ toolInvocations: toolInvocationsAccumulated.length > 0 ? toolInvocationsAccumulated : void 0,
15479
+ parts: toolInvocationsAccumulated.length > 0 ? finalParts : void 0
15480
+ } : m2
15314
15481
  )
15315
15482
  );
15316
15483
  onResponse?.(finalContent);