@flutchai/flutch-sdk 0.1.9 → 0.1.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +153 -205
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +20 -15
- package/dist/index.d.ts +20 -15
- package/dist/index.js +153 -205
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
@@ -3900,7 +3900,6 @@ var AttachmentType = /* @__PURE__ */ ((AttachmentType2) => {
 var StreamChannel = /* @__PURE__ */ ((StreamChannel2) => {
   StreamChannel2["TEXT"] = "text";
   StreamChannel2["PROCESSING"] = "processing";
-  StreamChannel2["TOOLS"] = "tools";
   return StreamChannel2;
 })(StreamChannel || {});
@@ -4443,15 +4442,15 @@ exports.EventProcessor = class EventProcessor {
    */
   createAccumulator() {
     return {
-
-
-
-
+      channels: /* @__PURE__ */ new Map([
+        ["text" /* TEXT */, { contentChain: [], currentBlock: null }],
+        ["processing" /* PROCESSING */, { contentChain: [], currentBlock: null }]
+      ]),
+      attachments: [],
+      metadata: {},
       traceEvents: [],
       traceStartedAt: null,
-      traceCompletedAt: null
-      currentReasoningSteps: [],
-      currentToolUse: null
+      traceCompletedAt: null
     };
   }
   /**
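
In 0.1.10 the accumulator drops the flat `currentReasoningSteps`/`currentToolUse` fields and keys streaming state per `StreamChannel`. A minimal TypeScript sketch of the shape this hunk implies (the interface names below are illustrative, not exported by the SDK):

```ts
// Sketch of the accumulator shape implied by the new createAccumulator().
// Interface names here are inferred from this diff; they are not SDK exports.
type StreamChannel = "text" | "processing";

interface ContentBlock {
  index: number;
  type: "text" | "tool_use";
  text?: string;    // set for "text" blocks
  name?: string;    // the remaining fields are set for "tool_use" blocks
  id?: string;
  input?: string;
  output?: string;
}

interface ChannelState {
  contentChain: ContentBlock[];       // finished blocks, in stream order
  currentBlock: ContentBlock | null;  // block currently being streamed
}

interface Accumulator {
  channels: Map<StreamChannel, ChannelState>;
  attachments: unknown[];
  metadata: Record<string, unknown>;
  traceEvents: unknown[];
  traceStartedAt: number | null;
  traceCompletedAt: number | null;
}

function createAccumulator(): Accumulator {
  return {
    channels: new Map<StreamChannel, ChannelState>([
      ["text", { contentChain: [], currentBlock: null }],
      ["processing", { contentChain: [], currentBlock: null }]
    ]),
    attachments: [],
    metadata: {},
    traceEvents: [],
    traceStartedAt: null,
    traceCompletedAt: null
  };
}
```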
@@ -4479,10 +4478,88 @@ exports.EventProcessor = class EventProcessor {
     }
     return [];
   }
+  /**
+   * Send delta to UI (unified format)
+   */
+  sendDelta(channel, delta, onPartial) {
+    if (!onPartial) return;
+    onPartial(
+      JSON.stringify({
+        channel,
+        delta
+      })
+    );
+  }
+  /**
+   * Process content stream blocks (universal for all channels)
+   */
+  processContentStream(acc, channel, blocks, onPartial) {
+    const state = acc.channels.get(channel);
+    for (const block of blocks) {
+      if (block.type === "tool_use" || block.type === "tool_call") {
+        if (state.currentBlock) {
+          state.contentChain.push(state.currentBlock);
+        }
+        state.currentBlock = {
+          index: state.contentChain.length,
+          type: "tool_use",
+          name: block.name,
+          id: block.id,
+          input: block.input || "",
+          output: ""
+        };
+        this.sendDelta(
+          channel,
+          {
+            type: "step_started",
+            step: state.currentBlock
+          },
+          onPartial
+        );
+      } else if (block.type === "input_json_delta") {
+        if (state.currentBlock && state.currentBlock.type === "tool_use") {
+          const chunk = block.input || "";
+          state.currentBlock.input += chunk;
+          this.sendDelta(
+            channel,
+            {
+              type: "tool_input_chunk",
+              stepId: state.currentBlock.id,
+              chunk
+            },
+            onPartial
+          );
+        }
+      } else if (block.type === "text") {
+        const textChunk = block.text || "";
+        if (state.currentBlock && state.currentBlock.type === "text") {
+          state.currentBlock.text = (state.currentBlock.text || "") + textChunk;
+        } else {
+          if (state.currentBlock) {
+            state.contentChain.push(state.currentBlock);
+          }
+          state.currentBlock = {
+            index: state.contentChain.length,
+            type: "text",
+            text: textChunk
+          };
+        }
+        this.sendDelta(
+          channel,
+          {
+            type: "text_chunk",
+            text: textChunk
+          },
+          onPartial
+        );
+      }
+    }
+  }
   /**
    * Groups tool_use and input_json_delta into proper structure
    * tool_use.input → output (tool execution result)
    * input_json_delta.input → output (tool execution result, accumulated)
+   * @deprecated This method is for legacy fallback only
    */
   mapReasoningSteps(rawSteps) {
     const steps = [];
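
With `sendDelta`, every `onPartial` callback now receives a single envelope format: a JSON string carrying the `channel` plus a typed `delta`. A hedged consumer-side sketch of dispatching on the delta types visible in `processContentStream` above (the `Delta` union is reconstructed from this diff, not an exported type; `tool_output_chunk` is emitted by the `on_tool_end` handler further down):

```ts
// Hedged sketch of a consumer for the unified onPartial payload.
// The Delta union is reconstructed from the delta objects in this diff.
type Delta =
  | { type: "step_started"; step: { id?: string; name?: string; type: string } }
  | { type: "tool_input_chunk"; stepId?: string; chunk: string }
  | { type: "tool_output_chunk"; stepId?: string; chunk: string }
  | { type: "text_chunk"; text: string };

interface PartialEnvelope {
  channel: "text" | "processing";
  delta: Delta;
}

function handlePartial(raw: string): void {
  const { channel, delta } = JSON.parse(raw) as PartialEnvelope;
  switch (delta.type) {
    case "text_chunk":
      // Append streamed text to the channel's running output.
      console.log(`[${channel}] +text`, delta.text);
      break;
    case "step_started":
      // A new tool_use block began on this channel.
      console.log(`[${channel}] step started`, delta.step.name);
      break;
    case "tool_input_chunk":
      // Incremental JSON of the tool's input arguments.
      console.log(`[${channel}] tool input`, delta.chunk);
      break;
    case "tool_output_chunk":
      // Tool result attached when on_tool_end arrives.
      console.log(`[${channel}] tool output`, delta.chunk);
      break;
  }
}
```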
@@ -4531,60 +4608,10 @@ exports.EventProcessor = class EventProcessor {
    */
   processEvent(acc, event, onPartial) {
     this.captureTraceEvent(acc, event);
-    if (event.event === "on_chat_model_stream" && event.
-      const
-      const blocks = this.normalizeContentBlocks(chunk);
-
-      onPartial(JSON.stringify({ text: blocks }));
-      }
-      const textOnly = blocks.filter((block) => block?.type === "text").map((block) => block.text || "").join("");
-      if (textOnly) {
-        acc.streamedText += textOnly;
-      }
-      return;
-    }
-    if (event.event === "on_chat_model_stream" && event.metadata?.stream_channel === "processing" /* PROCESSING */ && event.data?.chunk?.content) {
-      const chunk = event.data.chunk.content;
-      const blocks = this.normalizeContentBlocks(chunk);
-      for (const block of blocks) {
-        if (block.type === "tool_use" || block.type === "tool_call") {
-          if (acc.currentToolUse) {
-            acc.currentReasoningSteps.push(acc.currentToolUse);
-          }
-          acc.currentToolUse = {
-            index: acc.currentReasoningSteps.length,
-            type: "tool_use",
-            name: block.name,
-            id: block.id,
-            input: block.input || "",
-            output: ""
-          };
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "step_started",
-                  step: acc.currentToolUse
-                }
-              })
-            );
-          }
-        } else if (block.type === "input_json_delta") {
-          if (acc.currentToolUse && onPartial) {
-            const chunk2 = block.input || "";
-            acc.currentToolUse.output += chunk2;
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "output_chunk",
-                  stepId: acc.currentToolUse.id,
-                  chunk: chunk2
-                }
-              })
-            );
-          }
-        }
-      }
+    if (event.event === "on_chat_model_stream" && event.data?.chunk?.content) {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const blocks = this.normalizeContentBlocks(event.data.chunk.content);
+      this.processContentStream(acc, channel, blocks, onPartial);
       return;
     }
     if (event.event === "on_tool_start") {
@@ -4597,11 +4624,27 @@ exports.EventProcessor = class EventProcessor {
       return;
     }
     if (event.event === "on_tool_end") {
-
-
-
-
-
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const state = acc.channels.get(channel);
+      if (state?.currentBlock && state.currentBlock.type === "tool_use") {
+        const output = event.data?.output;
+        const outputString = typeof output === "string" ? output : JSON.stringify(output, null, 2);
+        state.currentBlock.output = outputString;
+        this.sendDelta(
+          channel,
+          {
+            type: "tool_output_chunk",
+            stepId: state.currentBlock.id,
+            chunk: outputString
+          },
+          onPartial
+        );
+        this.logger.log("\u2705 Tool execution completed", {
+          toolName: event.name,
+          outputPreview: outputString.substring(0, 200) + (outputString.length > 200 ? "..." : ""),
+          runId: event.run_id
+        });
+      }
       return;
     }
     if (event.event === "on_tool_error") {
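
Combined with `processContentStream`, this handler gives each tool call a predictable delta sequence on its channel: `step_started`, zero or more `tool_input_chunk`s, then a single `tool_output_chunk` when the tool returns. An illustrative sequence of `onPartial` payloads (the ids, names, and values below are invented for the example):

```ts
// Illustrative onPartial payloads for one tool call on the "processing" channel.
// All identifiers and chunk contents are made up; only the envelope/delta shapes
// follow this diff.
const examplePayloads: string[] = [
  JSON.stringify({
    channel: "processing",
    delta: { type: "step_started", step: { index: 0, type: "tool_use", name: "search", id: "toolu_01", input: "", output: "" } }
  }),
  JSON.stringify({
    channel: "processing",
    delta: { type: "tool_input_chunk", stepId: "toolu_01", chunk: '{"query":"weather' }
  }),
  JSON.stringify({
    channel: "processing",
    delta: { type: "tool_input_chunk", stepId: "toolu_01", chunk: ' in Paris"}' }
  }),
  // Emitted by the on_tool_end handler above once the tool returns.
  JSON.stringify({
    channel: "processing",
    delta: { type: "tool_output_chunk", stepId: "toolu_01", chunk: '{"tempC":18}' }
  })
];

examplePayloads.forEach((p) => console.log(p));
```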
@@ -4613,133 +4656,48 @@ exports.EventProcessor = class EventProcessor {
       return;
     }
     if (event.event === "on_chat_model_end") {
-
-
-
-
-      const llmCall = {
-        modelId,
-        promptTokens: usageMetadata.input_tokens || 0,
-        completionTokens: usageMetadata.output_tokens || 0,
-        totalTokens: usageMetadata.total_tokens || 0,
-        timestamp: Date.now(),
-        nodeName: event.metadata?.langgraph_node || event.name
-      };
-      acc.llmCalls.push(llmCall);
-      this.logger.log("\u2705 LLM call recorded", {
-        modelId,
-        tokens: llmCall.totalTokens,
-        nodeName: llmCall.nodeName,
-        totalRecorded: acc.llmCalls.length
-      });
-      } else {
-        this.logger.warn(
-          "\u26A0\uFE0F Missing usage metadata or modelId in on_chat_model_end",
-          {
-            hasUsageMetadata: !!usageMetadata,
-            hasModelId: !!modelId,
-            eventName: event.name,
-            metadataKeys: event.metadata ? Object.keys(event.metadata) : [],
-            outputKeys: output ? Object.keys(output) : []
-          }
-        );
-      }
-      if (event.metadata?.stream_channel === "processing" /* PROCESSING */) {
-        if (acc.currentToolUse) {
-          acc.currentReasoningSteps.push(acc.currentToolUse);
-          acc.currentToolUse = null;
-        }
-        if (acc.currentReasoningSteps.length > 0) {
-          acc.reasoningChains.push({
-            steps: acc.currentReasoningSteps,
-            isComplete: true
-          });
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "chain_completed"
-                }
-              })
-            );
-          }
-          acc.currentReasoningSteps = [];
-        } else {
-          const stepsRaw = output?.content || // AIMessageChunk object (direct)
-          output?.kwargs?.content || // Serialized LangChain format
-          event.data?.chunk?.content || // Older version
-          [];
-          let steps;
-          if (Array.isArray(stepsRaw)) {
-            steps = this.mapReasoningSteps(stepsRaw);
-          } else if (typeof stepsRaw === "string" && stepsRaw.trim().length > 0) {
-            steps = [
-              {
-                index: 0,
-                type: "text",
-                text: stepsRaw.trim()
-              }
-            ];
-          } else {
-            steps = [];
-          }
-          if (steps.length > 0) {
-            acc.reasoningChains.push({
-              steps,
-              isComplete: true
-            });
-            if (onPartial) {
-              onPartial(
-                JSON.stringify({
-                  processing_delta: {
-                    type: "chain_completed"
-                  }
-                })
-              );
-            }
-          }
-        }
-      }
+      this.logger.debug("\u2705 LLM call completed", {
+        nodeName: event.metadata?.langgraph_node || event.name,
+        channel: event.metadata?.stream_channel
+      });
       return;
     }
-    if (event.event === "on_chain_end"
-      const
-
-
-
-
-
-
-
-
-
-
-
-
-      };
-      } else if (output?.generation?.content) {
-        generation = {
-          text: output.generation.content,
-          attachments: [],
-          metadata: {}
-        };
-      } else if (output?.text) {
-        generation = {
-          text: output.text,
-          attachments: output.attachments || [],
-          metadata: output.metadata || {}
-        };
+    if (event.event === "on_chain_end") {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      if (channel === "text" /* TEXT */) {
+        const output = event.data.output;
+        if (output?.answer) {
+          acc.attachments = output.answer.attachments || [];
+          acc.metadata = output.answer.metadata || {};
+        } else if (output?.generation) {
+          acc.attachments = output.generation.attachments || [];
+          acc.metadata = output.generation.metadata || {};
+        } else if (output) {
+          acc.attachments = output.attachments || [];
+          acc.metadata = output.metadata || {};
+        }
       }
-      acc.generation = generation;
       return;
     }
   }
   /**
    * Build final result from accumulator
-   *
-   * Returns content and trace events (metrics should be extracted from trace on backend)
+   * Returns unified content chains from all channels
    */
   getResult(acc) {
+    const allChains = [];
+    for (const [channel, state] of acc.channels.entries()) {
+      if (state.currentBlock) {
+        state.contentChain.push(state.currentBlock);
+      }
+      if (state.contentChain.length > 0) {
+        allChains.push({
+          channel,
+          steps: state.contentChain,
+          isComplete: true
+        });
+      }
+    }
     const startedAt = acc.traceStartedAt ?? Date.now();
     const completedAt = acc.traceCompletedAt ?? startedAt;
     const trace = acc.traceEvents.length > 0 ? {
@@ -4747,26 +4705,19 @@ exports.EventProcessor = class EventProcessor {
       startedAt,
       completedAt,
       durationMs: Math.max(0, completedAt - startedAt),
-      totalEvents: acc.traceEvents.length
-      totalModelCalls: acc.llmCalls.length
+      totalEvents: acc.traceEvents.length
     } : null;
-
-
-
-
-
-
-      firstEventSample: trace.events[0] ? JSON.stringify(trace.events[0]).substring(0, 150) : null,
-      allEventsNull: trace.events.every((e) => e === null),
-      someEventsNull: trace.events.some((e) => e === null)
-      });
-    }
+    this.logger.log("\u{1F4CA} [EventProcessor] Final result assembled", {
+      totalChains: allChains.length,
+      textChains: allChains.filter((c) => c.channel === "text").length,
+      processingChains: allChains.filter((c) => c.channel === "processing").length,
+      totalSteps: allChains.reduce((sum, c) => sum + c.steps.length, 0)
+    });
     return {
       content: {
-
-        attachments: acc.
-        metadata: acc.
-        reasoningChains: acc.reasoningChains.length > 0 ? acc.reasoningChains : void 0
+        contentChains: allChains.length > 0 ? allChains : void 0,
+        attachments: acc.attachments,
+        metadata: acc.metadata
       },
       trace
     };
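
Read together, the `getResult` changes above replace `reasoningChains` with per-channel `contentChains` and drop `totalModelCalls` from the trace summary. A hedged sketch of the returned shape (field names are taken from the diff; the wrapper interfaces and helper are illustrative):

```ts
// Result shape of EventProcessor.getResult() as read from this diff.
// Interface names and the helper below are illustrative, not SDK exports.
interface ContentChain {
  channel: "text" | "processing";
  steps: Array<{ type: string; [key: string]: unknown }>; // finished blocks, in order
  isComplete: boolean;
}

interface ProcessorResult {
  content: {
    contentChains?: ContentChain[];   // undefined when no channel produced blocks
    attachments: unknown[];
    metadata: Record<string, unknown>;
  };
  trace: {
    events: unknown[];
    startedAt: number;
    completedAt: number;
    durationMs: number;
    totalEvents: number;              // totalModelCalls was dropped in 0.1.10
  } | null;
}

// Example: count streamed text blocks across all channels of a result.
function countTextSteps(result: ProcessorResult): number {
  return (result.content.contentChains ?? [])
    .flatMap((chain) => chain.steps)
    .filter((step) => step.type === "text").length;
}
```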
@@ -4887,8 +4838,7 @@ exports.LangGraphEngine = class LangGraphEngine {
       hasContent: !!content,
       hasContext: !!config.configurable?.context,
       hasTrace: !!trace,
-      traceEvents: trace?.events?.length || 0
-      totalModelCalls: trace?.totalModelCalls || 0
+      traceEvents: trace?.events?.length || 0
     });
     if (trace && trace.events.length > 0 && config.configurable?.context) {
       const context = config.configurable.context;
@@ -4906,7 +4856,6 @@ exports.LangGraphEngine = class LangGraphEngine {
       companyId: context.companyId || "unknown",
       events: trace.events,
       totalEvents: trace.totalEvents,
-      totalModelCalls: trace.totalModelCalls,
       startedAt: trace.startedAt,
       completedAt: trace.completedAt,
       durationMs: trace.durationMs
@@ -4989,7 +4938,6 @@ exports.LangGraphEngine = class LangGraphEngine {
       agentId: payload.agentId,
       companyId: payload.companyId,
       totalEvents: payload.totalEvents,
-      totalModelCalls: payload.totalModelCalls,
       startedAt: payload.startedAt,
       completedAt: payload.completedAt,
       durationMs: payload.durationMs,