@flutchai/flutch-sdk 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +266 -284
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +39 -30
- package/dist/index.d.ts +39 -30
- package/dist/index.js +267 -284
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.cjs (CHANGED)
@@ -3900,7 +3900,6 @@ var AttachmentType = /* @__PURE__ */ ((AttachmentType2) => {
 var StreamChannel = /* @__PURE__ */ ((StreamChannel2) => {
   StreamChannel2["TEXT"] = "text";
   StreamChannel2["PROCESSING"] = "processing";
-  StreamChannel2["TOOLS"] = "tools";
   return StreamChannel2;
 })(StreamChannel || {});
 
@@ -4443,15 +4442,15 @@ exports.EventProcessor = class EventProcessor {
    */
   createAccumulator() {
     return {
-      …
-      …
-      …
-      …
+      channels: /* @__PURE__ */ new Map([
+        ["text" /* TEXT */, { contentChain: [], currentBlock: null }],
+        ["processing" /* PROCESSING */, { contentChain: [], currentBlock: null }]
+      ]),
+      attachments: [],
+      metadata: {},
       traceEvents: [],
       traceStartedAt: null,
-      traceCompletedAt: null
-      currentReasoningSteps: [],
-      currentToolUse: null
+      traceCompletedAt: null
     };
   }
   /**
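Note on the accumulator change above: createAccumulator() now keeps one content chain per stream channel instead of the removed streamedText / currentReasoningSteps / currentToolUse style fields (the exact removed field names are collapsed in the diff view). A minimal TypeScript sketch of the resulting shape, reconstructed from this hunk; the type names are illustrative, not the SDK's published typings:

```ts
// Illustrative reconstruction of the 0.1.11 accumulator shape; the SDK's own
// .d.ts files are authoritative, these names are only inferred from the diff.
type ContentBlock =
  | { index: number; type: "text"; text: string }
  | { index: number; type: "tool_use"; name: string; id: string; input: string; output: string };

interface ChannelState {
  contentChain: ContentBlock[];      // blocks already finished, in order
  currentBlock: ContentBlock | null; // block currently being streamed
}

interface Accumulator {
  channels: Map<"text" | "processing", ChannelState>;
  attachments: unknown[];
  metadata: Record<string, unknown>;
  traceEvents: unknown[];
  traceStartedAt: number | null;
  traceCompletedAt: number | null;
}
```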
@@ -4479,10 +4478,88 @@ exports.EventProcessor = class EventProcessor {
     }
     return [];
   }
+  /**
+   * Send delta to UI (unified format)
+   */
+  sendDelta(channel, delta, onPartial) {
+    if (!onPartial) return;
+    onPartial(
+      JSON.stringify({
+        channel,
+        delta
+      })
+    );
+  }
+  /**
+   * Process content stream blocks (universal for all channels)
+   */
+  processContentStream(acc, channel, blocks, onPartial) {
+    const state = acc.channels.get(channel);
+    for (const block of blocks) {
+      if (block.type === "tool_use" || block.type === "tool_call") {
+        if (state.currentBlock) {
+          state.contentChain.push(state.currentBlock);
+        }
+        state.currentBlock = {
+          index: state.contentChain.length,
+          type: "tool_use",
+          name: block.name,
+          id: block.id,
+          input: block.input || "",
+          output: ""
+        };
+        this.sendDelta(
+          channel,
+          {
+            type: "step_started",
+            step: state.currentBlock
+          },
+          onPartial
+        );
+      } else if (block.type === "input_json_delta") {
+        if (state.currentBlock && state.currentBlock.type === "tool_use") {
+          const chunk = block.input || "";
+          state.currentBlock.input += chunk;
+          this.sendDelta(
+            channel,
+            {
+              type: "tool_input_chunk",
+              stepId: state.currentBlock.id,
+              chunk
+            },
+            onPartial
+          );
+        }
+      } else if (block.type === "text") {
+        const textChunk = block.text || "";
+        if (state.currentBlock && state.currentBlock.type === "text") {
+          state.currentBlock.text = (state.currentBlock.text || "") + textChunk;
+        } else {
+          if (state.currentBlock) {
+            state.contentChain.push(state.currentBlock);
+          }
+          state.currentBlock = {
+            index: state.contentChain.length,
+            type: "text",
+            text: textChunk
+          };
+        }
+        this.sendDelta(
+          channel,
+          {
+            type: "text_chunk",
+            text: textChunk
+          },
+          onPartial
+        );
+      }
+    }
+  }
   /**
    * Groups tool_use and input_json_delta into proper structure
    * tool_use.input → output (tool execution result)
    * input_json_delta.input → output (tool execution result, accumulated)
+   * @deprecated This method is for legacy fallback only
    */
   mapReasoningSteps(rawSteps) {
     const steps = [];
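The new sendDelta/processContentStream pair replaces the old per-channel payloads with a single `{ channel, delta }` envelope passed to onPartial as a JSON string. A hedged consumer-side sketch; the envelope fields and delta names come from this hunk, while the handler itself and the Node runtime (for process.stdout) are assumptions:

```ts
// Illustrative handler for the unified onPartial payload emitted in 0.1.11.
interface PartialEnvelope {
  channel: "text" | "processing";
  delta: { type: string; [key: string]: unknown };
}

function handlePartial(raw: string): void {
  const { channel, delta } = JSON.parse(raw) as PartialEnvelope;
  switch (delta.type) {
    case "text_chunk":
      process.stdout.write(`[${channel}] ${delta.text as string}`); // streamed text
      break;
    case "step_started":
      console.log(`[${channel}] tool step started`, delta.step);
      break;
    case "tool_input_chunk":
      console.log(`[${channel}] input chunk for ${delta.stepId as string}`);
      break;
    default:
      console.log(`[${channel}] unhandled delta type: ${delta.type}`);
  }
}
```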
@@ -4531,60 +4608,10 @@ exports.EventProcessor = class EventProcessor {
    */
   processEvent(acc, event, onPartial) {
     this.captureTraceEvent(acc, event);
-    if (event.event === "on_chat_model_stream" && event.…
-      const …
-      const blocks = this.normalizeContentBlocks(chunk);
-      …
-        onPartial(JSON.stringify({ text: blocks }));
-      }
-      const textOnly = blocks.filter((block) => block?.type === "text").map((block) => block.text || "").join("");
-      if (textOnly) {
-        acc.streamedText += textOnly;
-      }
-      return;
-    }
-    if (event.event === "on_chat_model_stream" && event.metadata?.stream_channel === "processing" /* PROCESSING */ && event.data?.chunk?.content) {
-      const chunk = event.data.chunk.content;
-      const blocks = this.normalizeContentBlocks(chunk);
-      for (const block of blocks) {
-        if (block.type === "tool_use" || block.type === "tool_call") {
-          if (acc.currentToolUse) {
-            acc.currentReasoningSteps.push(acc.currentToolUse);
-          }
-          acc.currentToolUse = {
-            index: acc.currentReasoningSteps.length,
-            type: "tool_use",
-            name: block.name,
-            id: block.id,
-            input: block.input || "",
-            output: ""
-          };
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "step_started",
-                  step: acc.currentToolUse
-                }
-              })
-            );
-          }
-        } else if (block.type === "input_json_delta") {
-          if (acc.currentToolUse && onPartial) {
-            const chunk2 = block.input || "";
-            acc.currentToolUse.output += chunk2;
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "output_chunk",
-                  stepId: acc.currentToolUse.id,
-                  chunk: chunk2
-                }
-              })
-            );
-          }
-        }
-      }
+    if (event.event === "on_chat_model_stream" && event.data?.chunk?.content) {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const blocks = this.normalizeContentBlocks(event.data.chunk.content);
+      this.processContentStream(acc, channel, blocks, onPartial);
       return;
     }
     if (event.event === "on_tool_start") {
@@ -4597,11 +4624,27 @@ exports.EventProcessor = class EventProcessor {
       return;
     }
     if (event.event === "on_tool_end") {
-      …
-      …
-      …
-      …
-      …
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const state = acc.channels.get(channel);
+      if (state?.currentBlock && state.currentBlock.type === "tool_use") {
+        const output = event.data?.output;
+        const outputString = typeof output === "string" ? output : JSON.stringify(output, null, 2);
+        state.currentBlock.output = outputString;
+        this.sendDelta(
+          channel,
+          {
+            type: "tool_output_chunk",
+            stepId: state.currentBlock.id,
+            chunk: outputString
+          },
+          onPartial
+        );
+        this.logger.log("\u2705 Tool execution completed", {
+          toolName: event.name,
+          outputPreview: outputString.substring(0, 200) + (outputString.length > 200 ? "..." : ""),
+          runId: event.run_id
+        });
+      }
       return;
     }
     if (event.event === "on_tool_error") {
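With the on_tool_end branch above, the tool result is written onto the pending tool_use block and also streamed as a tool_output_chunk delta. Taken together with the stream handler earlier, the deltas visible in this diff form a small discriminated union; the sketch below is a reconstruction for reference (only the variant names and fields appear in the diff, the union type itself does not):

```ts
// Delta variants observed in this diff; reconstructed union, not an exported SDK type.
type ToolUseBlock = {
  index: number;
  type: "tool_use";
  name: string;
  id: string;
  input: string;
  output: string;
};

type StreamDelta =
  | { type: "step_started"; step: ToolUseBlock }                 // tool_use / tool_call block opened
  | { type: "tool_input_chunk"; stepId: string; chunk: string }  // input_json_delta accumulated
  | { type: "tool_output_chunk"; stepId: string; chunk: string } // emitted on on_tool_end
  | { type: "text_chunk"; text: string };                        // plain text streaming
```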
@@ -4613,133 +4656,48 @@ exports.EventProcessor = class EventProcessor {
       return;
     }
     if (event.event === "on_chat_model_end") {
-      …
-      …
-      …
-      …
-        const llmCall = {
-          modelId,
-          promptTokens: usageMetadata.input_tokens || 0,
-          completionTokens: usageMetadata.output_tokens || 0,
-          totalTokens: usageMetadata.total_tokens || 0,
-          timestamp: Date.now(),
-          nodeName: event.metadata?.langgraph_node || event.name
-        };
-        acc.llmCalls.push(llmCall);
-        this.logger.log("\u2705 LLM call recorded", {
-          modelId,
-          tokens: llmCall.totalTokens,
-          nodeName: llmCall.nodeName,
-          totalRecorded: acc.llmCalls.length
-        });
-      } else {
-        this.logger.warn(
-          "\u26A0\uFE0F Missing usage metadata or modelId in on_chat_model_end",
-          {
-            hasUsageMetadata: !!usageMetadata,
-            hasModelId: !!modelId,
-            eventName: event.name,
-            metadataKeys: event.metadata ? Object.keys(event.metadata) : [],
-            outputKeys: output ? Object.keys(output) : []
-          }
-        );
-      }
-      if (event.metadata?.stream_channel === "processing" /* PROCESSING */) {
-        if (acc.currentToolUse) {
-          acc.currentReasoningSteps.push(acc.currentToolUse);
-          acc.currentToolUse = null;
-        }
-        if (acc.currentReasoningSteps.length > 0) {
-          acc.reasoningChains.push({
-            steps: acc.currentReasoningSteps,
-            isComplete: true
-          });
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "chain_completed"
-                }
-              })
-            );
-          }
-          acc.currentReasoningSteps = [];
-        } else {
-          const stepsRaw = output?.content || // AIMessageChunk object (direct)
-          output?.kwargs?.content || // Serialized LangChain format
-          event.data?.chunk?.content || // Older version
-          [];
-          let steps;
-          if (Array.isArray(stepsRaw)) {
-            steps = this.mapReasoningSteps(stepsRaw);
-          } else if (typeof stepsRaw === "string" && stepsRaw.trim().length > 0) {
-            steps = [
-              {
-                index: 0,
-                type: "text",
-                text: stepsRaw.trim()
-              }
-            ];
-          } else {
-            steps = [];
-          }
-          if (steps.length > 0) {
-            acc.reasoningChains.push({
-              steps,
-              isComplete: true
-            });
-            if (onPartial) {
-              onPartial(
-                JSON.stringify({
-                  processing_delta: {
-                    type: "chain_completed"
-                  }
-                })
-              );
-            }
-          }
-        }
-      }
+      this.logger.debug("\u2705 LLM call completed", {
+        nodeName: event.metadata?.langgraph_node || event.name,
+        channel: event.metadata?.stream_channel
+      });
       return;
     }
-    if (event.event === "on_chain_end"…
-      const …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-      …
-        };
-      } else if (output?.generation?.content) {
-        generation = {
-          text: output.generation.content,
-          attachments: [],
-          metadata: {}
-        };
-      } else if (output?.text) {
-        generation = {
-          text: output.text,
-          attachments: output.attachments || [],
-          metadata: output.metadata || {}
-        };
+    if (event.event === "on_chain_end") {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      if (channel === "text" /* TEXT */) {
+        const output = event.data.output;
+        if (output?.answer) {
+          acc.attachments = output.answer.attachments || [];
+          acc.metadata = output.answer.metadata || {};
+        } else if (output?.generation) {
+          acc.attachments = output.generation.attachments || [];
+          acc.metadata = output.generation.metadata || {};
+        } else if (output) {
+          acc.attachments = output.attachments || [];
+          acc.metadata = output.metadata || {};
+        }
       }
-      acc.generation = generation;
       return;
     }
   }
   /**
    * Build final result from accumulator
-   *
-   * Returns content and trace events (metrics should be extracted from trace on backend)
+   * Returns unified content chains from all channels
    */
   getResult(acc) {
+    const allChains = [];
+    for (const [channel, state] of acc.channels.entries()) {
+      if (state.currentBlock) {
+        state.contentChain.push(state.currentBlock);
+      }
+      if (state.contentChain.length > 0) {
+        allChains.push({
+          channel,
+          steps: state.contentChain,
+          isComplete: true
+        });
+      }
+    }
     const startedAt = acc.traceStartedAt ?? Date.now();
     const completedAt = acc.traceCompletedAt ?? startedAt;
     const trace = acc.traceEvents.length > 0 ? {
@@ -4747,26 +4705,19 @@ exports.EventProcessor = class EventProcessor {
       startedAt,
       completedAt,
       durationMs: Math.max(0, completedAt - startedAt),
-      totalEvents: acc.traceEvents.length
-      totalModelCalls: acc.llmCalls.length
+      totalEvents: acc.traceEvents.length
     } : null;
-    …
-    …
-    …
-    …
-    …
-    …
-        firstEventSample: trace.events[0] ? JSON.stringify(trace.events[0]).substring(0, 150) : null,
-        allEventsNull: trace.events.every((e) => e === null),
-        someEventsNull: trace.events.some((e) => e === null)
-      });
-    }
+    this.logger.log("\u{1F4CA} [EventProcessor] Final result assembled", {
+      totalChains: allChains.length,
+      textChains: allChains.filter((c) => c.channel === "text").length,
+      processingChains: allChains.filter((c) => c.channel === "processing").length,
+      totalSteps: allChains.reduce((sum, c) => sum + c.steps.length, 0)
+    });
     return {
       content: {
-        …
-        attachments: acc.…
-        metadata: acc.…
-        reasoningChains: acc.reasoningChains.length > 0 ? acc.reasoningChains : void 0
+        contentChains: allChains.length > 0 ? allChains : void 0,
+        attachments: acc.attachments,
+        metadata: acc.metadata
       },
       trace
     };
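getResult() now flushes each channel's pending block, returns content.contentChains (replacing reasoningChains and the old streamed-text field), and drops totalModelCalls from the trace summary. A hedged sketch of consuming the reshaped result; the type declarations are reconstructed from this hunk, not taken from the SDK's typings:

```ts
// Reconstructed result shape for EventProcessor.getResult() in 0.1.11.
interface ContentChain {
  channel: "text" | "processing";
  steps: Array<{ type: string; text?: string }>;
  isComplete: boolean;
}

interface ProcessorResult {
  content: {
    contentChains?: ContentChain[];
    attachments: unknown[];
    metadata: Record<string, unknown>;
  };
  trace: { totalEvents: number; startedAt: number; completedAt: number; durationMs: number } | null;
}

function summarize({ content, trace }: ProcessorResult): void {
  for (const chain of content.contentChains ?? []) {
    const text = chain.steps
      .filter((s) => s.type === "text")
      .map((s) => s.text ?? "")
      .join("");
    console.log(`[${chain.channel}] ${chain.steps.length} steps, ${text.length} chars of text`);
  }
  if (trace) {
    // totalModelCalls is gone in 0.1.11; only event counts and timings remain here.
    console.log(`trace: ${trace.totalEvents} events over ${trace.durationMs} ms`);
  }
}
```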
@@ -4887,8 +4838,7 @@ exports.LangGraphEngine = class LangGraphEngine {
       hasContent: !!content,
       hasContext: !!config.configurable?.context,
       hasTrace: !!trace,
-      traceEvents: trace?.events?.length || 0
-      totalModelCalls: trace?.totalModelCalls || 0
+      traceEvents: trace?.events?.length || 0
     });
     if (trace && trace.events.length > 0 && config.configurable?.context) {
       const context = config.configurable.context;
@@ -4906,7 +4856,6 @@ exports.LangGraphEngine = class LangGraphEngine {
         companyId: context.companyId || "unknown",
         events: trace.events,
         totalEvents: trace.totalEvents,
-        totalModelCalls: trace.totalModelCalls,
         startedAt: trace.startedAt,
         completedAt: trace.completedAt,
         durationMs: trace.durationMs
@@ -4989,7 +4938,6 @@ exports.LangGraphEngine = class LangGraphEngine {
       agentId: payload.agentId,
       companyId: payload.companyId,
       totalEvents: payload.totalEvents,
-      totalModelCalls: payload.totalModelCalls,
       startedAt: payload.startedAt,
       completedAt: payload.completedAt,
       durationMs: payload.durationMs,
@@ -5597,37 +5545,43 @@ var McpToolFilter = class _McpToolFilter {
   logger = new common.Logger(_McpToolFilter.name);
   mcpConverter;
   /**
-   * Fetch available tools from MCP runtime with…
-   * @param…
-   * @returns Array of LangChain Tool instances
+   * Fetch available tools from MCP runtime with dynamic schema generation
+   * @param toolsConfig Array of tool configurations with dynamic config
+   * @returns Array of LangChain Tool instances with dynamic schemas
    */
-  async getFilteredTools(…
+  async getFilteredTools(toolsConfig = []) {
     this.logger.debug(
-      `[DEBUG] Getting filtered tools.…
+      `[DEBUG] Getting filtered tools with dynamic schemas. Config: ${JSON.stringify(toolsConfig)}`
     );
     this.logger.debug(`[DEBUG] MCP Runtime URL: ${this.mcpRuntimeUrl}`);
-    if (…
-      this.logger.debug("No tools…
+    if (toolsConfig.length === 0) {
+      this.logger.debug("No tools configured, returning empty array");
       return [];
     }
     try {
-      const filterParam = enabledTools.join(",");
       this.logger.debug(
-        `[DEBUG] Making HTTP request to: ${this.mcpRuntimeUrl}/tools/…
+        `[DEBUG] Making HTTP POST request to: ${this.mcpRuntimeUrl}/tools/schemas`
+      );
+      this.logger.debug(`[DEBUG] Request body: ${JSON.stringify(toolsConfig)}`);
+      const response = await axios2__default.default.post(
+        `${this.mcpRuntimeUrl}/tools/schemas`,
+        { tools: toolsConfig },
+        {
+          timeout: 5e3,
+          headers: {
+            "Content-Type": "application/json"
+          }
+        }
       );
-      const response = await axios2__default.default.get(`${this.mcpRuntimeUrl}/tools/list`, {
-        params: { filter: filterParam },
-        timeout: 5e3
-      });
      this.logger.debug(
        `[DEBUG] HTTP response status: ${response.status}, data length: ${Array.isArray(response.data) ? response.data.length : "not array"}`
      );
-      const …
+      const dynamicTools = Array.isArray(response.data) ? response.data : [];
      this.logger.debug(
-        `Retrieved ${…
+        `Retrieved ${dynamicTools.length} dynamic tool schemas from MCP Runtime`
      );
      const mcpClient = {
-        getTools: async () =>…
+        getTools: async () => dynamicTools,
        executeTool: async (name, args) => {
          this.logger.debug(`[DEBUG] Executing tool ${name} with args:`, args);
          const response2 = await axios2__default.default.post(
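getFilteredTools now POSTs the per-tool configuration to `/tools/schemas` instead of GETting `/tools/list?filter=...`. Below is a standalone sketch of an equivalent request with fetch: the endpoint path, the `{ tools: ... }` body and the 5-second timeout come from this hunk, while the ToolConfig fields are inferred from hashToolsConfig later in the diff and the Node 18+ fetch/AbortSignal.timeout APIs are assumptions (the SDK itself uses axios):

```ts
// Illustrative stand-in for the new MCP Runtime call.
interface ToolConfig {
  toolName: string;
  enabled?: boolean;
  config?: Record<string, unknown>;
}

async function fetchToolSchemas(mcpRuntimeUrl: string, tools: ToolConfig[]): Promise<unknown[]> {
  const response = await fetch(`${mcpRuntimeUrl}/tools/schemas`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ tools }),   // same body shape as the diff
    signal: AbortSignal.timeout(5000), // mirrors the axios timeout: 5e3
  });
  const data: unknown = await response.json();
  return Array.isArray(data) ? data : []; // same fallback the SDK applies
}
```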
@@ -5642,20 +5596,20 @@ var McpToolFilter = class _McpToolFilter {
        isHealthy: async () => true
      };
      this.logger.log(
-        `\u{1F680} [McpToolFilter] Converting ${…
+        `\u{1F680} [McpToolFilter] Converting ${dynamicTools.length} dynamic tools using McpConverter`
      );
-      const tools = await this.mcpConverter.convertTools(…
+      const tools = await this.mcpConverter.convertTools(dynamicTools);
      this.logger.log(
        `\u{1F680} [McpToolFilter] Converted tools: ${tools.map((t) => t.name).join(", ")}`
      );
      this.logger.log(
-        `Configured ${tools.length} tools from MCP runtime: ${…
+        `Configured ${tools.length} tools with dynamic schemas from MCP runtime: ${dynamicTools.map((t) => t.name).join(", ")}`
      );
      return tools;
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      this.logger.warn(
-        `[DEBUG] Failed to fetch…
+        `[DEBUG] Failed to fetch dynamic tool schemas from MCP runtime (${this.mcpRuntimeUrl}): ${errorMessage}`
      );
      this.logger.warn(`[DEBUG] Error details:`, {
        error,
@@ -6200,8 +6154,30 @@ var ModelInitializer = class _ModelInitializer {
   /**
    * Generate cache key for model instances based on configuration
    */
-  …
-  …
+  /**
+   * Generate hash from toolsConfig for cache key
+   * Uses MD5 hash to create short, unique identifier
+   */
+  hashToolsConfig(toolsConfig) {
+    const sorted = toolsConfig.map((t) => `${t.toolName}:${t.enabled}:${JSON.stringify(t.config || {})}`).sort().join("|");
+    return crypto.createHash("md5").update(sorted).digest("hex").slice(0, 16);
+  }
+  /**
+   * Generate cache key from ModelByIdConfig
+   * Format: modelId:temperature:maxTokens[:toolsHash]
+   * Example: "model123:0.7:4096" or "model123:0.7:4096:a1b2c3d4e5f6g7h8"
+   */
+  generateModelCacheKey(config) {
+    const parts = [
+      config.modelId,
+      config.temperature ?? "default",
+      config.maxTokens ?? "default"
+    ];
+    if (config.toolsConfig && config.toolsConfig.length > 0) {
+      const toolsHash = this.hashToolsConfig(config.toolsConfig);
+      parts.push(toolsHash);
+    }
+    return parts.join(":");
   }
   /**
    * TEMPORARY SOLUTION for compatibility with new OpenAI models
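The cache key now encodes the whole ModelByIdConfig and appends a 16-character MD5 hash of toolsConfig, so models bound to different tool sets no longer collide in the instance cache. A small sketch reproducing the key format from this hunk; the config values are made up for illustration:

```ts
import { createHash } from "node:crypto";

interface ToolConfig {
  toolName: string;
  enabled?: boolean;
  config?: Record<string, unknown>;
}

interface ModelCacheConfig {
  modelId: string;
  temperature?: number;
  maxTokens?: number;
  toolsConfig?: ToolConfig[];
}

// Mirrors hashToolsConfig from the diff: sort per-tool strings, MD5, keep 16 hex chars.
function hashToolsConfig(toolsConfig: ToolConfig[]): string {
  const sorted = toolsConfig
    .map((t) => `${t.toolName}:${t.enabled}:${JSON.stringify(t.config ?? {})}`)
    .sort()
    .join("|");
  return createHash("md5").update(sorted).digest("hex").slice(0, 16);
}

// Hypothetical config; prints something like "model123:0.7:4096:<16-hex-chars>".
const config: ModelCacheConfig = {
  modelId: "model123",
  temperature: 0.7,
  maxTokens: 4096,
  toolsConfig: [{ toolName: "web_search", enabled: true, config: { topK: 3 } }],
};

const parts: Array<string | number> = [
  config.modelId,
  config.temperature ?? "default",
  config.maxTokens ?? "default",
];
if (config.toolsConfig && config.toolsConfig.length > 0) {
  parts.push(hashToolsConfig(config.toolsConfig));
}
console.log(parts.join(":"));
```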
@@ -6438,12 +6414,7 @@ var ModelInitializer = class _ModelInitializer {
     ["voyageai" /* VOYAGEAI */]: void 0
   };
   async initializeChatModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      config.temperature,
-      config.maxTokens,
-      "chat" /* CHAT */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached chat model instance: ${cacheKey}`);
@@ -6481,17 +6452,70 @@ var ModelInitializer = class _ModelInitializer {
       metadataKeys: Object.keys(model.metadata || {}),
       hasModelId: !!model.metadata?.modelId
     });
+    this.logger.debug(`[TOOLS CHECK] toolsConfig exists: ${!!config.toolsConfig}, customTools exists: ${!!config.customTools}`);
+    if (config.toolsConfig) {
+      this.logger.debug(`[TOOLS CHECK] toolsConfig length: ${config.toolsConfig.length}, content: ${JSON.stringify(config.toolsConfig)}`);
+    }
+    if (config.toolsConfig || config.customTools) {
+      this.logger.debug(`[TOOLS] Calling bindToolsToModel with toolsConfig: ${JSON.stringify(config.toolsConfig)}`);
+      const boundModel = await this.bindToolsToModel(
+        model,
+        config.toolsConfig,
+        config.customTools
+      );
+      this.logger.debug(`[TOOLS] bindToolsToModel returned successfully`);
+      this.modelInstanceCache.set(cacheKey, boundModel);
+      return boundModel;
+    }
     this.modelInstanceCache.set(cacheKey, model);
     return model;
   }
+  /**
+   * Bind tools to model (merge toolsConfig and customTools)
+   * For toolsConfig: fetch tool executors from MCP Runtime
+   * For customTools: use as-is (already prepared DynamicStructuredTool)
+   *
+   * Returns:
+   * - Runnable when tools are bound (model.bindTools returns Runnable)
+   * - BaseChatModel when no tools
+   */
+  async bindToolsToModel(model, toolsConfig, customTools) {
+    const allTools = [];
+    if (toolsConfig && toolsConfig.length > 0) {
+      try {
+        const enabledToolsConfig = toolsConfig.filter((tc) => tc.enabled !== false);
+        if (enabledToolsConfig.length > 0) {
+          this.logger.debug(
+            `Fetching ${enabledToolsConfig.length} tools with dynamic schemas from MCP Runtime: ${enabledToolsConfig.map((tc) => tc.toolName).join(", ")}`
+          );
+          const mcpToolFilter = new McpToolFilter();
+          const mcpTools = await mcpToolFilter.getFilteredTools(
+            enabledToolsConfig
+          );
+          this.logger.debug(
+            `Successfully fetched ${mcpTools.length} tools with dynamic schemas from MCP Runtime`
+          );
+          allTools.push(...mcpTools);
+        }
+      } catch (error) {
+        this.logger.error(
+          `Failed to fetch tools from MCP Runtime: ${error instanceof Error ? error.message : String(error)}`
+        );
+      }
+    }
+    if (customTools && customTools.length > 0) {
+      allTools.push(...customTools);
+      this.logger.debug(`Added ${customTools.length} custom tools to model`);
+    }
+    if (allTools.length > 0) {
+      this.logger.debug(`Binding ${allTools.length} tools to model`);
+      const modelWithTools = model.bindTools(allTools);
+      return modelWithTools;
+    }
+    return model;
+  }
   async initializeRerankModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      void 0,
-      // rerank models typically don't use temperature
-      config.maxTokens,
-      "rerank" /* RERANK */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached rerank model instance: ${cacheKey}`);
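initializeChatModel (and the rerank/embedding initializers) now derive the cache key from the full config, and when toolsConfig or customTools are present the new bindToolsToModel helper resolves MCP tools via McpToolFilter, merges in customTools, and caches the result of model.bindTools(...). A hedged usage sketch follows; whether ModelInitializer and this exact config shape are part of the public API is not confirmed by the diff, so treat the call below as pseudocode:

```ts
// Hypothetical caller; the initializer surface is declared here rather than imported.
interface ToolConfig {
  toolName: string;
  enabled?: boolean;
  config?: Record<string, unknown>;
}

declare const modelInitializer: {
  initializeChatModel(config: {
    modelId: string;
    temperature?: number;
    maxTokens?: number;
    toolsConfig?: ToolConfig[];
    customTools?: unknown[]; // pre-built DynamicStructuredTool instances, used as-is
  }): Promise<unknown>;      // a Runnable when tools were bound, otherwise the chat model
};

async function example(): Promise<void> {
  const chatModel = await modelInitializer.initializeChatModel({
    modelId: "model123",                                      // hypothetical id
    temperature: 0.7,
    maxTokens: 4096,
    toolsConfig: [{ toolName: "web_search", enabled: true }], // fetched from MCP Runtime
  });
  void chatModel; // tool binding and caching happen inside the SDK now
}
```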
@@ -6519,14 +6543,7 @@ var ModelInitializer = class _ModelInitializer {
     return model;
   }
   async initializeEmbeddingModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      void 0,
-      // embedding models typically don't use temperature
-      void 0,
-      // embedding models typically don't use maxTokens
-      "embedding" /* EMBEDDING */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached embedding model instance: ${cacheKey}`);
@@ -6674,7 +6691,7 @@ var ModelInitializer = class _ModelInitializer {
   }
   // Simple API request for microservices (copy from original LLMInitializer)
   async fetchFromApi(modelId) {
-    const apiUrl = process.env.API_URL…
+    const apiUrl = process.env.API_URL;
     const token = process.env.INTERNAL_API_TOKEN;
     if (!token) {
       throw new Error("INTERNAL_API_TOKEN required for API mode");
@@ -6718,40 +6735,6 @@ var ModelInitializer = class _ModelInitializer {
     return result;
   }
 };
-function prepareModelWithTools(model, tools, baseConfig = {}) {
-  if (tools.length === 0) {
-    return {
-      modelWithTools: model,
-      finalConfig: baseConfig,
-      toolsMethod: "none"
-    };
-  }
-  if (model.bindTools && typeof model.bindTools === "function") {
-    try {
-      const modelWithTools = model.bindTools(tools);
-      return {
-        modelWithTools,
-        finalConfig: baseConfig,
-        toolsMethod: "bindTools"
-      };
-    } catch (error) {
-      const invokeConfig2 = { tools };
-      const finalConfig2 = { ...baseConfig, ...invokeConfig2 };
-      return {
-        modelWithTools: model,
-        finalConfig: finalConfig2,
-        toolsMethod: "manual"
-      };
-    }
-  }
-  const invokeConfig = { tools };
-  const finalConfig = { ...baseConfig, ...invokeConfig };
-  return {
-    modelWithTools: model,
-    finalConfig,
-    toolsMethod: "manual"
-  };
-}
 
 // src/retriever/enums.ts
 var RetrieverSearchType = /* @__PURE__ */ ((RetrieverSearchType2) => {
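The standalone prepareModelWithTools helper removed above (its export disappears in the final hunk below) has no direct replacement in this diff. Based on the ModelInitializer changes, the intended substitute appears to be the config-driven path, where tools travel with the model config and bindToolsToModel calls model.bindTools internally; the before/after sketch below is an assumption about intended usage, not a documented migration:

```ts
// 0.1.9 (removed): tools were bound around an already-initialized model.
//   const { modelWithTools, finalConfig } = prepareModelWithTools(model, tools, baseConfig);
//
// 0.1.11 (sketch): pass the tools alongside the model config instead.
const migratedConfig = {
  modelId: "model123",                                      // hypothetical
  toolsConfig: [{ toolName: "web_search", enabled: true }], // resolved via MCP Runtime
  customTools: [] as unknown[],                             // roughly the old `tools` argument
};
void migratedConfig;
```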
@@ -7101,7 +7084,6 @@ exports.getUIEndpointClassMetadata = getUIEndpointClassMetadata;
 exports.getUIEndpointMethodsMetadata = getUIEndpointMethodsMetadata;
 exports.hasCallbacks = hasCallbacks;
 exports.hasUIEndpoints = hasUIEndpoints;
-exports.prepareModelWithTools = prepareModelWithTools;
 exports.registerFinanceExampleCallback = registerFinanceExampleCallback;
 exports.registerUIEndpointsFromClass = registerUIEndpointsFromClass;
 exports.sanitizeTraceData = sanitizeTraceData;