@flutchai/flutch-sdk 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +266 -284
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +39 -30
- package/dist/index.d.ts +39 -30
- package/dist/index.js +267 -284
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -3871,7 +3871,6 @@ var AttachmentType = /* @__PURE__ */ ((AttachmentType2) => {
 var StreamChannel = /* @__PURE__ */ ((StreamChannel2) => {
   StreamChannel2["TEXT"] = "text";
   StreamChannel2["PROCESSING"] = "processing";
-  StreamChannel2["TOOLS"] = "tools";
   return StreamChannel2;
 })(StreamChannel || {});

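The only change in this hunk is the removal of the TOOLS member, leaving two stream channels. A minimal TypeScript sketch of the resulting values as a consumer might reference them (reconstructed from the compiled output above, not from the package's .d.ts):

    // Reconstructed shape of StreamChannel in 0.1.11; the "tools" channel no longer exists.
    enum StreamChannel {
      TEXT = "text",
      PROCESSING = "processing",
    }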
@@ -4414,15 +4413,15 @@ var EventProcessor = class {
    */
   createAccumulator() {
     return {
-
-
-
-
+      channels: /* @__PURE__ */ new Map([
+        ["text" /* TEXT */, { contentChain: [], currentBlock: null }],
+        ["processing" /* PROCESSING */, { contentChain: [], currentBlock: null }]
+      ]),
+      attachments: [],
+      metadata: {},
       traceEvents: [],
       traceStartedAt: null,
-      traceCompletedAt: null
-      currentReasoningSteps: [],
-      currentToolUse: null
+      traceCompletedAt: null
     };
   }
   /**
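createAccumulator now keys streaming state by channel instead of carrying a single reasoning chain. A hedged sketch of the accumulator shape implied by the new code (field types are inferred from the compiled output; the published .d.ts is authoritative):

    // Inferred from createAccumulator() above; `unknown` stands in for block/event types not shown in this diff.
    interface ChannelState {
      contentChain: unknown[];       // finished blocks for this channel
      currentBlock: unknown | null;  // block currently being streamed
    }

    interface StreamAccumulator {
      channels: Map<string, ChannelState>; // seeded with "text" and "processing"
      attachments: unknown[];
      metadata: Record<string, unknown>;
      traceEvents: unknown[];
      traceStartedAt: number | null;
      traceCompletedAt: number | null;
    }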
@@ -4450,10 +4449,88 @@ var EventProcessor = class {
     }
     return [];
   }
+  /**
+   * Send delta to UI (unified format)
+   */
+  sendDelta(channel, delta, onPartial) {
+    if (!onPartial) return;
+    onPartial(
+      JSON.stringify({
+        channel,
+        delta
+      })
+    );
+  }
+  /**
+   * Process content stream blocks (universal for all channels)
+   */
+  processContentStream(acc, channel, blocks, onPartial) {
+    const state = acc.channels.get(channel);
+    for (const block of blocks) {
+      if (block.type === "tool_use" || block.type === "tool_call") {
+        if (state.currentBlock) {
+          state.contentChain.push(state.currentBlock);
+        }
+        state.currentBlock = {
+          index: state.contentChain.length,
+          type: "tool_use",
+          name: block.name,
+          id: block.id,
+          input: block.input || "",
+          output: ""
+        };
+        this.sendDelta(
+          channel,
+          {
+            type: "step_started",
+            step: state.currentBlock
+          },
+          onPartial
+        );
+      } else if (block.type === "input_json_delta") {
+        if (state.currentBlock && state.currentBlock.type === "tool_use") {
+          const chunk = block.input || "";
+          state.currentBlock.input += chunk;
+          this.sendDelta(
+            channel,
+            {
+              type: "tool_input_chunk",
+              stepId: state.currentBlock.id,
+              chunk
+            },
+            onPartial
+          );
+        }
+      } else if (block.type === "text") {
+        const textChunk = block.text || "";
+        if (state.currentBlock && state.currentBlock.type === "text") {
+          state.currentBlock.text = (state.currentBlock.text || "") + textChunk;
+        } else {
+          if (state.currentBlock) {
+            state.contentChain.push(state.currentBlock);
+          }
+          state.currentBlock = {
+            index: state.contentChain.length,
+            type: "text",
+            text: textChunk
+          };
+        }
+        this.sendDelta(
+          channel,
+          {
+            type: "text_chunk",
+            text: textChunk
+          },
+          onPartial
+        );
+      }
+    }
+  }
   /**
    * Groups tool_use and input_json_delta into proper structure
    * tool_use.input → output (tool execution result)
    * input_json_delta.input → output (tool execution result, accumulated)
+   * @deprecated This method is for legacy fallback only
    */
   mapReasoningSteps(rawSteps) {
     const steps = [];
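sendDelta wraps every partial update in a single { channel, delta } envelope instead of the old per-channel keys (text / processing_delta), and processContentStream emits step_started, tool_input_chunk and text_chunk deltas; the on_tool_end hunk further down adds tool_output_chunk. A hedged consumer-side sketch of parsing that envelope, covering only the delta variants visible in this diff:

    // Sketch of an onPartial handler for the unified envelope produced by sendDelta().
    type Delta =
      | { type: "step_started"; step: unknown }
      | { type: "tool_input_chunk"; stepId: string; chunk: string }
      | { type: "tool_output_chunk"; stepId: string; chunk: string }
      | { type: "text_chunk"; text: string };

    function handlePartial(raw: string): void {
      const { channel, delta } = JSON.parse(raw) as { channel: string; delta: Delta };
      if (channel === "text" && delta.type === "text_chunk") {
        process.stdout.write(delta.text); // stream the visible answer as it arrives
      } else if (delta.type === "tool_output_chunk") {
        console.log(`tool step ${delta.stepId} produced ${delta.chunk.length} chars`);
      }
    }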
@@ -4502,60 +4579,10 @@ var EventProcessor = class {
    */
   processEvent(acc, event, onPartial) {
     this.captureTraceEvent(acc, event);
-    if (event.event === "on_chat_model_stream" && event.
-      const
-      const blocks = this.normalizeContentBlocks(chunk);
-
-        onPartial(JSON.stringify({ text: blocks }));
-      }
-      const textOnly = blocks.filter((block) => block?.type === "text").map((block) => block.text || "").join("");
-      if (textOnly) {
-        acc.streamedText += textOnly;
-      }
-      return;
-    }
-    if (event.event === "on_chat_model_stream" && event.metadata?.stream_channel === "processing" /* PROCESSING */ && event.data?.chunk?.content) {
-      const chunk = event.data.chunk.content;
-      const blocks = this.normalizeContentBlocks(chunk);
-      for (const block of blocks) {
-        if (block.type === "tool_use" || block.type === "tool_call") {
-          if (acc.currentToolUse) {
-            acc.currentReasoningSteps.push(acc.currentToolUse);
-          }
-          acc.currentToolUse = {
-            index: acc.currentReasoningSteps.length,
-            type: "tool_use",
-            name: block.name,
-            id: block.id,
-            input: block.input || "",
-            output: ""
-          };
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "step_started",
-                  step: acc.currentToolUse
-                }
-              })
-            );
-          }
-        } else if (block.type === "input_json_delta") {
-          if (acc.currentToolUse && onPartial) {
-            const chunk2 = block.input || "";
-            acc.currentToolUse.output += chunk2;
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "output_chunk",
-                  stepId: acc.currentToolUse.id,
-                  chunk: chunk2
-                }
-              })
-            );
-          }
-        }
-      }
+    if (event.event === "on_chat_model_stream" && event.data?.chunk?.content) {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const blocks = this.normalizeContentBlocks(event.data.chunk.content);
+      this.processContentStream(acc, channel, blocks, onPartial);
       return;
     }
     if (event.event === "on_tool_start") {
@@ -4568,11 +4595,27 @@ var EventProcessor = class {
       return;
     }
     if (event.event === "on_tool_end") {
-
-
-
-
-
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      const state = acc.channels.get(channel);
+      if (state?.currentBlock && state.currentBlock.type === "tool_use") {
+        const output = event.data?.output;
+        const outputString = typeof output === "string" ? output : JSON.stringify(output, null, 2);
+        state.currentBlock.output = outputString;
+        this.sendDelta(
+          channel,
+          {
+            type: "tool_output_chunk",
+            stepId: state.currentBlock.id,
+            chunk: outputString
+          },
+          onPartial
+        );
+        this.logger.log("\u2705 Tool execution completed", {
+          toolName: event.name,
+          outputPreview: outputString.substring(0, 200) + (outputString.length > 200 ? "..." : ""),
+          runId: event.run_id
+        });
+      }
       return;
     }
     if (event.event === "on_tool_error") {
@@ -4584,133 +4627,48 @@ var EventProcessor = class {
       return;
     }
     if (event.event === "on_chat_model_end") {
-
-
-
-
-        const llmCall = {
-          modelId,
-          promptTokens: usageMetadata.input_tokens || 0,
-          completionTokens: usageMetadata.output_tokens || 0,
-          totalTokens: usageMetadata.total_tokens || 0,
-          timestamp: Date.now(),
-          nodeName: event.metadata?.langgraph_node || event.name
-        };
-        acc.llmCalls.push(llmCall);
-        this.logger.log("\u2705 LLM call recorded", {
-          modelId,
-          tokens: llmCall.totalTokens,
-          nodeName: llmCall.nodeName,
-          totalRecorded: acc.llmCalls.length
-        });
-      } else {
-        this.logger.warn(
-          "\u26A0\uFE0F Missing usage metadata or modelId in on_chat_model_end",
-          {
-            hasUsageMetadata: !!usageMetadata,
-            hasModelId: !!modelId,
-            eventName: event.name,
-            metadataKeys: event.metadata ? Object.keys(event.metadata) : [],
-            outputKeys: output ? Object.keys(output) : []
-          }
-        );
-      }
-      if (event.metadata?.stream_channel === "processing" /* PROCESSING */) {
-        if (acc.currentToolUse) {
-          acc.currentReasoningSteps.push(acc.currentToolUse);
-          acc.currentToolUse = null;
-        }
-        if (acc.currentReasoningSteps.length > 0) {
-          acc.reasoningChains.push({
-            steps: acc.currentReasoningSteps,
-            isComplete: true
-          });
-          if (onPartial) {
-            onPartial(
-              JSON.stringify({
-                processing_delta: {
-                  type: "chain_completed"
-                }
-              })
-            );
-          }
-          acc.currentReasoningSteps = [];
-        } else {
-          const stepsRaw = output?.content || // AIMessageChunk object (direct)
-          output?.kwargs?.content || // Serialized LangChain format
-          event.data?.chunk?.content || // Older version
-          [];
-          let steps;
-          if (Array.isArray(stepsRaw)) {
-            steps = this.mapReasoningSteps(stepsRaw);
-          } else if (typeof stepsRaw === "string" && stepsRaw.trim().length > 0) {
-            steps = [
-              {
-                index: 0,
-                type: "text",
-                text: stepsRaw.trim()
-              }
-            ];
-          } else {
-            steps = [];
-          }
-          if (steps.length > 0) {
-            acc.reasoningChains.push({
-              steps,
-              isComplete: true
-            });
-            if (onPartial) {
-              onPartial(
-                JSON.stringify({
-                  processing_delta: {
-                    type: "chain_completed"
-                  }
-                })
-              );
-            }
-          }
-        }
-      }
+      this.logger.debug("\u2705 LLM call completed", {
+        nodeName: event.metadata?.langgraph_node || event.name,
+        channel: event.metadata?.stream_channel
+      });
       return;
     }
-    if (event.event === "on_chain_end"
-      const
-
-
-
-
-
-
-
-
-
-
-
-
-      };
-      } else if (output?.generation?.content) {
-        generation = {
-          text: output.generation.content,
-          attachments: [],
-          metadata: {}
-        };
-      } else if (output?.text) {
-        generation = {
-          text: output.text,
-          attachments: output.attachments || [],
-          metadata: output.metadata || {}
-        };
+    if (event.event === "on_chain_end") {
+      const channel = event.metadata?.stream_channel ?? "text" /* TEXT */;
+      if (channel === "text" /* TEXT */) {
+        const output = event.data.output;
+        if (output?.answer) {
+          acc.attachments = output.answer.attachments || [];
+          acc.metadata = output.answer.metadata || {};
+        } else if (output?.generation) {
+          acc.attachments = output.generation.attachments || [];
+          acc.metadata = output.generation.metadata || {};
+        } else if (output) {
+          acc.attachments = output.attachments || [];
+          acc.metadata = output.metadata || {};
+        }
       }
-      acc.generation = generation;
       return;
     }
   }
   /**
    * Build final result from accumulator
-   *
-   * Returns content and trace events (metrics should be extracted from trace on backend)
+   * Returns unified content chains from all channels
    */
   getResult(acc) {
+    const allChains = [];
+    for (const [channel, state] of acc.channels.entries()) {
+      if (state.currentBlock) {
+        state.contentChain.push(state.currentBlock);
+      }
+      if (state.contentChain.length > 0) {
+        allChains.push({
+          channel,
+          steps: state.contentChain,
+          isComplete: true
+        });
+      }
+    }
     const startedAt = acc.traceStartedAt ?? Date.now();
     const completedAt = acc.traceCompletedAt ?? startedAt;
     const trace = acc.traceEvents.length > 0 ? {
@@ -4718,26 +4676,19 @@ var EventProcessor = class {
       startedAt,
       completedAt,
       durationMs: Math.max(0, completedAt - startedAt),
-      totalEvents: acc.traceEvents.length
-      totalModelCalls: acc.llmCalls.length
+      totalEvents: acc.traceEvents.length
     } : null;
-
-
-
-
-
-
-        firstEventSample: trace.events[0] ? JSON.stringify(trace.events[0]).substring(0, 150) : null,
-        allEventsNull: trace.events.every((e) => e === null),
-        someEventsNull: trace.events.some((e) => e === null)
-      });
-    }
+    this.logger.log("\u{1F4CA} [EventProcessor] Final result assembled", {
+      totalChains: allChains.length,
+      textChains: allChains.filter((c) => c.channel === "text").length,
+      processingChains: allChains.filter((c) => c.channel === "processing").length,
+      totalSteps: allChains.reduce((sum, c) => sum + c.steps.length, 0)
+    });
     return {
       content: {
-
-        attachments: acc.
-        metadata: acc.
-        reasoningChains: acc.reasoningChains.length > 0 ? acc.reasoningChains : void 0
+        contentChains: allChains.length > 0 ? allChains : void 0,
+        attachments: acc.attachments,
+        metadata: acc.metadata
       },
       trace
     };
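getResult now flushes each channel's current block, packages the chains as { channel, steps, isComplete }, and no longer returns the old streamedText/reasoningChains/llmCalls data. A hedged sketch of the returned shape (names taken from the compiled code above; types are guesses):

    // Inferred result shape of EventProcessor.getResult() in 0.1.11.
    interface ContentChain {
      channel: string;   // "text" | "processing"
      steps: unknown[];  // accumulated content blocks
      isComplete: boolean;
    }

    interface EventProcessorResult {
      content: {
        contentChains?: ContentChain[]; // undefined when nothing was streamed
        attachments: unknown[];
        metadata: Record<string, unknown>;
      };
      trace: {
        events: unknown[];
        startedAt: number;
        completedAt: number;
        durationMs: number;
        totalEvents: number; // totalModelCalls is gone in 0.1.11
      } | null;
    }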
@@ -4858,8 +4809,7 @@ var LangGraphEngine = class {
       hasContent: !!content,
       hasContext: !!config.configurable?.context,
       hasTrace: !!trace,
-      traceEvents: trace?.events?.length || 0
-      totalModelCalls: trace?.totalModelCalls || 0
+      traceEvents: trace?.events?.length || 0
     });
     if (trace && trace.events.length > 0 && config.configurable?.context) {
       const context = config.configurable.context;
@@ -4877,7 +4827,6 @@
         companyId: context.companyId || "unknown",
         events: trace.events,
         totalEvents: trace.totalEvents,
-        totalModelCalls: trace.totalModelCalls,
         startedAt: trace.startedAt,
         completedAt: trace.completedAt,
         durationMs: trace.durationMs
@@ -4960,7 +4909,6 @@
       agentId: payload.agentId,
       companyId: payload.companyId,
       totalEvents: payload.totalEvents,
-      totalModelCalls: payload.totalModelCalls,
       startedAt: payload.startedAt,
       completedAt: payload.completedAt,
       durationMs: payload.durationMs,
@@ -5568,37 +5516,43 @@ var McpToolFilter = class _McpToolFilter {
   logger = new Logger(_McpToolFilter.name);
   mcpConverter;
   /**
-   * Fetch available tools from MCP runtime with
-   * @param
-   * @returns Array of LangChain Tool instances
+   * Fetch available tools from MCP runtime with dynamic schema generation
+   * @param toolsConfig Array of tool configurations with dynamic config
+   * @returns Array of LangChain Tool instances with dynamic schemas
    */
-  async getFilteredTools(
+  async getFilteredTools(toolsConfig = []) {
     this.logger.debug(
-      `[DEBUG] Getting filtered tools.
+      `[DEBUG] Getting filtered tools with dynamic schemas. Config: ${JSON.stringify(toolsConfig)}`
     );
     this.logger.debug(`[DEBUG] MCP Runtime URL: ${this.mcpRuntimeUrl}`);
-    if (
-      this.logger.debug("No tools
+    if (toolsConfig.length === 0) {
+      this.logger.debug("No tools configured, returning empty array");
       return [];
     }
     try {
-      const filterParam = enabledTools.join(",");
       this.logger.debug(
-        `[DEBUG] Making HTTP request to: ${this.mcpRuntimeUrl}/tools/
+        `[DEBUG] Making HTTP POST request to: ${this.mcpRuntimeUrl}/tools/schemas`
+      );
+      this.logger.debug(`[DEBUG] Request body: ${JSON.stringify(toolsConfig)}`);
+      const response = await axios2.post(
+        `${this.mcpRuntimeUrl}/tools/schemas`,
+        { tools: toolsConfig },
+        {
+          timeout: 5e3,
+          headers: {
+            "Content-Type": "application/json"
+          }
+        }
       );
-      const response = await axios2.get(`${this.mcpRuntimeUrl}/tools/list`, {
-        params: { filter: filterParam },
-        timeout: 5e3
-      });
       this.logger.debug(
         `[DEBUG] HTTP response status: ${response.status}, data length: ${Array.isArray(response.data) ? response.data.length : "not array"}`
       );
-      const
+      const dynamicTools = Array.isArray(response.data) ? response.data : [];
      this.logger.debug(
-        `Retrieved ${
+        `Retrieved ${dynamicTools.length} dynamic tool schemas from MCP Runtime`
      );
      const mcpClient = {
-        getTools: async () =>
+        getTools: async () => dynamicTools,
        executeTool: async (name, args) => {
          this.logger.debug(`[DEBUG] Executing tool ${name} with args:`, args);
          const response2 = await axios2.post(
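Tool discovery now goes through a POST to /tools/schemas with the tool configuration in the request body, replacing the old GET /tools/list?filter=... call. A hedged sketch of the equivalent request as a caller or a test double for the MCP runtime would see it; the base URL and the tool entry are placeholders, not values from the package:

    import axios from "axios";

    const mcpRuntimeUrl = "http://localhost:3000"; // placeholder for wherever the MCP runtime listens
    const toolsConfig = [
      { toolName: "web_search", enabled: true, config: { maxResults: 5 } }, // hypothetical tool entry
    ];

    async function fetchToolSchemas(): Promise<unknown[]> {
      const response = await axios.post(
        `${mcpRuntimeUrl}/tools/schemas`,
        { tools: toolsConfig },
        { timeout: 5000, headers: { "Content-Type": "application/json" } },
      );
      // The SDK treats anything that is not an array as "no tools".
      return Array.isArray(response.data) ? response.data : [];
    }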
@@ -5613,20 +5567,20 @@ var McpToolFilter = class _McpToolFilter {
         isHealthy: async () => true
       };
       this.logger.log(
-        `\u{1F680} [McpToolFilter] Converting ${
+        `\u{1F680} [McpToolFilter] Converting ${dynamicTools.length} dynamic tools using McpConverter`
       );
-      const tools = await this.mcpConverter.convertTools(
+      const tools = await this.mcpConverter.convertTools(dynamicTools);
       this.logger.log(
         `\u{1F680} [McpToolFilter] Converted tools: ${tools.map((t) => t.name).join(", ")}`
       );
       this.logger.log(
-        `Configured ${tools.length} tools from MCP runtime: ${
+        `Configured ${tools.length} tools with dynamic schemas from MCP runtime: ${dynamicTools.map((t) => t.name).join(", ")}`
       );
       return tools;
     } catch (error) {
       const errorMessage = error instanceof Error ? error.message : String(error);
       this.logger.warn(
-        `[DEBUG] Failed to fetch
+        `[DEBUG] Failed to fetch dynamic tool schemas from MCP runtime (${this.mcpRuntimeUrl}): ${errorMessage}`
       );
       this.logger.warn(`[DEBUG] Error details:`, {
         error,
@@ -6171,8 +6125,30 @@ var ModelInitializer = class _ModelInitializer {
   /**
    * Generate cache key for model instances based on configuration
    */
-
-
+  /**
+   * Generate hash from toolsConfig for cache key
+   * Uses MD5 hash to create short, unique identifier
+   */
+  hashToolsConfig(toolsConfig) {
+    const sorted = toolsConfig.map((t) => `${t.toolName}:${t.enabled}:${JSON.stringify(t.config || {})}`).sort().join("|");
+    return createHash("md5").update(sorted).digest("hex").slice(0, 16);
+  }
+  /**
+   * Generate cache key from ModelByIdConfig
+   * Format: modelId:temperature:maxTokens[:toolsHash]
+   * Example: "model123:0.7:4096" or "model123:0.7:4096:a1b2c3d4e5f6g7h8"
+   */
+  generateModelCacheKey(config) {
+    const parts = [
+      config.modelId,
+      config.temperature ?? "default",
+      config.maxTokens ?? "default"
+    ];
+    if (config.toolsConfig && config.toolsConfig.length > 0) {
+      const toolsHash = this.hashToolsConfig(config.toolsConfig);
+      parts.push(toolsHash);
+    }
+    return parts.join(":");
   }
   /**
    * TEMPORARY SOLUTION for compatibility with new OpenAI models
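A hedged worked example of the new cache key: the model id and settings below are invented, but the hashing and key format follow hashToolsConfig and generateModelCacheKey above.

    import { createHash } from "node:crypto";

    // Placeholder config mirroring the fields generateModelCacheKey reads.
    const config = {
      modelId: "model123",
      temperature: 0.7,
      maxTokens: 4096,
      toolsConfig: [{ toolName: "web_search", enabled: true, config: {} }],
    };

    const sorted = config.toolsConfig
      .map((t) => `${t.toolName}:${t.enabled}:${JSON.stringify(t.config || {})}`)
      .sort()
      .join("|");
    const toolsHash = createHash("md5").update(sorted).digest("hex").slice(0, 16);

    const cacheKey = [
      config.modelId,
      config.temperature ?? "default",
      config.maxTokens ?? "default",
      toolsHash,
    ].join(":");
    // => "model123:0.7:4096:<16-char md5 prefix>", matching the format documented above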
@@ -6409,12 +6385,7 @@ var ModelInitializer = class _ModelInitializer {
     ["voyageai" /* VOYAGEAI */]: void 0
   };
   async initializeChatModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      config.temperature,
-      config.maxTokens,
-      "chat" /* CHAT */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached chat model instance: ${cacheKey}`);
@@ -6452,17 +6423,70 @@ var ModelInitializer = class _ModelInitializer {
       metadataKeys: Object.keys(model.metadata || {}),
       hasModelId: !!model.metadata?.modelId
     });
+    this.logger.debug(`[TOOLS CHECK] toolsConfig exists: ${!!config.toolsConfig}, customTools exists: ${!!config.customTools}`);
+    if (config.toolsConfig) {
+      this.logger.debug(`[TOOLS CHECK] toolsConfig length: ${config.toolsConfig.length}, content: ${JSON.stringify(config.toolsConfig)}`);
+    }
+    if (config.toolsConfig || config.customTools) {
+      this.logger.debug(`[TOOLS] Calling bindToolsToModel with toolsConfig: ${JSON.stringify(config.toolsConfig)}`);
+      const boundModel = await this.bindToolsToModel(
+        model,
+        config.toolsConfig,
+        config.customTools
+      );
+      this.logger.debug(`[TOOLS] bindToolsToModel returned successfully`);
+      this.modelInstanceCache.set(cacheKey, boundModel);
+      return boundModel;
+    }
     this.modelInstanceCache.set(cacheKey, model);
     return model;
   }
+  /**
+   * Bind tools to model (merge toolsConfig and customTools)
+   * For toolsConfig: fetch tool executors from MCP Runtime
+   * For customTools: use as-is (already prepared DynamicStructuredTool)
+   *
+   * Returns:
+   * - Runnable when tools are bound (model.bindTools returns Runnable)
+   * - BaseChatModel when no tools
+   */
+  async bindToolsToModel(model, toolsConfig, customTools) {
+    const allTools = [];
+    if (toolsConfig && toolsConfig.length > 0) {
+      try {
+        const enabledToolsConfig = toolsConfig.filter((tc) => tc.enabled !== false);
+        if (enabledToolsConfig.length > 0) {
+          this.logger.debug(
+            `Fetching ${enabledToolsConfig.length} tools with dynamic schemas from MCP Runtime: ${enabledToolsConfig.map((tc) => tc.toolName).join(", ")}`
+          );
+          const mcpToolFilter = new McpToolFilter();
+          const mcpTools = await mcpToolFilter.getFilteredTools(
+            enabledToolsConfig
+          );
+          this.logger.debug(
+            `Successfully fetched ${mcpTools.length} tools with dynamic schemas from MCP Runtime`
+          );
+          allTools.push(...mcpTools);
+        }
+      } catch (error) {
+        this.logger.error(
+          `Failed to fetch tools from MCP Runtime: ${error instanceof Error ? error.message : String(error)}`
+        );
+      }
+    }
+    if (customTools && customTools.length > 0) {
+      allTools.push(...customTools);
+      this.logger.debug(`Added ${customTools.length} custom tools to model`);
+    }
+    if (allTools.length > 0) {
+      this.logger.debug(`Binding ${allTools.length} tools to model`);
+      const modelWithTools = model.bindTools(allTools);
+      return modelWithTools;
+    }
+    return model;
+  }
   async initializeRerankModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      void 0,
-      // rerank models typically don't use temperature
-      config.maxTokens,
-      "rerank" /* RERANK */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached rerank model instance: ${cacheKey}`);
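initializeChatModel now derives its cache key from the whole config object and, when toolsConfig or customTools is present, routes through bindToolsToModel before caching. A hedged usage sketch; the model id and tool entry are placeholders, and the constructor arguments of ModelInitializer are not shown in this diff:

    import { ModelInitializer } from "@flutchai/flutch-sdk";

    const initializer = new ModelInitializer(); // assumes a no-arg constructor, which this diff does not show
    const chatModel = await initializer.initializeChatModel({
      modelId: "my-model-id",  // placeholder
      temperature: 0.2,
      maxTokens: 2048,
      // Tools below are fetched from the MCP runtime and bound via model.bindTools(...) when non-empty.
      toolsConfig: [{ toolName: "web_search", enabled: true, config: {} }],
    });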
@@ -6490,14 +6514,7 @@ var ModelInitializer = class _ModelInitializer {
     return model;
   }
   async initializeEmbeddingModel(config) {
-    const cacheKey = this.generateModelCacheKey(
-      config.modelId,
-      void 0,
-      // embedding models typically don't use temperature
-      void 0,
-      // embedding models typically don't use maxTokens
-      "embedding" /* EMBEDDING */
-    );
+    const cacheKey = this.generateModelCacheKey(config);
     const cachedModel = this.modelInstanceCache.get(cacheKey);
     if (cachedModel) {
       this.logger.debug(`Using cached embedding model instance: ${cacheKey}`);
@@ -6645,7 +6662,7 @@ var ModelInitializer = class _ModelInitializer {
   }
   // Simple API request for microservices (copy from original LLMInitializer)
   async fetchFromApi(modelId) {
-    const apiUrl = process.env.API_URL
+    const apiUrl = process.env.API_URL;
     const token = process.env.INTERNAL_API_TOKEN;
     if (!token) {
       throw new Error("INTERNAL_API_TOKEN required for API mode");
@@ -6689,40 +6706,6 @@ var ModelInitializer = class _ModelInitializer {
     return result;
   }
 };
-function prepareModelWithTools(model, tools, baseConfig = {}) {
-  if (tools.length === 0) {
-    return {
-      modelWithTools: model,
-      finalConfig: baseConfig,
-      toolsMethod: "none"
-    };
-  }
-  if (model.bindTools && typeof model.bindTools === "function") {
-    try {
-      const modelWithTools = model.bindTools(tools);
-      return {
-        modelWithTools,
-        finalConfig: baseConfig,
-        toolsMethod: "bindTools"
-      };
-    } catch (error) {
-      const invokeConfig2 = { tools };
-      const finalConfig2 = { ...baseConfig, ...invokeConfig2 };
-      return {
-        modelWithTools: model,
-        finalConfig: finalConfig2,
-        toolsMethod: "manual"
-      };
-    }
-  }
-  const invokeConfig = { tools };
-  const finalConfig = { ...baseConfig, ...invokeConfig };
-  return {
-    modelWithTools: model,
-    finalConfig,
-    toolsMethod: "manual"
-  };
-}

 // src/retriever/enums.ts
 var RetrieverSearchType = /* @__PURE__ */ ((RetrieverSearchType2) => {
@@ -7033,6 +7016,6 @@ StaticDiscovery = __decorateClass([
   Injectable()
 ], StaticDiscovery);

-export { AbstractGraphBuilder, AttachmentType, GraphController as BaseGraphServiceController, UniversalGraphModule as BaseGraphServiceModule, BuilderRegistryService, Callback, CallbackACL, CallbackAuditAction, CallbackAuditor, CallbackController, CallbackMetrics, CallbackPatchService, CallbackRateLimiter, CallbackRegistry, CallbackStore, CallbackTokenGuard, ChatFeature, DEFAULT_TRACER_OPTIONS, ENDPOINT_METADATA_KEY, Endpoint, EndpointRegistry, EventProcessor, FileBasedDiscovery, GraphController, GraphEngineFactory, GraphEngineType, GraphManifestSchema, GraphManifestValidator, GraphServiceTokens, GraphTypeUtils, IdempotencyManager, IdempotencyStatus, LangGraphEngine, McpConverter, McpRuntimeHttpClient, McpToolFilter, ModelInitializer, ModelProvider, ModelType, RetrieverSearchType, RetrieverService, SmartCallbackRouter, StaticDiscovery, StreamChannel, TelegramPatchHandler, UIDispatchController, UIEndpoint, UIEndpointsDiscoveryService, UniversalCallbackService, UniversalGraphModule, UniversalGraphService, VersionedGraphService, VoyageAIRerank, WebPatchHandler, WithCallbacks, WithEndpoints, WithUIEndpoints, bootstrap, createEndpointDescriptors, findCallbackMethod, findEndpointMethod, getCallbackMetadata, getEndpointMetadata, getUIEndpointClassMetadata, getUIEndpointMethodsMetadata, hasCallbacks, hasUIEndpoints,
+export { AbstractGraphBuilder, AttachmentType, GraphController as BaseGraphServiceController, UniversalGraphModule as BaseGraphServiceModule, BuilderRegistryService, Callback, CallbackACL, CallbackAuditAction, CallbackAuditor, CallbackController, CallbackMetrics, CallbackPatchService, CallbackRateLimiter, CallbackRegistry, CallbackStore, CallbackTokenGuard, ChatFeature, DEFAULT_TRACER_OPTIONS, ENDPOINT_METADATA_KEY, Endpoint, EndpointRegistry, EventProcessor, FileBasedDiscovery, GraphController, GraphEngineFactory, GraphEngineType, GraphManifestSchema, GraphManifestValidator, GraphServiceTokens, GraphTypeUtils, IdempotencyManager, IdempotencyStatus, LangGraphEngine, McpConverter, McpRuntimeHttpClient, McpToolFilter, ModelInitializer, ModelProvider, ModelType, RetrieverSearchType, RetrieverService, SmartCallbackRouter, StaticDiscovery, StreamChannel, TelegramPatchHandler, UIDispatchController, UIEndpoint, UIEndpointsDiscoveryService, UniversalCallbackService, UniversalGraphModule, UniversalGraphService, VersionedGraphService, VoyageAIRerank, WebPatchHandler, WithCallbacks, WithEndpoints, WithUIEndpoints, bootstrap, createEndpointDescriptors, findCallbackMethod, findEndpointMethod, getCallbackMetadata, getEndpointMetadata, getUIEndpointClassMetadata, getUIEndpointMethodsMetadata, hasCallbacks, hasUIEndpoints, registerFinanceExampleCallback, registerUIEndpointsFromClass, sanitizeTraceData, traceApiCall };
 //# sourceMappingURL=index.js.map
 //# sourceMappingURL=index.js.map