@madh-io/alfred-ai 0.3.7 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/bundle/index.js +54 -3
  2. package/package.json +1 -1
package/bundle/index.js CHANGED
@@ -3898,7 +3898,7 @@ var init_message_pipeline = __esm({
3898
3898
  this.memoryRepo = memoryRepo;
3899
3899
  this.promptBuilder = new PromptBuilder();
3900
3900
  }
3901
- async process(message) {
3901
+ async process(message, onProgress) {
3902
3902
  const startTime = Date.now();
3903
3903
  this.logger.info({ platform: message.platform, userId: message.userId, chatId: message.chatId }, "Processing message");
3904
3904
  try {
@@ -3921,6 +3921,7 @@ var init_message_pipeline = __esm({
3921
3921
  const messages = this.trimToContextWindow(system, allMessages);
3922
3922
  let response;
3923
3923
  let iteration = 0;
3924
+ onProgress?.("Thinking...");
3924
3925
  while (true) {
3925
3926
  response = await this.llm.complete({
3926
3927
  messages,
@@ -3950,6 +3951,8 @@ var init_message_pipeline = __esm({
3950
3951
  messages.push({ role: "assistant", content: assistantContent });
3951
3952
  const toolResultBlocks = [];
3952
3953
  for (const toolCall of response.toolCalls) {
3954
+ const toolLabel = this.getToolLabel(toolCall.name, toolCall.input);
3955
+ onProgress?.(toolLabel);
3953
3956
  const result = await this.executeToolCall(toolCall, {
3954
3957
  userId: message.userId,
3955
3958
  chatId: message.chatId,
@@ -3972,6 +3975,9 @@ var init_message_pipeline = __esm({
3972
3975
  this.conversationManager.addMessage(conversation.id, "assistant", `${response.content ? response.content + "\n" : ""}${toolCallSummary}`, JSON.stringify(response.toolCalls));
3973
3976
  this.conversationManager.addMessage(conversation.id, "user", toolResultSummary);
3974
3977
  messages.push({ role: "user", content: toolResultBlocks });
3978
+ if (iteration < MAX_TOOL_ITERATIONS) {
3979
+ onProgress?.("Thinking...");
3980
+ }
3975
3981
  }
3976
3982
  const responseText = response.content || "(no response)";
3977
3983
  this.conversationManager.addMessage(conversation.id, "assistant", responseText);
@@ -4024,6 +4030,28 @@ var init_message_pipeline = __esm({
4024
4030
  return { content: `Skill execution failed: ${msg}`, isError: true };
4025
4031
  }
4026
4032
  }
4033
+ getToolLabel(toolName, input2) {
4034
+ switch (toolName) {
4035
+ case "shell":
4036
+ return `Running: ${String(input2.command ?? "").slice(0, 60)}`;
4037
+ case "web_search":
4038
+ return `Searching: ${String(input2.query ?? "")}`;
4039
+ case "email":
4040
+ return `Email: ${String(input2.action ?? "")}`;
4041
+ case "memory":
4042
+ return `Memory: ${String(input2.action ?? "")}`;
4043
+ case "reminder":
4044
+ return `Reminder: ${String(input2.action ?? "")}`;
4045
+ case "calculator":
4046
+ return `Calculating...`;
4047
+ case "system_info":
4048
+ return `Getting system info...`;
4049
+ case "delegate":
4050
+ return `Delegating sub-task...`;
4051
+ default:
4052
+ return `Using ${toolName}...`;
4053
+ }
4054
+ }
4027
4055
  /**
4028
4056
  * Trim messages to fit within the LLM's context window.
4029
4057
  * Keeps the system prompt, the latest user message, and as many
@@ -4748,8 +4776,31 @@ var init_alfred = __esm({
4748
4776
  setupAdapterHandlers(platform, adapter) {
4749
4777
  adapter.on("message", async (message) => {
4750
4778
  try {
4751
- const response = await this.pipeline.process(message);
4752
- await adapter.sendMessage(message.chatId, response);
4779
+ let statusMessageId;
4780
+ let lastStatus = "";
4781
+ const onProgress = async (status) => {
4782
+ if (status === lastStatus)
4783
+ return;
4784
+ lastStatus = status;
4785
+ try {
4786
+ if (!statusMessageId) {
4787
+ statusMessageId = await adapter.sendMessage(message.chatId, status);
4788
+ } else {
4789
+ await adapter.editMessage(message.chatId, statusMessageId, status);
4790
+ }
4791
+ } catch {
4792
+ }
4793
+ };
4794
+ const response = await this.pipeline.process(message, onProgress);
4795
+ if (statusMessageId) {
4796
+ try {
4797
+ await adapter.editMessage(message.chatId, statusMessageId, response);
4798
+ } catch {
4799
+ await adapter.sendMessage(message.chatId, response);
4800
+ }
4801
+ } else {
4802
+ await adapter.sendMessage(message.chatId, response);
4803
+ }
4753
4804
  } catch (error) {
4754
4805
  this.logger.error({ platform, err: error, chatId: message.chatId }, "Failed to handle message");
4755
4806
  try {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@madh-io/alfred-ai",
3
- "version": "0.3.7",
3
+ "version": "0.4.0",
4
4
  "description": "Alfred — Personal AI Assistant across Telegram, Discord, WhatsApp, Matrix & Signal",
5
5
  "type": "module",
6
6
  "bin": {