chattercatcher 0.1.27 → 0.1.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -8,7 +8,7 @@ import fs15 from "fs/promises";
8
8
  // package.json
9
9
  var package_default = {
10
10
  name: "chattercatcher",
11
- version: "0.1.27",
11
+ version: "0.1.28",
12
12
  description: "\u672C\u5730\u4F18\u5148\u7684\u98DE\u4E66/Lark \u5BB6\u5EAD\u7FA4\u77E5\u8BC6\u5E93\u673A\u5668\u4EBA",
13
13
  type: "module",
14
14
  main: "dist/index.js",
@@ -971,6 +971,7 @@ function getGatewayStatus(config, secrets) {
971
971
  }
972
972
 
973
973
  // src/llm/openai-compatible.ts
974
+ var OPENAI_EMBEDDING_BATCH_SIZE = 64;
974
975
  function normalizeBaseUrl(baseUrl) {
975
976
  return baseUrl.replace(/\/+$/, "");
976
977
  }
@@ -1002,12 +1003,66 @@ function toOpenAITool(tool) {
1002
1003
  }
1003
1004
  };
1004
1005
  }
1006
+ function parseToolCallArguments(value) {
1007
+ try {
1008
+ return JSON.parse(value);
1009
+ } catch {
1010
+ return {};
1011
+ }
1012
+ }
1013
+ function decodeDsmlValue(value, isString) {
1014
+ const trimmed = value.trim();
1015
+ if (isString) {
1016
+ return trimmed;
1017
+ }
1018
+ if (trimmed === "true") return true;
1019
+ if (trimmed === "false") return false;
1020
+ if (trimmed === "null") return null;
1021
+ const numberValue = Number(trimmed);
1022
+ if (trimmed && Number.isFinite(numberValue)) {
1023
+ return numberValue;
1024
+ }
1025
+ return trimmed;
1026
+ }
1027
+ function parseDsmlToolCalls(content) {
1028
+ if (!content?.includes("DSML")) {
1029
+ return [];
1030
+ }
1031
+ const toolCalls = [];
1032
+ const invokePattern = /<｜DSML｜invoke\s+name="([^"]+)"\s*>([\s\S]*?)<\/｜DSML｜invoke>/g;
1033
+ const parameterPattern = /<｜DSML｜parameter\s+name="([^"]+)"\s+string="(true|false)"\s*>([\s\S]*?)<\/｜DSML｜parameter>/g;
1034
+ for (const invoke of content.matchAll(invokePattern)) {
1035
+ const name = invoke[1];
1036
+ if (!name) {
1037
+ continue;
1038
+ }
1039
+ const input2 = {};
1040
+ const body = invoke[2] ?? "";
1041
+ for (const parameter of body.matchAll(parameterPattern)) {
1042
+ const parameterName = parameter[1];
1043
+ if (!parameterName) {
1044
+ continue;
1045
+ }
1046
+ input2[parameterName] = decodeDsmlValue(parameter[3] ?? "", parameter[2] === "true");
1047
+ }
1048
+ toolCalls.push({
1049
+ id: `dsml_${toolCalls.length + 1}`,
1050
+ name,
1051
+ input: input2
1052
+ });
1053
+ }
1054
+ return toolCalls;
1055
+ }
1005
1056
  function parseToolCalls(message) {
1006
- return message?.tool_calls?.map((toolCall) => ({
1057
+ const standardToolCalls = message?.tool_calls?.map((toolCall) => ({
1007
1058
  id: toolCall.id,
1008
1059
  name: toolCall.function.name,
1009
- input: JSON.parse(toolCall.function.arguments)
1060
+ input: parseToolCallArguments(toolCall.function.arguments)
1010
1061
  })) ?? [];
1062
+ return standardToolCalls.length > 0 ? standardToolCalls : parseDsmlToolCalls(message?.content);
1063
+ }
1064
+ function isDsmlToolCallContent(content) {
1065
+ return parseDsmlToolCalls(content).length > 0;
1011
1066
  }
1012
1067
  var OpenAICompatibleChatModel = class {
1013
1068
  constructor(options) {
@@ -1066,9 +1121,10 @@ var OpenAICompatibleChatModel = class {
1066
1121
  }
1067
1122
  const data2 = await response.json();
1068
1123
  const message = data2.choices?.[0]?.message;
1124
+ const toolCalls = parseToolCalls(message);
1069
1125
  return {
1070
- content: message?.content ?? "",
1071
- toolCalls: parseToolCalls(message),
1126
+ content: toolCalls.length > 0 && isDsmlToolCallContent(message?.content) ? "" : message?.content ?? "",
1127
+ toolCalls,
1072
1128
  reasoningContent: message?.reasoning_content ?? void 0
1073
1129
  };
1074
1130
  }
@@ -1086,6 +1142,13 @@ var OpenAICompatibleEmbeddingModel = class {
1086
1142
  if (!this.options.baseUrl || !this.options.apiKey || !this.options.model) {
1087
1143
  throw new Error("Embedding \u914D\u7F6E\u4E0D\u5B8C\u6574\u3002\u8BF7\u8FD0\u884C chattercatcher setup \u6216 chattercatcher settings\u3002");
1088
1144
  }
1145
+ const vectors = [];
1146
+ for (let index2 = 0; index2 < texts.length; index2 += OPENAI_EMBEDDING_BATCH_SIZE) {
1147
+ vectors.push(...await this.fetchEmbeddingBatch(texts.slice(index2, index2 + OPENAI_EMBEDDING_BATCH_SIZE)));
1148
+ }
1149
+ return vectors;
1150
+ }
1151
+ async fetchEmbeddingBatch(texts) {
1089
1152
  const response = await fetch(`${normalizeBaseUrl(this.options.baseUrl)}/embeddings`, {
1090
1153
  method: "POST",
1091
1154
  headers: {
@@ -3263,12 +3326,20 @@ function parseExactNumber2(field, min, max) {
3263
3326
  }
3264
3327
 
3265
3328
  // src/rag/indexer.ts
3329
+ var EMBEDDING_INDEX_BATCH_SIZE = 64;
3266
3330
  async function indexMessageChunks(input2) {
3267
3331
  const chunks = input2.messageIds ? input2.messages.listMessageChunksByMessageIds(input2.messageIds, input2.limit ?? 1e4) : input2.messages.listAllMessageChunks(input2.limit ?? 1e4);
3268
3332
  if (chunks.length === 0) {
3269
3333
  return { chunks: 0, vectors: 0 };
3270
3334
  }
3271
- const vectors = await input2.embedding.embedBatch(chunks.map((chunk) => chunk.text));
3335
+ const vectors = [];
3336
+ for (let index2 = 0; index2 < chunks.length; index2 += EMBEDDING_INDEX_BATCH_SIZE) {
3337
+ vectors.push(
3338
+ ...await input2.embedding.embedBatch(
3339
+ chunks.slice(index2, index2 + EMBEDDING_INDEX_BATCH_SIZE).map((chunk) => chunk.text)
3340
+ )
3341
+ );
3342
+ }
3272
3343
  const records = [];
3273
3344
  for (const [index2, chunk] of chunks.entries()) {
3274
3345
  const vector = vectors[index2];
@@ -3823,6 +3894,9 @@ var DEFAULT_MAX_MODEL_TURNS = 4;
3823
3894
  var DEFAULT_MAX_TOOL_CALLS = 8;
3824
3895
  var FEISHU_TOOL_LOOP_FALLBACK = "\u5B9A\u65F6\u4EFB\u52A1\u64CD\u4F5C\u5DF2\u63D0\u4EA4\uFF0C\u4F46\u6A21\u578B\u6CA1\u6709\u751F\u6210\u6700\u7EC8\u56DE\u590D\u3002";
3825
3896
  var FEISHU_TOOL_LOOP_LIMIT_REACHED = "\u5DE5\u5177\u8C03\u7528\u6B21\u6570\u5DF2\u8FBE\u5230\u4E0A\u9650\uFF0C\u8BF7\u7F29\u5C0F\u8BF7\u6C42\u540E\u91CD\u8BD5\u3002";
3897
+ function containsRawToolCallMarkup(content) {
3898
+ return /<｜DSML｜tool_calls>|<｜DSML｜invoke\s+name=|<tool_call>|<tool_calls>/i.test(content);
3899
+ }
3826
3900
  function toToolResultContent(value) {
3827
3901
  if (typeof value === "string") return value;
3828
3902
  return JSON.stringify(value);
@@ -3865,6 +3939,7 @@ async function runFeishuToolLoop(input2) {
3865
3939
  let toolCallsUsed = 0;
3866
3940
  for (let turn = 0; turn < maxModelTurns; turn += 1) {
3867
3941
  const assistantResult = await input2.model.completeWithTools(messages, input2.tools);
3942
+ const hasRawToolCallMarkup = containsRawToolCallMarkup(assistantResult.content);
3868
3943
  messages.push({
3869
3944
  role: "assistant",
3870
3945
  content: assistantResult.content,
@@ -3872,6 +3947,9 @@ async function runFeishuToolLoop(input2) {
3872
3947
  reasoningContent: assistantResult.reasoningContent
3873
3948
  });
3874
3949
  if (assistantResult.toolCalls.length === 0) {
3950
+ if (hasRawToolCallMarkup) {
3951
+ break;
3952
+ }
3875
3953
  return assistantResult.content || FEISHU_TOOL_LOOP_FALLBACK;
3876
3954
  }
3877
3955
  for (const toolCall of assistantResult.toolCalls) {
@@ -3984,7 +4062,7 @@ var FeishuQuestionHandler = class {
3984
4062
  }
3985
4063
  if (this.options.sender.addReactionToMessage) {
3986
4064
  try {
3987
- await this.options.sender.addReactionToMessage(messageId, this.options.thinkingEmojiType ?? "keyboard");
4065
+ await this.options.sender.addReactionToMessage(messageId, this.options.thinkingEmojiType ?? "OK");
3988
4066
  return;
3989
4067
  } catch (error) {
3990
4068
  console.log(`\u98DE\u4E66\u63D0\u95EE\u8868\u60C5\u53CD\u9988\u5931\u8D25\uFF0C\u6539\u7528\u6587\u5B57\u53CD\u9988\uFF1A${error instanceof Error ? error.message : String(error)}`);