@superatomai/sdk-node 0.0.1 → 0.0.2

This diff shows the content changes between package versions that have been publicly released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the changes exactly as the versions appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1866,6 +1866,7 @@ import fs3 from "fs";
1866
1866
  import path3 from "path";
1867
1867
  var PromptLoader = class {
1868
1868
  constructor(config) {
1869
+ logger.debug("Initializing PromptLoader...", process.cwd());
1869
1870
  this.promptsDir = config?.promptsDir || path3.join(process.cwd(), ".prompts");
1870
1871
  }
1871
1872
  /**
@@ -1882,6 +1883,7 @@ var PromptLoader = class {
1882
1883
  promptName,
1883
1884
  `${promptType}.md`
1884
1885
  );
1886
+ logger.debug(`Loading prompt '${promptName}/${promptType}.md' from ${promptPath} process path: ${process.cwd()}`);
1885
1887
  let content = fs3.readFileSync(promptPath, "utf-8");
1886
1888
  for (const [key, value] of Object.entries(variables)) {
1887
1889
  const pattern = new RegExp(`{{${key}}}`, "g");
@@ -1960,7 +1962,7 @@ var LLM = class {
1960
1962
  *
1961
1963
  * @example
1962
1964
  * "anthropic/claude-sonnet-4-5" → ["anthropic", "claude-sonnet-4-5"]
1963
- * "groq/gpt-oss-120b" → ["groq", "gpt-oss-120b"]
1965
+ * "groq/openai/gpt-oss-120b" → ["groq", "openai/gpt-oss-120b"]
1964
1966
  * "claude-sonnet-4-5" → ["anthropic", "claude-sonnet-4-5"] (default)
1965
1967
  */
1966
1968
  static _parseModel(modelString) {
@@ -1968,8 +1970,10 @@ var LLM = class {
1968
1970
  return ["anthropic", "claude-sonnet-4-5"];
1969
1971
  }
1970
1972
  if (modelString.includes("/")) {
1971
- const [provider, model] = modelString.split("/");
1972
- return [provider.toLowerCase().trim(), model.trim()];
1973
+ const firstSlashIndex = modelString.indexOf("/");
1974
+ const provider = modelString.substring(0, firstSlashIndex).toLowerCase().trim();
1975
+ const model = modelString.substring(firstSlashIndex + 1).trim();
1976
+ return [provider, model];
1973
1977
  }
1974
1978
  return ["anthropic", modelString];
1975
1979
  }
@@ -1977,8 +1981,11 @@ var LLM = class {
1977
1981
  // ANTHROPIC IMPLEMENTATION
1978
1982
  // ============================================================
1979
1983
  static async _anthropicText(messages, modelName, options) {
1984
+ const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
1985
+ console.log("[LLM DEBUG] Anthropic Text - apiKey from options:", options.apiKey ? `${options.apiKey.substring(0, 10)}...` : "NOT SET");
1986
+ console.log("[LLM DEBUG] Anthropic Text - final apiKey:", apiKey ? `${apiKey.substring(0, 10)}...` : "EMPTY STRING");
1980
1987
  const client = new Anthropic({
1981
- apiKey: options.apiKey || process.env.ANTHROPIC_API_KEY || ""
1988
+ apiKey
1982
1989
  });
1983
1990
  const response = await client.messages.create({
1984
1991
  model: modelName,
@@ -1994,8 +2001,12 @@ var LLM = class {
1994
2001
  return textBlock?.type === "text" ? textBlock.text : "";
1995
2002
  }
1996
2003
  static async _anthropicStream(messages, modelName, options, json) {
2004
+ const apiKey = options.apiKey || process.env.ANTHROPIC_API_KEY || "";
2005
+ console.log("[LLM DEBUG] Anthropic - apiKey from options:", options.apiKey ? `${options.apiKey.substring(0, 10)}...` : "NOT SET");
2006
+ console.log("[LLM DEBUG] Anthropic - apiKey from env:", process.env.ANTHROPIC_API_KEY ? `${process.env.ANTHROPIC_API_KEY.substring(0, 10)}...` : "NOT SET");
2007
+ console.log("[LLM DEBUG] Anthropic - final apiKey:", apiKey ? `${apiKey.substring(0, 10)}...` : "EMPTY STRING");
1997
2008
  const client = new Anthropic({
1998
- apiKey: options.apiKey || process.env.ANTHROPIC_API_KEY || ""
2009
+ apiKey
1999
2010
  });
2000
2011
  const stream = await client.messages.create({
2001
2012
  model: modelName,
@@ -2042,8 +2053,12 @@ var LLM = class {
2042
2053
  return response.choices[0]?.message?.content || "";
2043
2054
  }
2044
2055
  static async _groqStream(messages, modelName, options, json) {
2056
+ const apiKey = options.apiKey || process.env.GROQ_API_KEY || "";
2057
+ console.log("[LLM DEBUG] Groq - apiKey from options:", options.apiKey ? `${options.apiKey.substring(0, 10)}...` : "NOT SET");
2058
+ console.log("[LLM DEBUG] Groq - model:", modelName);
2059
+ console.log("[LLM DEBUG] Groq - final apiKey:", apiKey ? `${apiKey.substring(0, 10)}...` : "EMPTY STRING");
2045
2060
  const client = new Groq({
2046
- apiKey: options.apiKey || process.env.GROQ_API_KEY || ""
2061
+ apiKey
2047
2062
  });
2048
2063
  const stream = await client.chat.completions.create({
2049
2064
  model: modelName,
@@ -2114,6 +2129,7 @@ var BaseLLM = class {
2114
2129
  */
2115
2130
  async classifyUserQuestion(userPrompt, apiKey, logCollector, conversationHistory) {
2116
2131
  const schemaDoc = schema.generateSchemaDocumentation();
2132
+ logger.info("Generating prompts...", userPrompt, conversationHistory);
2117
2133
  try {
2118
2134
  const prompts = await promptLoader.loadPrompts("classify", {
2119
2135
  SCHEMA_DOC: schemaDoc || "No schema available",
@@ -2770,8 +2786,15 @@ var useAnthropicMethod = async (prompt, components, apiKey, logCollector, conver
2770
2786
  logCollector?.error(emptyMsg);
2771
2787
  return { success: false, reason: emptyMsg };
2772
2788
  }
2773
- const matchResult = await anthropicLLM.handleUserRequest(prompt, components, apiKey, logCollector, conversationHistory);
2774
- return { success: true, data: matchResult };
2789
+ try {
2790
+ const matchResult = await anthropicLLM.handleUserRequest(prompt, components, apiKey, logCollector, conversationHistory);
2791
+ logger.debug(`Anthropic method success: ${matchResult}`);
2792
+ return { success: true, data: matchResult };
2793
+ } catch (error) {
2794
+ const errorMsg = error instanceof Error ? error.message : String(error);
2795
+ logCollector?.error(`Anthropic method failed: ${errorMsg}`);
2796
+ throw error;
2797
+ }
2775
2798
  };
2776
2799
  var useGroqMethod = async (prompt, components, apiKey, logCollector, conversationHistory) => {
2777
2800
  const msg = "Using Groq LLM matching method...";
@@ -2782,8 +2805,14 @@ var useGroqMethod = async (prompt, components, apiKey, logCollector, conversatio
2782
2805
  logCollector?.error(emptyMsg);
2783
2806
  return { success: false, reason: emptyMsg };
2784
2807
  }
2785
- const matchResult = await groqLLM.handleUserRequest(prompt, components, apiKey, logCollector, conversationHistory);
2786
- return { success: true, data: matchResult };
2808
+ try {
2809
+ const matchResult = await groqLLM.handleUserRequest(prompt, components, apiKey, logCollector, conversationHistory);
2810
+ return { success: true, data: matchResult };
2811
+ } catch (error) {
2812
+ const errorMsg = error instanceof Error ? error.message : String(error);
2813
+ logCollector?.error(`Groq method failed: ${errorMsg}`);
2814
+ throw error;
2815
+ }
2787
2816
  };
2788
2817
  var getUserResponseFromCache = async (prompt) => {
2789
2818
  return false;
@@ -2813,6 +2842,7 @@ var get_user_response = async (prompt, components, anthropicApiKey, groqApiKey,
2813
2842
  let result;
2814
2843
  if (provider === "anthropic") {
2815
2844
  result = await useAnthropicMethod(prompt, components, anthropicApiKey, logCollector, conversationHistory);
2845
+ logger.debug("Anthropic result:", result);
2816
2846
  } else if (provider === "groq") {
2817
2847
  result = await useGroqMethod(prompt, components, groqApiKey, logCollector, conversationHistory);
2818
2848
  } else {
@@ -3004,6 +3034,7 @@ var CONTEXT_CONFIG = {
3004
3034
  };
3005
3035
 
3006
3036
  // src/handlers/user-prompt-request.ts
3037
+ var processedMessageIds = /* @__PURE__ */ new Set();
3007
3038
  async function handleUserPromptRequest(data, components, sendMessage, anthropicApiKey, groqApiKey, llmProviders) {
3008
3039
  try {
3009
3040
  const userPromptRequest = UserPromptRequestMessageSchema.parse(data);
@@ -3011,6 +3042,19 @@ async function handleUserPromptRequest(data, components, sendMessage, anthropicA
3011
3042
  const prompt = payload.prompt;
3012
3043
  const SA_RUNTIME = payload.SA_RUNTIME;
3013
3044
  const wsId = userPromptRequest.from.id || "unknown";
3045
+ logger.info(`[REQUEST ${id}] Processing user prompt: "${prompt.substring(0, 50)}..."`);
3046
+ logger.info(`[REQUEST ${id}] Providers: ${llmProviders?.join(", ")}, Anthropic key: ${anthropicApiKey ? "SET" : "NOT SET"}, Groq key: ${groqApiKey ? "SET" : "NOT SET"}`);
3047
+ if (processedMessageIds.has(id)) {
3048
+ logger.warn(`[REQUEST ${id}] Duplicate request detected - ignoring`);
3049
+ return;
3050
+ }
3051
+ processedMessageIds.add(id);
3052
+ if (processedMessageIds.size > 100) {
3053
+ const firstId = processedMessageIds.values().next().value;
3054
+ if (firstId) {
3055
+ processedMessageIds.delete(firstId);
3056
+ }
3057
+ }
3014
3058
  if (!SA_RUNTIME) {
3015
3059
  sendDataResponse4(id, {
3016
3060
  success: false,
@@ -3060,6 +3104,7 @@ async function handleUserPromptRequest(data, components, sendMessage, anthropicA
3060
3104
  const conversationHistory = thread.getConversationContext(CONTEXT_CONFIG.MAX_CONVERSATION_CONTEXT_BLOCKS, existingUiBlockId);
3061
3105
  const userResponse = await get_user_response(prompt, components, anthropicApiKey, groqApiKey, llmProviders, logCollector, conversationHistory);
3062
3106
  logCollector.info("User prompt request completed");
3107
+ logger.info(`[REQUEST ${id}] Response success: ${userResponse.success}, reason: ${userResponse.success ? "N/A" : userResponse}`);
3063
3108
  if (userResponse.success && userResponse.data && typeof userResponse.data === "object" && "component" in userResponse.data) {
3064
3109
  const component = userResponse.data.component;
3065
3110
  const uiBlockId = existingUiBlockId;
@@ -3101,6 +3146,10 @@ function sendDataResponse4(id, res, sendMessage, clientId) {
3101
3146
  ...res
3102
3147
  }
3103
3148
  };
3149
+ logger.info(`[REQUEST ${id}] Sending USER_PROMPT_RES with success=${res.success}`);
3150
+ if (!res.success && res.reason) {
3151
+ logger.info(`[REQUEST ${id}] Error reason: ${res.reason}`);
3152
+ }
3104
3153
  sendMessage(response);
3105
3154
  }
3106
3155
 
@@ -4210,6 +4259,9 @@ var UserManager = class {
4210
4259
  if (!user) {
4211
4260
  return false;
4212
4261
  }
4262
+ if (!user.wsIds || !Array.isArray(user.wsIds)) {
4263
+ user.wsIds = [];
4264
+ }
4213
4265
  if (!user.wsIds.includes(wsId)) {
4214
4266
  user.wsIds.push(wsId);
4215
4267
  this.hasChanged = true;
@@ -4228,6 +4280,9 @@ var UserManager = class {
4228
4280
  if (!user) {
4229
4281
  return false;
4230
4282
  }
4283
+ if (!user.wsIds || !Array.isArray(user.wsIds)) {
4284
+ return false;
4285
+ }
4231
4286
  const initialLength = user.wsIds.length;
4232
4287
  user.wsIds = user.wsIds.filter((id) => id !== wsId);
4233
4288
  if (user.wsIds.length < initialLength) {