@rigstate/mcp 0.5.2 → 0.5.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -974,19 +974,30 @@ architecture rules, decisions, and constraints.`,
   }
 });
 async function generateQueryEmbedding(query) {
-  const apiKey = process.env.GOOGLE_GENERATIVE_AI_API_KEY;
-  if (!apiKey) {
-    console.warn("GOOGLE_GENERATIVE_AI_API_KEY not found, skipping vector search.");
+  const openRouterKey = process.env.OPENROUTER_API_KEY;
+  const googleKey = process.env.GOOGLE_GENERATIVE_AI_API_KEY;
+  if (!openRouterKey && !googleKey) {
+    console.warn("Neither OPENROUTER_API_KEY nor GOOGLE_GENERATIVE_AI_API_KEY found, skipping vector search.");
     return null;
   }
   try {
-    const { google } = await import("@ai-sdk/google");
     const { embed } = await import("ai");
-    const { embedding } = await embed({
-      model: google.embedding("text-embedding-004"),
-      value: query.replace(/\n/g, " ")
-    });
-    return embedding;
+    if (openRouterKey) {
+      const { createOpenRouter } = await import("@openrouter/ai-sdk-provider");
+      const openrouter = createOpenRouter({ apiKey: openRouterKey });
+      const { embedding } = await embed({
+        model: openrouter.embedding("google/text-embedding-004"),
+        value: query.replace(/\n/g, " ")
+      });
+      return embedding;
+    } else {
+      const { google } = await import("@ai-sdk/google");
+      const { embedding } = await embed({
+        model: google.embedding("text-embedding-004"),
+        value: query.replace(/\n/g, " ")
+      });
+      return embedding;
+    }
   } catch (error) {
     console.error("Failed to generate embedding for search:", error);
     return null;
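
The first hunk changes provider selection in generateQueryEmbedding: OPENROUTER_API_KEY is now checked first, the existing @ai-sdk/google path is used only when it is absent, and vector search is skipped only when neither key is set. A minimal sketch of that selection order in isolation (illustrative helper, not exported by the package):

// Illustrative only: mirrors the key-selection order introduced in this hunk.
// neither key set        -> null (vector search is skipped)
// OPENROUTER_API_KEY set -> OpenRouter provider, model "google/text-embedding-004"
// Google key only        -> @ai-sdk/google provider, model "text-embedding-004"
function pickEmbeddingProvider(env = process.env) {
  if (!env.OPENROUTER_API_KEY && !env.GOOGLE_GENERATIVE_AI_API_KEY) return null;
  return env.OPENROUTER_API_KEY
    ? { provider: "openrouter", model: "google/text-embedding-004" }
    : { provider: "google", model: "text-embedding-004" };
}
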
@@ -999,8 +1010,8 @@ async function queryBrain(supabase, userId, projectId, query, limit = 8, thresho
   }
   const embedding = await generateQueryEmbedding(query);
   let memories = [];
-  console.log(`Searching brain for "${query}" (limit: ${limit}, threshold: ${threshold})`);
-  console.log(`Embedding present: ${!!embedding}`);
+  console.error(`Searching brain for "${query}" (limit: ${limit}, threshold: ${threshold})`);
+  console.error(`Embedding present: ${!!embedding}`);
   const { data: searchResults, error: searchError } = await supabase.rpc("hybrid_search_memories", {
     p_project_id: projectId,
     p_query: query,
@@ -1022,7 +1033,7 @@ async function queryBrain(supabase, userId, projectId, query, limit = 8, thresho
       }));
     }
   } else if (searchResults) {
-    console.log(`Found ${searchResults.length} results from RPC`);
+    console.error(`Found ${searchResults.length} results from RPC`);
     memories = searchResults.map((m) => ({
       id: m.id,
       content: m.content,
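
The remaining hunks switch the queryBrain diagnostics from console.log to console.error. For an MCP server speaking JSON-RPC over stdio, stdout is reserved for protocol messages, so routing diagnostics to stderr keeps them out of the protocol stream (this rationale is inferred from the change; the package does not state it). A minimal sketch of the same pattern (hypothetical helper, not part of the published code):

// Hypothetical stderr-only logger for a stdio MCP server.
// stdout carries the JSON-RPC stream, so diagnostics go to stderr via console.error.
function logDiagnostic(message, ...args) {
  console.error("[rigstate-mcp] " + message, ...args);
}

logDiagnostic("Embedding present: true");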