@solongate/proxy 0.25.0 → 0.25.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +19 -9
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -134,8 +134,8 @@ function parseArgs(argv) {
134
134
  let policyId;
135
135
  let advancedDetection = true;
136
136
  let aiJudgeEnabled = false;
137
- let aiJudgeModel = "llama3.2";
138
- let aiJudgeEndpoint = "http://localhost:11434";
137
+ let aiJudgeModel = "llama-3.1-8b-instant";
138
+ let aiJudgeEndpoint = "https://api.groq.com/openai";
139
139
  let aiJudgeApiKey;
140
140
  let aiJudgeTimeout = 5e3;
141
141
  let separatorIndex = args.indexOf("--");
@@ -203,6 +203,17 @@ function parseArgs(argv) {
203
203
  break;
204
204
  }
205
205
  }
206
+ if (!aiJudgeApiKey) {
207
+ const dotenvPath = resolve(".env");
208
+ if (existsSync(dotenvPath)) {
209
+ const dotenvContent = readFileSync(dotenvPath, "utf-8");
210
+ const groqMatch = dotenvContent.match(/^GROQ_API_KEY=(.+)/m);
211
+ if (groqMatch) aiJudgeApiKey = groqMatch[1].trim();
212
+ }
213
+ if (!aiJudgeApiKey) {
214
+ aiJudgeApiKey = process.env.GROQ_API_KEY;
215
+ }
216
+ }
206
217
  const aiJudge = aiJudgeEnabled ? {
207
218
  enabled: true,
208
219
  model: aiJudgeModel,
@@ -307,7 +318,8 @@ function parseArgs(argv) {
307
318
  port,
308
319
  policyPath: resolvedPolicyPath ?? void 0,
309
320
  policyId,
310
- advancedDetection: advancedDetection ? { enabled: true } : void 0
321
+ advancedDetection: advancedDetection ? { enabled: true } : void 0,
322
+ aiJudge
311
323
  };
312
324
  }
313
325
  function resolvePolicyPath(source) {
@@ -6176,7 +6188,7 @@ var AiJudge = class {
6176
6188
  }
6177
6189
  }
6178
6190
  /**
6179
- * Call the LLM endpoint. Supports Ollama and OpenAI-compatible APIs.
6191
+ * Call the LLM endpoint. Supports Groq, OpenAI, and Ollama.
6180
6192
  */
6181
6193
  async callLLM(userMessage) {
6182
6194
  const controller = new AbortController();
@@ -6194,10 +6206,7 @@ var AiJudge = class {
6194
6206
  { role: "user", content: userMessage }
6195
6207
  ],
6196
6208
  stream: false,
6197
- options: {
6198
- temperature: 0,
6199
- num_predict: 200
6200
- }
6209
+ options: { temperature: 0, num_predict: 200 }
6201
6210
  });
6202
6211
  } else {
6203
6212
  url = `${this.config.endpoint}/v1/chat/completions`;
@@ -6221,7 +6230,8 @@ var AiJudge = class {
6221
6230
  signal: controller.signal
6222
6231
  });
6223
6232
  if (!res.ok) {
6224
- throw new Error(`LLM endpoint returned ${res.status}: ${res.statusText}`);
6233
+ const errBody = await res.text().catch(() => "");
6234
+ throw new Error(`LLM endpoint returned ${res.status}: ${errBody.slice(0, 200)}`);
6225
6235
  }
6226
6236
  const data = await res.json();
6227
6237
  if (this.isOllamaEndpoint) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@solongate/proxy",
3
- "version": "0.25.0",
3
+ "version": "0.25.2",
4
4
  "description": "MCP security proxy — protect any MCP server with customizable policies, path/command constraints, rate limiting, and audit logging. Zero code changes required.",
5
5
  "type": "module",
6
6
  "bin": {