@solongate/proxy 0.25.0 → 0.25.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +11 -9
  2. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -134,8 +134,8 @@ function parseArgs(argv) {
134
134
  let policyId;
135
135
  let advancedDetection = true;
136
136
  let aiJudgeEnabled = false;
137
- let aiJudgeModel = "llama3.2";
138
- let aiJudgeEndpoint = "http://localhost:11434";
137
+ let aiJudgeModel = "llama-3.1-8b-instant";
138
+ let aiJudgeEndpoint = "https://api.groq.com/openai";
139
139
  let aiJudgeApiKey;
140
140
  let aiJudgeTimeout = 5e3;
141
141
  let separatorIndex = args.indexOf("--");
@@ -203,6 +203,9 @@ function parseArgs(argv) {
203
203
  break;
204
204
  }
205
205
  }
206
+ if (!aiJudgeApiKey) {
207
+ aiJudgeApiKey = process.env.GROQ_API_KEY;
208
+ }
206
209
  const aiJudge = aiJudgeEnabled ? {
207
210
  enabled: true,
208
211
  model: aiJudgeModel,
@@ -307,7 +310,8 @@ function parseArgs(argv) {
307
310
  port,
308
311
  policyPath: resolvedPolicyPath ?? void 0,
309
312
  policyId,
310
- advancedDetection: advancedDetection ? { enabled: true } : void 0
313
+ advancedDetection: advancedDetection ? { enabled: true } : void 0,
314
+ aiJudge
311
315
  };
312
316
  }
313
317
  function resolvePolicyPath(source) {
@@ -6176,7 +6180,7 @@ var AiJudge = class {
6176
6180
  }
6177
6181
  }
6178
6182
  /**
6179
- * Call the LLM endpoint. Supports Ollama and OpenAI-compatible APIs.
6183
+ * Call the LLM endpoint. Supports Groq, OpenAI, and Ollama.
6180
6184
  */
6181
6185
  async callLLM(userMessage) {
6182
6186
  const controller = new AbortController();
@@ -6194,10 +6198,7 @@ var AiJudge = class {
6194
6198
  { role: "user", content: userMessage }
6195
6199
  ],
6196
6200
  stream: false,
6197
- options: {
6198
- temperature: 0,
6199
- num_predict: 200
6200
- }
6201
+ options: { temperature: 0, num_predict: 200 }
6201
6202
  });
6202
6203
  } else {
6203
6204
  url = `${this.config.endpoint}/v1/chat/completions`;
@@ -6221,7 +6222,8 @@ var AiJudge = class {
6221
6222
  signal: controller.signal
6222
6223
  });
6223
6224
  if (!res.ok) {
6224
- throw new Error(`LLM endpoint returned ${res.status}: ${res.statusText}`);
6225
+ const errBody = await res.text().catch(() => "");
6226
+ throw new Error(`LLM endpoint returned ${res.status}: ${errBody.slice(0, 200)}`);
6225
6227
  }
6226
6228
  const data = await res.json();
6227
6229
  if (this.isOllamaEndpoint) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@solongate/proxy",
3
- "version": "0.25.0",
3
+ "version": "0.25.1",
4
4
  "description": "MCP security proxy — protect any MCP server with customizable policies, path/command constraints, rate limiting, and audit logging. Zero code changes required.",
5
5
  "type": "module",
6
6
  "bin": {