@contentgrowth/llm-service 0.6.97 → 0.6.99

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@contentgrowth/llm-service",
-  "version": "0.6.97",
+  "version": "0.6.99",
   "description": "Unified LLM Service for Content Growth",
   "main": "src/index.js",
   "type": "module",
@@ -102,7 +102,7 @@ export class GeminiProvider extends BaseLLMProvider {
         }
         break;
       case 'tool':
-        role = 'function';
+        role = 'user';
         const preceding_message = messages[index - 1];
         const tool_call = preceding_message?.tool_calls?.find(tc => tc.id === msg.tool_call_id);
         parts = [{
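
Note on this hunk: tool results are now sent back to Gemini as a 'user'-role content entry carrying a functionResponse part, rather than the older 'function' role. A minimal sketch of how the full case might read once the truncated parts assignment is filled in (the wrapper shape inside response is an assumption, not taken from the diff):

      case 'tool':
        // Gemini expects function results as a functionResponse part; the newer API
        // accepts these under role 'user', which is why the role mapping changed.
        role = 'user';
        const preceding_message = messages[index - 1];
        const tool_call = preceding_message?.tool_calls?.find(tc => tc.id === msg.tool_call_id);
        parts = [{
          functionResponse: {
            name: tool_call?.function?.name,       // assumed: name taken from the matching tool call
            response: { content: msg.content }     // assumed wrapper around the tool output
          }
        }];
        break;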
@@ -139,7 +139,15 @@ export class GeminiProvider extends BaseLLMProvider {
     }
 
     if (tools && tools.length > 0) {
-      requestOptions.tools = [{ functionDeclarations: tools.map(t => t.function) }];
+      requestOptions.config.tools = [{ functionDeclarations: tools.map(t => t.function) }];
+      // CRITICAL: Cannot enforce JSON mode (responseMimeType/responseSchema) when tools are present
+      // because the model needs to be able to return tool calls (which are not JSON text).
+      // We must rely on the system prompt for JSON formatting in this case.
+      if (requestOptions.config.responseMimeType === 'application/json') {
+        console.warn('[GeminiProvider] Disabling strict JSON mode because tools are present. Relying on system prompt.');
+        delete requestOptions.config.responseMimeType;
+        delete requestOptions.config.responseSchema;
+      }
     }
 
     console.log('[GeminiProvider] generateContent request:', JSON.stringify(requestOptions, null, 2));
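
For context, moving tools under requestOptions.config matches the @google/genai SDK, where generation settings (tools, responseMimeType, responseSchema) all live in a config object passed to models.generateContent. A minimal sketch of the resulting call, assuming that SDK; the model name and example inputs are illustrative, not taken from the package:

import { GoogleGenAI } from '@google/genai';

// Illustrative inputs; the real provider builds these from chat messages and tool specs.
const contents = [{ role: 'user', parts: [{ text: 'What is the weather in Paris?' }] }];
const tools = [{ function: { name: 'get_weather', description: 'Look up current weather', parameters: { type: 'object', properties: { city: { type: 'string' } } } } }];

const ai = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });

const requestOptions = {
  model: 'gemini-2.0-flash',
  contents,
  config: {
    tools: [{ functionDeclarations: tools.map(t => t.function) }],
    // responseMimeType/responseSchema deliberately omitted: with tools attached the model
    // may answer with a function call rather than JSON text, so JSON formatting is left
    // to the system prompt, mirroring the guard added in the diff above.
  },
};

const response = await ai.models.generateContent(requestOptions);
console.log(response.functionCalls ?? response.text);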