@contentgrowth/llm-service 0.6.99 → 0.7.0
package/package.json
CHANGED
@@ -111,6 +111,16 @@ export class GeminiProvider extends BaseLLMProvider {
           response: { content: msg.content },
         }
       }];
+
+      // Fix for JSON mode: If JSON is requested, remind the model to output JSON after tool execution
+      // This is necessary because strict JSON mode is disabled when tools are present.
+      if (options.responseFormat === 'json' || options.responseFormat?.type === 'json_schema' || options.responseSchema) {
+        parts.push({ text: "Please ensure the final response is valid JSON as per the system instructions." });
+      } else {
+        // Generic reminder to help model stay on track with system prompt instructions (e.g. formatting)
+        // even if no specific JSON mode is configured.
+        parts.push({ text: "Please ensure the final response follows the system prompt instructions." });
+      }
       break;
     default:
       return null;