@contentgrowth/llm-service 0.7.5 → 0.7.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@contentgrowth/llm-service",
-  "version": "0.7.5",
+  "version": "0.7.7",
   "description": "Unified LLM Service for Content Growth",
   "main": "src/index.js",
   "type": "module",
@@ -15,8 +15,8 @@
   "author": "Content Growth",
   "license": "MIT",
   "dependencies": {
-    "@google/genai": "^1.
-    "openai": "^6.
+    "@google/genai": "^1.34.0",
+    "openai": "^6.15.0"
   },
   "devDependencies": {
     "dotenv": "^17.2.3"
package/src/index.js
CHANGED
@@ -4,6 +4,6 @@ export { BaseConfigProvider, DefaultConfigProvider } from './llm/config-provider
 export { MODEL_CONFIGS } from './llm/config-manager.js';
 export { OpenAIProvider } from './llm/providers/openai-provider.js';
 export { GeminiProvider } from './llm/providers/gemini-provider.js';
-export { extractJsonFromResponse } from './llm/json-utils.js';
+export { extractJsonFromResponse, extractTextAndJson } from './llm/json-utils.js';
 export { FINISH_REASONS } from './llm/providers/base-provider.js';
 
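For reference, a minimal import sketch of the newly re-exported helper, assuming the published package name above (the consuming file is hypothetical):

// consumer.js (hypothetical) - extractTextAndJson is re-exported from the package root as of 0.7.7
import { extractJsonFromResponse, extractTextAndJson } from '@contentgrowth/llm-service';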
package/src/llm/json-utils.js
CHANGED
@@ -97,3 +97,51 @@ export function extractJsonFromResponse(text) {
   // If no valid JSON could be extracted by any method, return null.
   return null;
 }
+
+/**
+ * Generic helper to separate conversational text from a structured JSON payload.
+ * Supports both "Text + JSON" and "JSON + Text" patterns.
+ *
+ * @param {string} input - The full response string
+ * @returns {{text: string, json: object|null}}
+ */
+export function extractTextAndJson(input) {
+  if (!input) return { text: '', json: null };
+
+  // 1. Try to extract JSON using the existing robust extractor
+  const json = extractJsonFromResponse(input);
+
+  // 2. If no JSON found, return full input as text
+  if (!json) {
+    return { text: input, json: null };
+  }
+
+  // 3. If JSON found, we need to remove the JSON block to get the clean text
+  let text = input;
+
+  // Try fenced block first (most reliable) - same regex as extractJsonFromResponse
+  const fencedRegex = /```(?:json)?\s*({[\s\S]*?})\s*```/;
+  const fencedMatch = input.match(fencedRegex);
+
+  if (fencedMatch) {
+    // Replace the entire fenced block with empty string to leave just the text
+    // This handles both "Text + JSON" and "JSON + Text" patterns
+    text = input.replace(fencedMatch[0], '').trim();
+    return { text, json };
+  }
+
+  // Try brace extraction as fallback - same logic as extractJsonFromResponse
+  const firstBrace = input.indexOf('{');
+  const lastBrace = input.lastIndexOf('}');
+
+  if (firstBrace !== -1 && lastBrace > firstBrace) {
+    // Remove the brace block, keeping text before and after
+    const pre = input.substring(0, firstBrace);
+    const post = input.substring(lastBrace + 1);
+    text = (pre + post).trim();
+    return { text, json };
+  }
+
+  // Fallback: Return original text if we couldn't cleanly separate it
+  return { text: input, json };
+}
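As a rough usage sketch of the new helper (the sample strings are hypothetical, and it assumes extractJsonFromResponse successfully parses the embedded payload):

import { extractTextAndJson } from '@contentgrowth/llm-service';

// "Text + JSON" pattern: the fenced block is stripped and returned separately
const fenced = 'Here is the result:\n```json\n{ "ok": true }\n```';
console.log(extractTextAndJson(fenced));
// -> { text: 'Here is the result:', json: { ok: true } }

// "JSON + Text" pattern without fences: the brace block is removed via the fallback path
const bare = '{ "ok": true } Let me know if you need changes.';
console.log(extractTextAndJson(bare));
// -> { text: 'Let me know if you need changes.', json: { ok: true } }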
package/src/llm/providers/base-provider.js
CHANGED
@@ -113,4 +113,15 @@ export class BaseLLMProvider {
   async getVideoGenerationStatus(operationName) {
     throw new Error('Video generation not supported by this provider');
   }
+
+  /**
+   * Helper to get the last 6 digits of the API key for logging.
+   * @returns {string} "..." + last 6 chars, or "not_set"
+   */
+  _getMaskedApiKey() {
+    const key = this.config.apiKey;
+    if (!key) return 'not_set';
+    if (key.length <= 6) return '...';
+    return '...' + key.slice(-6);
+  }
 }
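As a quick illustration of the masking behavior (this is a standalone sketch mirroring _getMaskedApiKey(), and the key value is made up):

// Hypothetical key; only the last 6 characters ever reach the logs
const key = 'sk-test-abc123xyz789';
const masked = !key ? 'not_set'
  : key.length <= 6 ? '...'
  : '...' + key.slice(-6);
console.log(masked); // "...xyz789"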
package/src/llm/providers/gemini-provider.js
CHANGED
@@ -186,7 +186,7 @@ export class GeminiProvider extends BaseLLMProvider {
     try {
       response = await this.client.models.generateContent(requestOptions);
     } catch (error) {
-      console.error(
+      console.error(`[GeminiProvider] generateContent failed (API Key: ${this._getMaskedApiKey()}):`, error);
       throw error;
     }
 
@@ -486,7 +486,7 @@ export class GeminiProvider extends BaseLLMProvider {
 
       return { operationName: operation.name };
     } catch (error) {
-      console.error(
+      console.error(`[GeminiProvider] startVideoGeneration failed (API Key: ${this._getMaskedApiKey()}):`, error);
       throw error;
     }
   }
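For illustration, the resulting log line would look roughly like this (the key tail is hypothetical and the error output is elided):

// [GeminiProvider] generateContent failed (API Key: ...xyz789): Error: ...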
package/src/llm/providers/openai-provider.js
CHANGED
@@ -56,7 +56,13 @@ export class OpenAIProvider extends BaseLLMProvider {
       requestPayload.response_format = this._buildResponseFormat(options);
     }
 
-
+    let response;
+    try {
+      response = await this.client.chat.completions.create(requestPayload);
+    } catch (error) {
+      console.error(`[OpenAIProvider] chat completion failed (API Key: ${this._getMaskedApiKey()}):`, error);
+      throw error;
+    }
     const message = response.choices[0].message;
 
     // Validate that we have EITHER content OR tool calls