@renderify/llm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +83 -0
- package/dist/llm.cjs.js +1769 -0
- package/dist/llm.cjs.js.map +1 -0
- package/dist/llm.d.mts +117 -0
- package/dist/llm.d.ts +117 -0
- package/dist/llm.esm.js +1746 -0
- package/dist/llm.esm.js.map +1 -0
- package/package.json +59 -0
package/dist/llm.esm.js
ADDED
@@ -0,0 +1,1746 @@
// src/providers/anthropic.ts
import { isRuntimePlan } from "@renderify/ir";

// src/providers/shared.ts
function pickString(source, ...keys) {
  for (const key of keys) {
    const value = source[key];
    if (typeof value === "string" && value.trim().length > 0) {
      return value.trim();
    }
  }
  return void 0;
}
function pickPositiveInt(source, ...keys) {
  for (const key of keys) {
    const value = source[key];
    if (typeof value === "number" && Number.isFinite(value) && value > 0) {
      return Math.floor(value);
    }
    if (typeof value === "string" && value.trim().length > 0) {
      const parsed = Number(value);
      if (Number.isFinite(parsed) && parsed > 0) {
        return Math.floor(parsed);
      }
    }
  }
  return void 0;
}
function pickFetch(source, key) {
  const value = source[key];
  if (typeof value === "function") {
    return value;
  }
  return void 0;
}
function resolveFetch(fetchImpl, missingMessage) {
  if (fetchImpl) {
    return fetchImpl;
  }
  if (typeof globalThis.fetch === "function") {
    return globalThis.fetch.bind(globalThis);
  }
  throw new Error(missingMessage);
}
function createTimeoutAbortScope(timeoutMs, upstreamSignal) {
  const controller = new AbortController();
  const timeout = setTimeout(() => {
    controller.abort();
  }, timeoutMs);
  let onAbort;
  if (upstreamSignal) {
    if (upstreamSignal.aborted) {
      controller.abort();
    } else {
      onAbort = () => {
        controller.abort();
      };
      upstreamSignal.addEventListener("abort", onAbort, { once: true });
    }
  }
  return {
    signal: controller.signal,
    release() {
      clearTimeout(timeout);
      if (upstreamSignal && onAbort) {
        upstreamSignal.removeEventListener("abort", onAbort);
      }
    }
  };
}
async function withTimeoutAbortScope(timeoutMs, upstreamSignal, operation) {
  const scope = createTimeoutAbortScope(timeoutMs, upstreamSignal);
  try {
    return await operation(scope.signal);
  } finally {
    scope.release();
  }
}
function formatContext(context) {
  if (!context || Object.keys(context).length === 0) {
    return "";
  }
  try {
    return JSON.stringify(context);
  } catch {
    return "";
  }
}
function tryParseJson(raw) {
  const fenced = raw.match(/```(?:json)?\s*([\s\S]*?)\s*```/i);
  const payload = fenced ? fenced[1] : raw;
  try {
    return {
      ok: true,
      value: JSON.parse(payload)
    };
  } catch (error) {
    return {
      ok: false,
      error: error instanceof Error ? error.message : String(error)
    };
  }
}
async function readErrorResponse(response) {
  try {
    const body = await response.json();
    if (body.error?.message) {
      return body.error.message;
    }
    return JSON.stringify(body);
  } catch {
    try {
      return await response.text();
    } catch {
      return "unknown error";
    }
  }
}
function consumeSseEvents(buffer, flush = false) {
  const events = [];
  const separator = /\r?\n\r?\n/g;
  let cursor = 0;
  let match = separator.exec(buffer);
  while (match) {
    const block = buffer.slice(cursor, match.index);
    const parsed = parseSseEventBlock(block);
    if (parsed) {
      events.push(parsed);
    }
    cursor = match.index + match[0].length;
    match = separator.exec(buffer);
  }
  let remaining = buffer.slice(cursor);
  if (flush) {
    const tail = parseSseEventBlock(remaining);
    if (tail) {
      events.push(tail);
    }
    remaining = "";
  }
  return {
    events,
    remaining
  };
}
function parseSseEventBlock(block) {
  const lines = block.split(/\r?\n/);
  let eventName;
  const dataLines = [];
  for (const line of lines) {
    if (line.startsWith(":")) {
      continue;
    }
    if (line.startsWith("event:")) {
      eventName = line.slice(6).trim();
      continue;
    }
    if (line.startsWith("data:")) {
      dataLines.push(line.slice(5).trimStart());
    }
  }
  if (dataLines.length === 0) {
    return void 0;
  }
  const data = dataLines.join("\n").trim();
  if (data.length === 0) {
    return void 0;
  }
  return {
    event: eventName,
    data
  };
}
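
// A minimal sketch of how the two SSE helpers above compose: events split on
// blank lines, a partial block stays in `remaining` until more bytes arrive,
// and `flush` drains an unterminated tail. The buffer contents here are
// hypothetical, not real provider output.
function exampleConsumeSse() {
  const buffer = 'event: message_start\ndata: {"n":1}\n\ndata: partial';
  let out = consumeSseEvents(buffer);
  // out.events -> [{ event: "message_start", data: '{"n":1}' }]
  // out.remaining -> 'data: partial' (held back: no blank-line terminator yet)
  out = consumeSseEvents(out.remaining + " tail", true);
  // flush=true parses the tail block too -> [{ event: undefined, data: "partial tail" }]
  return out.events;
}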

// src/providers/anthropic.ts
var DEFAULT_BASE_URL = "https://api.anthropic.com/v1";
var DEFAULT_MODEL = "claude-3-5-sonnet-latest";
var DEFAULT_TIMEOUT_MS = 3e4;
var DEFAULT_MAX_TOKENS = 2048;
var DEFAULT_ANTHROPIC_VERSION = "2023-06-01";
var AnthropicLLMInterpreter = class {
  templates = /* @__PURE__ */ new Map();
  options = {
    baseUrl: DEFAULT_BASE_URL,
    model: DEFAULT_MODEL,
    timeoutMs: DEFAULT_TIMEOUT_MS,
    maxTokens: DEFAULT_MAX_TOKENS,
    version: DEFAULT_ANTHROPIC_VERSION,
    apiKey: void 0,
    systemPrompt: void 0
  };
  fetchImpl;
  constructor(options = {}) {
    this.configure({ ...options });
    if (options.fetchImpl) {
      this.fetchImpl = options.fetchImpl;
    }
  }
  configure(options) {
    const apiKey = pickString(options, "apiKey", "llmApiKey");
    const model = pickString(options, "model", "llmModel");
    const baseUrl = pickString(options, "baseUrl", "llmBaseUrl");
    const systemPrompt = pickString(options, "systemPrompt");
    const version = pickString(options, "version", "anthropicVersion");
    const timeoutMs = pickPositiveInt(
      options,
      "timeoutMs",
      "llmRequestTimeoutMs"
    );
    const maxTokens = pickPositiveInt(options, "maxTokens");
    const fetchImpl = pickFetch(options, "fetchImpl");
    this.options = {
      ...this.options,
      ...apiKey !== void 0 ? { apiKey } : {},
      ...model !== void 0 ? { model } : {},
      ...baseUrl !== void 0 ? { baseUrl } : {},
      ...systemPrompt !== void 0 ? { systemPrompt } : {},
      ...version !== void 0 ? { version } : {},
      ...timeoutMs !== void 0 ? { timeoutMs } : {},
      ...maxTokens !== void 0 ? { maxTokens } : {}
    };
    if (fetchImpl) {
      this.fetchImpl = fetchImpl;
    }
  }
  async generateResponse(req) {
    const payload = await this.requestMessages(
      {
        model: this.options.model,
        max_tokens: this.options.maxTokens,
        system: this.resolveSystemPrompt(req),
        messages: [
          {
            role: "user",
            content: this.buildUserPrompt(req)
          }
        ]
      },
      req.signal
    );
    const text = this.extractText(payload);
    return {
      text,
      tokensUsed: this.extractTotalTokens(payload),
      model: payload.model ?? this.options.model,
      raw: {
        mode: "text",
        responseId: payload.id
      }
    };
  }
  async *generateResponseStream(req) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in AnthropicLLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "Anthropic apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    const abortScope = createTimeoutAbortScope(
      this.options.timeoutMs,
      req.signal
    );
    let aggregatedText = "";
    let chunkIndex = 0;
    let tokensUsed;
    let inputTokens;
    let outputTokens;
    let model = this.options.model;
    let responseId;
    let doneEmitted = false;
    const processEvents = (events) => {
      const chunks = [];
      for (const event of events) {
        if (event.data === "[DONE]" || event.event === "message_stop") {
          if (!doneEmitted) {
            chunkIndex += 1;
            doneEmitted = true;
            chunks.push({
              delta: "",
              text: aggregatedText,
              done: true,
              index: chunkIndex,
              tokensUsed,
              model,
              raw: {
                mode: "stream",
                responseId,
                done: true,
                event: event.event ?? "done"
              }
            });
          }
          continue;
        }
        let payload;
        try {
          payload = JSON.parse(event.data);
        } catch (error) {
          throw new Error(
            `Anthropic stream chunk parse failed: ${error instanceof Error ? error.message : String(error)}`
          );
        }
        if (payload.error?.message) {
          throw new Error(`Anthropic error: ${payload.error.message}`);
        }
        if (typeof payload.message?.id === "string") {
          responseId = payload.message.id;
        }
        if (typeof payload.message?.model === "string" && payload.message.model.trim().length > 0) {
          model = payload.message.model;
        }
        const usageInput = payload.message?.usage?.input_tokens;
        const usageOutput = payload.message?.usage?.output_tokens ?? payload.usage?.output_tokens;
        if (typeof usageInput === "number") {
          inputTokens = usageInput;
        }
        if (typeof usageOutput === "number") {
          outputTokens = usageOutput;
        }
        if (typeof inputTokens === "number" || typeof outputTokens === "number") {
          tokensUsed = (inputTokens ?? 0) + (outputTokens ?? 0);
        }
        const deltaText = payload.type === "content_block_delta" && typeof payload.delta?.text === "string" ? payload.delta.text : "";
        if (deltaText.length === 0) {
          continue;
        }
        aggregatedText += deltaText;
        chunkIndex += 1;
        chunks.push({
          delta: deltaText,
          text: aggregatedText,
          done: false,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            responseId,
            event: event.event ?? payload.type,
            chunk: payload
          }
        });
      }
      return chunks;
    };
    try {
      const response = await fetchImpl(
        `${this.options.baseUrl.replace(/\/$/, "")}/messages`,
        {
          method: "POST",
          headers: {
            "content-type": "application/json",
            "x-api-key": apiKey,
            "anthropic-version": this.options.version
          },
          body: JSON.stringify({
            model: this.options.model,
            max_tokens: this.options.maxTokens,
            system: this.resolveSystemPrompt(req),
            stream: true,
            messages: [
              {
                role: "user",
                content: this.buildUserPrompt(req)
              }
            ]
          }),
          signal: abortScope.signal
        }
      );
      if (!response.ok) {
        const details = await readErrorResponse(response);
        throw new Error(
          `Anthropic request failed (${response.status}): ${details}`
        );
      }
      if (!response.body) {
        throw new Error("Anthropic streaming response body is empty");
      }
      const decoder = new TextDecoder();
      const reader = response.body.getReader();
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        if (value) {
          buffer += decoder.decode(value, { stream: true });
        }
        const parsedEvents = consumeSseEvents(buffer);
        buffer = parsedEvents.remaining;
        for (const chunk of processEvents(parsedEvents.events)) {
          yield chunk;
        }
      }
      buffer += decoder.decode();
      const finalEvents = consumeSseEvents(buffer, true);
      for (const chunk of processEvents(finalEvents.events)) {
        yield chunk;
      }
      if (!doneEmitted) {
        chunkIndex += 1;
        doneEmitted = true;
        yield {
          delta: "",
          text: aggregatedText,
          done: true,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            responseId,
            done: true,
            reason: "eof"
          }
        };
      }
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (req.signal?.aborted) {
          throw new Error("Anthropic request aborted by caller");
        }
        throw new Error(
          `Anthropic request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    } finally {
      abortScope.release();
    }
  }
  async generateStructuredResponse(req) {
    if (req.format !== "runtime-plan") {
      return {
        text: "",
        valid: false,
        errors: [`Unsupported structured format: ${String(req.format)}`],
        model: this.options.model
      };
    }
    const payload = await this.requestMessages(
      {
        model: this.options.model,
        max_tokens: this.options.maxTokens,
        system: this.resolveStructuredSystemPrompt(req),
        messages: [
          {
            role: "user",
            content: this.buildUserPrompt(req)
          }
        ]
      },
      req.signal
    );
    const text = this.extractText(payload);
    if (text.trim().length === 0) {
      return {
        text,
        valid: false,
        errors: ["Structured response content is empty"],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    const parsed = tryParseJson(text);
    if (!parsed.ok) {
      return {
        text,
        valid: false,
        errors: [`Structured JSON parse failed: ${parsed.error}`],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    if (!isRuntimePlan(parsed.value)) {
      return {
        text,
        value: parsed.value,
        valid: false,
        errors: ["Structured payload is not a valid RuntimePlan"],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    return {
      text,
      value: parsed.value,
      valid: true,
      tokensUsed: this.extractTotalTokens(payload),
      model: payload.model ?? this.options.model,
      raw: {
        mode: "structured",
        responseId: payload.id
      }
    };
  }
  setPromptTemplate(templateName, templateContent) {
    this.templates.set(templateName, templateContent);
  }
  getPromptTemplate(templateName) {
    return this.templates.get(templateName);
  }
  resolveSystemPrompt(req) {
    const templateSystem = this.templates.get("default");
    const configuredSystem = this.options.systemPrompt;
    const requestSystem = req.systemPrompt;
    const candidates = [configuredSystem, templateSystem, requestSystem].filter((entry) => typeof entry === "string").map((entry) => entry.trim()).filter((entry) => entry.length > 0);
    if (candidates.length === 0) {
      return void 0;
    }
    return candidates.join("\n\n");
  }
  resolveStructuredSystemPrompt(req) {
    const template = this.templates.get("runtime-plan");
    const strictHint = req.strict === false ? "false" : "true";
    const defaultPrompt = [
      "You generate RuntimePlan JSON for Renderify.",
      "Return only JSON with no markdown or explanations.",
      "Schema priority: id/version/root/capabilities must be valid.",
      `Strict mode: ${strictHint}.`
    ].join(" ");
    const combined = [this.resolveSystemPrompt(req), template, defaultPrompt].filter((entry) => typeof entry === "string").map((entry) => entry.trim()).filter((entry) => entry.length > 0).join("\n\n");
    return combined;
  }
  buildUserPrompt(req) {
    const contextSnippet = formatContext(req.context);
    if (!contextSnippet) {
      return req.prompt;
    }
    return `${req.prompt}

Context:
${contextSnippet}`;
  }
  async requestMessages(body, signal) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in AnthropicLLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "Anthropic apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    try {
      return await withTimeoutAbortScope(
        this.options.timeoutMs,
        signal,
        async (timeoutSignal) => {
          const response = await fetchImpl(
            `${this.options.baseUrl.replace(/\/$/, "")}/messages`,
            {
              method: "POST",
              headers: {
                "content-type": "application/json",
                "x-api-key": apiKey,
                "anthropic-version": this.options.version
              },
              body: JSON.stringify(body),
              signal: timeoutSignal
            }
          );
          if (!response.ok) {
            const details = await readErrorResponse(response);
            throw new Error(
              `Anthropic request failed (${response.status}): ${details}`
            );
          }
          const parsed = await response.json();
          if (parsed.error?.message) {
            throw new Error(`Anthropic error: ${parsed.error.message}`);
          }
          return parsed;
        }
      );
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (signal?.aborted) {
          throw new Error("Anthropic request aborted by caller");
        }
        throw new Error(
          `Anthropic request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    }
  }
  extractText(payload) {
    const content = payload.content;
    if (!Array.isArray(content) || content.length === 0) {
      return "";
    }
    return content.map(
      (part) => part.type === "text" && typeof part.text === "string" ? part.text : ""
    ).join("").trim();
  }
  extractTotalTokens(payload) {
    const input = payload.usage?.input_tokens;
    const output = payload.usage?.output_tokens;
    if (typeof input !== "number" && typeof output !== "number") {
      return void 0;
    }
    return (input ?? 0) + (output ?? 0);
  }
};
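
// A minimal consumption sketch for the class above; the key, prompt, and
// timeout values are hypothetical. Chunks carry { delta, text, done, index }
// as built in processEvents, so a caller can render deltas incrementally and
// read the final token count from the done chunk.
async function exampleAnthropicStream() {
  const interpreter = new AnthropicLLMInterpreter({
    apiKey: "hypothetical-key",
    timeoutMs: 15e3
  });
  const controller = new AbortController();
  let finalText = "";
  for await (const chunk of interpreter.generateResponseStream({
    prompt: "Summarize Renderify in one sentence.",
    signal: controller.signal
  })) {
    finalText = chunk.text;
    if (chunk.done) {
      return { text: finalText, tokensUsed: chunk.tokensUsed };
    }
  }
  return { text: finalText };
}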

// src/providers/google.ts
import { isRuntimePlan as isRuntimePlan2 } from "@renderify/ir";
var DEFAULT_BASE_URL2 = "https://generativelanguage.googleapis.com/v1beta";
var DEFAULT_MODEL2 = "gemini-2.0-flash";
var DEFAULT_TIMEOUT_MS2 = 3e4;
var GoogleLLMInterpreter = class {
  templates = /* @__PURE__ */ new Map();
  options = {
    baseUrl: DEFAULT_BASE_URL2,
    model: DEFAULT_MODEL2,
    timeoutMs: DEFAULT_TIMEOUT_MS2,
    apiKey: void 0,
    systemPrompt: void 0
  };
  fetchImpl;
  constructor(options = {}) {
    this.configure({ ...options });
    if (options.fetchImpl) {
      this.fetchImpl = options.fetchImpl;
    }
  }
  configure(options) {
    const apiKey = pickString(options, "apiKey", "llmApiKey");
    const model = pickString(options, "model", "llmModel");
    const baseUrl = pickString(options, "baseUrl", "llmBaseUrl");
    const systemPrompt = pickString(options, "systemPrompt");
    const timeoutMs = pickPositiveInt(
      options,
      "timeoutMs",
      "llmRequestTimeoutMs"
    );
    const fetchImpl = pickFetch(options, "fetchImpl");
    this.options = {
      ...this.options,
      ...apiKey !== void 0 ? { apiKey } : {},
      ...model !== void 0 ? { model } : {},
      ...baseUrl !== void 0 ? { baseUrl } : {},
      ...systemPrompt !== void 0 ? { systemPrompt } : {},
      ...timeoutMs !== void 0 ? { timeoutMs } : {}
    };
    if (fetchImpl) {
      this.fetchImpl = fetchImpl;
    }
  }
  async generateResponse(req) {
    const payload = await this.requestGenerateContent(
      this.buildRequest(req),
      req.signal
    );
    const refusal = this.extractRefusal(payload);
    if (refusal) {
      throw new Error(`Google refused request: ${refusal}`);
    }
    return {
      text: this.extractText(payload),
      tokensUsed: this.extractTotalTokens(payload),
      model: payload.modelVersion ?? this.options.model,
      raw: {
        mode: "text",
        finishReason: payload.candidates?.[0]?.finishReason
      }
    };
  }
  async *generateResponseStream(req) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in GoogleLLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "Google apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    const abortScope = createTimeoutAbortScope(
      this.options.timeoutMs,
      req.signal
    );
    let aggregatedText = "";
    let chunkIndex = 0;
    let tokensUsed;
    let model = this.options.model;
    let doneEmitted = false;
    const processEvents = (events) => {
      const chunks = [];
      for (const event of events) {
        if (event.data === "[DONE]") {
          if (!doneEmitted) {
            chunkIndex += 1;
            doneEmitted = true;
            chunks.push({
              delta: "",
              text: aggregatedText,
              done: true,
              index: chunkIndex,
              tokensUsed,
              model,
              raw: {
                mode: "stream",
                done: true
              }
            });
          }
          continue;
        }
        let payload;
        try {
          payload = JSON.parse(event.data);
        } catch (error) {
          throw new Error(
            `Google stream chunk parse failed: ${error instanceof Error ? error.message : String(error)}`
          );
        }
        if (payload.error?.message) {
          throw new Error(`Google error: ${payload.error.message}`);
        }
        if (typeof payload.modelVersion === "string" && payload.modelVersion.trim().length > 0) {
          model = payload.modelVersion;
        }
        const refusal = this.extractRefusal(payload);
        if (refusal) {
          throw new Error(`Google refused request: ${refusal}`);
        }
        const payloadTokens = this.extractTotalTokens(payload);
        if (typeof payloadTokens === "number") {
          tokensUsed = payloadTokens;
        }
        const deltaText = this.extractTextRaw(payload);
        if (deltaText.length === 0) {
          continue;
        }
        aggregatedText += deltaText;
        chunkIndex += 1;
        chunks.push({
          delta: deltaText,
          text: aggregatedText,
          done: false,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            chunk: payload
          }
        });
      }
      return chunks;
    };
    try {
      const response = await fetchImpl(
        `${this.options.baseUrl.replace(/\/$/, "")}/models/${encodeURIComponent(this.options.model)}:streamGenerateContent?alt=sse`,
        {
          method: "POST",
          headers: {
            "content-type": "application/json",
            "x-goog-api-key": apiKey
          },
          body: JSON.stringify(this.buildRequest(req)),
          signal: abortScope.signal
        }
      );
      if (!response.ok) {
        const details = await readErrorResponse(response);
        throw new Error(
          `Google request failed (${response.status}): ${details}`
        );
      }
      if (!response.body) {
        throw new Error("Google streaming response body is empty");
      }
      const decoder = new TextDecoder();
      const reader = response.body.getReader();
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        if (value) {
          buffer += decoder.decode(value, { stream: true });
        }
        const parsedEvents = consumeSseEvents(buffer);
        buffer = parsedEvents.remaining;
        for (const chunk of processEvents(parsedEvents.events)) {
          yield chunk;
        }
      }
      buffer += decoder.decode();
      const finalEvents = consumeSseEvents(buffer, true);
      for (const chunk of processEvents(finalEvents.events)) {
        yield chunk;
      }
      if (!doneEmitted) {
        chunkIndex += 1;
        doneEmitted = true;
        yield {
          delta: "",
          text: aggregatedText,
          done: true,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            done: true,
            reason: "eof"
          }
        };
      }
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (req.signal?.aborted) {
          throw new Error("Google request aborted by caller");
        }
        throw new Error(
          `Google request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    } finally {
      abortScope.release();
    }
  }
  async generateStructuredResponse(req) {
    if (req.format !== "runtime-plan") {
      return {
        text: "",
        valid: false,
        errors: [`Unsupported structured format: ${String(req.format)}`],
        model: this.options.model
      };
    }
    const payload = await this.requestGenerateContent(
      this.buildStructuredRequest(req),
      req.signal
    );
    const refusal = this.extractRefusal(payload);
    if (refusal) {
      return {
        text: "",
        valid: false,
        errors: [`Google refusal: ${refusal}`],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.modelVersion ?? this.options.model,
        raw: {
          mode: "structured",
          finishReason: payload.candidates?.[0]?.finishReason
        }
      };
    }
    const text = this.extractText(payload);
    if (text.trim().length === 0) {
      return {
        text,
        valid: false,
        errors: ["Structured response content is empty"],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.modelVersion ?? this.options.model,
        raw: {
          mode: "structured",
          finishReason: payload.candidates?.[0]?.finishReason
        }
      };
    }
    const parsed = tryParseJson(text);
    if (!parsed.ok) {
      return {
        text,
        valid: false,
        errors: [`Structured JSON parse failed: ${parsed.error}`],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.modelVersion ?? this.options.model,
        raw: {
          mode: "structured",
          finishReason: payload.candidates?.[0]?.finishReason
        }
      };
    }
    if (!isRuntimePlan2(parsed.value)) {
      return {
        text,
        value: parsed.value,
        valid: false,
        errors: ["Structured payload is not a valid RuntimePlan"],
        tokensUsed: this.extractTotalTokens(payload),
        model: payload.modelVersion ?? this.options.model,
        raw: {
          mode: "structured",
          finishReason: payload.candidates?.[0]?.finishReason
        }
      };
    }
    return {
      text,
      value: parsed.value,
      valid: true,
      tokensUsed: this.extractTotalTokens(payload),
      model: payload.modelVersion ?? this.options.model,
      raw: {
        mode: "structured",
        finishReason: payload.candidates?.[0]?.finishReason
      }
    };
  }
  setPromptTemplate(templateName, templateContent) {
    this.templates.set(templateName, templateContent);
  }
  getPromptTemplate(templateName) {
    return this.templates.get(templateName);
  }
  buildRequest(req) {
    const system = this.resolveSystemPrompt(req);
    const body = {
      contents: [
        {
          role: "user",
          parts: [
            {
              text: this.buildUserPrompt(req)
            }
          ]
        }
      ],
      generationConfig: {
        responseMimeType: "text/plain"
      }
    };
    if (system) {
      body.systemInstruction = {
        parts: [
          {
            text: system
          }
        ]
      };
    }
    return body;
  }
  buildStructuredRequest(req) {
    const body = this.buildRequest({
      ...req,
      systemPrompt: this.resolveStructuredSystemPrompt(req)
    });
    body.generationConfig = {
      responseMimeType: "application/json"
    };
    return body;
  }
  resolveSystemPrompt(req) {
    const configuredSystem = this.options.systemPrompt;
    const templateSystem = this.templates.get("default");
    const requestSystem = req.systemPrompt;
    const candidates = [configuredSystem, templateSystem, requestSystem].filter((entry) => typeof entry === "string").map((entry) => entry.trim()).filter((entry) => entry.length > 0);
    if (candidates.length === 0) {
      return void 0;
    }
    return candidates.join("\n\n");
  }
  resolveStructuredSystemPrompt(req) {
    const template = this.templates.get("runtime-plan");
    const strictHint = req.strict === false ? "false" : "true";
    const defaultPrompt = [
      "You generate RuntimePlan JSON for Renderify.",
      "Return only JSON with no markdown or explanations.",
      "Schema priority: id/version/root/capabilities must be valid.",
      `Strict mode: ${strictHint}.`
    ].join(" ");
    return [this.resolveSystemPrompt(req), template, defaultPrompt].filter((entry) => typeof entry === "string").map((entry) => entry.trim()).filter((entry) => entry.length > 0).join("\n\n");
  }
  buildUserPrompt(req) {
    const contextSnippet = formatContext(req.context);
    if (!contextSnippet) {
      return req.prompt;
    }
    return `${req.prompt}

Context:
${contextSnippet}`;
  }
  async requestGenerateContent(body, signal) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in GoogleLLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "Google apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    try {
      return await withTimeoutAbortScope(
        this.options.timeoutMs,
        signal,
        async (timeoutSignal) => {
          const response = await fetchImpl(
            `${this.options.baseUrl.replace(/\/$/, "")}/models/${encodeURIComponent(this.options.model)}:generateContent`,
            {
              method: "POST",
              headers: {
                "content-type": "application/json",
                "x-goog-api-key": apiKey
              },
              body: JSON.stringify(body),
              signal: timeoutSignal
            }
          );
          if (!response.ok) {
            const details = await readErrorResponse(response);
            throw new Error(
              `Google request failed (${response.status}): ${details}`
            );
          }
          const parsed = await response.json();
          if (parsed.error?.message) {
            throw new Error(`Google error: ${parsed.error.message}`);
          }
          return parsed;
        }
      );
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (signal?.aborted) {
          throw new Error("Google request aborted by caller");
        }
        throw new Error(
          `Google request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    }
  }
  extractText(payload) {
    return this.extractTextRaw(payload).trim();
  }
  extractTextRaw(payload) {
    const candidate = payload.candidates?.[0];
    if (!candidate) {
      return "";
    }
    return (candidate.content?.parts ?? []).map((part) => typeof part.text === "string" ? part.text : "").join("");
  }
  extractRefusal(payload) {
    const blockReason = payload.promptFeedback?.blockReason;
    if (typeof blockReason === "string" && blockReason.trim().length > 0) {
      const details = payload.promptFeedback?.blockReasonMessage;
      if (typeof details === "string" && details.trim().length > 0) {
        return `${blockReason.trim()}: ${details.trim()}`;
      }
      return blockReason.trim();
    }
    const finishReason = payload.candidates?.[0]?.finishReason;
    if (finishReason === "SAFETY" || finishReason === "RECITATION" || finishReason === "BLOCKLIST" || finishReason === "PROHIBITED_CONTENT" || finishReason === "SPII") {
      return `finishReason=${finishReason}`;
    }
    return void 0;
  }
  extractTotalTokens(payload) {
    const usage = payload.usageMetadata;
    if (!usage) {
      return void 0;
    }
    if (typeof usage.totalTokenCount === "number") {
      return usage.totalTokenCount;
    }
    const prompt = usage.promptTokenCount;
    const candidates = usage.candidatesTokenCount;
    if (typeof prompt !== "number" && typeof candidates !== "number") {
      return void 0;
    }
    return (prompt ?? 0) + (candidates ?? 0);
  }
};
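
// A minimal sketch of the non-streaming structured path above, with an
// injected fetchImpl so no real network call or key is involved. The canned
// body carries only the fields extractText/extractTotalTokens read; whether
// result.valid ends up true depends on isRuntimePlan from @renderify/ir.
async function exampleGoogleStructuredStub() {
  const canned = {
    candidates: [
      {
        content: {
          parts: [{ text: '{"id":"plan-1","version":1,"root":{},"capabilities":{}}' }]
        },
        finishReason: "STOP"
      }
    ],
    usageMetadata: { totalTokenCount: 42 }
  };
  const interpreter = new GoogleLLMInterpreter({
    apiKey: "hypothetical-key",
    fetchImpl: async () => new Response(JSON.stringify(canned), {
      status: 200,
      headers: { "content-type": "application/json" }
    })
  });
  return interpreter.generateStructuredResponse({
    format: "runtime-plan",
    prompt: "Plan a counter widget."
  });
}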

// src/providers/openai.ts
import { isRuntimePlan as isRuntimePlan3 } from "@renderify/ir";
var DEFAULT_BASE_URL3 = "https://api.openai.com/v1";
var DEFAULT_MODEL3 = "gpt-4.1-mini";
var DEFAULT_TIMEOUT_MS3 = 3e4;
var RUNTIME_PLAN_JSON_SCHEMA = {
  type: "object",
  additionalProperties: true,
  required: ["id", "version", "root", "capabilities"],
  properties: {
    specVersion: {
      type: "string",
      minLength: 1
    },
    id: {
      type: "string",
      minLength: 1
    },
    version: {
      type: "integer",
      minimum: 1
    },
    root: {
      type: "object",
      additionalProperties: true
    },
    capabilities: {
      type: "object",
      additionalProperties: true
    },
    imports: {
      type: "array",
      items: {
        type: "string"
      }
    },
    moduleManifest: {
      type: "object",
      additionalProperties: {
        type: "object",
        additionalProperties: false,
        required: ["resolvedUrl"],
        properties: {
          resolvedUrl: { type: "string", minLength: 1 },
          integrity: { type: "string", minLength: 1 },
          version: { type: "string", minLength: 1 },
          signer: { type: "string", minLength: 1 }
        }
      }
    },
    metadata: {
      type: "object",
      additionalProperties: true
    },
    state: {
      type: "object",
      additionalProperties: true
    },
    source: {
      type: "object",
      additionalProperties: false,
      required: ["language", "code"],
      properties: {
        language: {
          type: "string",
          enum: ["js", "jsx", "ts", "tsx"]
        },
        code: {
          type: "string",
          minLength: 1
        },
        exportName: {
          type: "string",
          minLength: 1
        },
        runtime: {
          type: "string",
          enum: ["renderify", "preact"]
        }
      }
    }
  }
};
var OpenAILLMInterpreter = class {
  templates = /* @__PURE__ */ new Map();
  options = {
    baseUrl: DEFAULT_BASE_URL3,
    model: DEFAULT_MODEL3,
    timeoutMs: DEFAULT_TIMEOUT_MS3,
    apiKey: void 0,
    organization: void 0,
    project: void 0,
    systemPrompt: void 0
  };
  fetchImpl;
  constructor(options = {}) {
    this.configure({ ...options });
    if (options.fetchImpl) {
      this.fetchImpl = options.fetchImpl;
    }
  }
  configure(options) {
    const apiKey = pickString(options, "apiKey", "llmApiKey");
    const model = pickString(options, "model", "llmModel");
    const baseUrl = pickString(options, "baseUrl", "llmBaseUrl");
    const organization = pickString(
      options,
      "organization",
      "openaiOrganization"
    );
    const project = pickString(options, "project", "openaiProject");
    const systemPrompt = pickString(options, "systemPrompt");
    const timeoutMs = pickPositiveInt(
      options,
      "timeoutMs",
      "llmRequestTimeoutMs"
    );
    const fetchImpl = pickFetch(options, "fetchImpl");
    this.options = {
      ...this.options,
      ...apiKey !== void 0 ? { apiKey } : {},
      ...model !== void 0 ? { model } : {},
      ...baseUrl !== void 0 ? { baseUrl } : {},
      ...organization !== void 0 ? { organization } : {},
      ...project !== void 0 ? { project } : {},
      ...systemPrompt !== void 0 ? { systemPrompt } : {},
      ...timeoutMs !== void 0 ? { timeoutMs } : {}
    };
    if (fetchImpl) {
      this.fetchImpl = fetchImpl;
    }
  }
  async generateResponse(req) {
    const payload = await this.requestChatCompletions(
      {
        model: this.options.model,
        messages: this.buildMessages(req)
      },
      req.signal
    );
    const output = this.extractOutput(payload);
    if (output.refusal) {
      throw new Error(`OpenAI refused request: ${output.refusal}`);
    }
    return {
      text: output.text,
      tokensUsed: payload.usage?.total_tokens,
      model: payload.model ?? this.options.model,
      raw: {
        mode: "text",
        responseId: payload.id
      }
    };
  }
  async *generateResponseStream(req) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in OpenAILLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "OpenAI apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    const abortScope = createTimeoutAbortScope(
      this.options.timeoutMs,
      req.signal
    );
    let aggregatedText = "";
    let chunkIndex = 0;
    let tokensUsed;
    let model = this.options.model;
    let doneEmitted = false;
    const processEvents = (events) => {
      const chunks = [];
      for (const event of events) {
        const eventData = event.data;
        if (eventData === "[DONE]") {
          if (!doneEmitted) {
            chunkIndex += 1;
            doneEmitted = true;
            chunks.push({
              delta: "",
              text: aggregatedText,
              done: true,
              index: chunkIndex,
              tokensUsed,
              model,
              raw: {
                mode: "stream",
                done: true
              }
            });
          }
          continue;
        }
        let payload;
        try {
          payload = JSON.parse(eventData);
        } catch (error) {
          throw new Error(
            `OpenAI stream chunk parse failed: ${error instanceof Error ? error.message : String(error)}`
          );
        }
        if (payload.error?.message) {
          throw new Error(`OpenAI error: ${payload.error.message}`);
        }
        if (typeof payload.model === "string" && payload.model.trim().length > 0) {
          model = payload.model;
        }
        if (typeof payload.usage?.total_tokens === "number" && Number.isFinite(payload.usage.total_tokens)) {
          tokensUsed = payload.usage.total_tokens;
        }
        const output = this.extractStreamDelta(payload);
        if (output.refusal) {
          throw new Error(`OpenAI refused request: ${output.refusal}`);
        }
        if (output.text.length === 0) {
          continue;
        }
        aggregatedText += output.text;
        chunkIndex += 1;
        chunks.push({
          delta: output.text,
          text: aggregatedText,
          done: false,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            chunk: payload
          }
        });
      }
      return chunks;
    };
    try {
      const response = await fetchImpl(
        `${this.options.baseUrl.replace(/\/$/, "")}/chat/completions`,
        {
          method: "POST",
          headers: this.createHeaders(apiKey),
          body: JSON.stringify({
            model: this.options.model,
            messages: this.buildMessages(req),
            stream: true,
            stream_options: {
              include_usage: true
            }
          }),
          signal: abortScope.signal
        }
      );
      if (!response.ok) {
        const details = await readErrorResponse(response);
        throw new Error(
          `OpenAI request failed (${response.status}): ${details}`
        );
      }
      if (!response.body) {
        throw new Error("OpenAI streaming response body is empty");
      }
      const decoder = new TextDecoder();
      const reader = response.body.getReader();
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          break;
        }
        if (value) {
          buffer += decoder.decode(value, { stream: true });
        }
        const parsedEvents = consumeSseEvents(buffer);
        buffer = parsedEvents.remaining;
        for (const chunk of processEvents(parsedEvents.events)) {
          yield chunk;
        }
      }
      buffer += decoder.decode();
      const finalEvents = consumeSseEvents(buffer, true);
      for (const chunk of processEvents(finalEvents.events)) {
        yield chunk;
      }
      if (!doneEmitted) {
        chunkIndex += 1;
        doneEmitted = true;
        yield {
          delta: "",
          text: aggregatedText,
          done: true,
          index: chunkIndex,
          tokensUsed,
          model,
          raw: {
            mode: "stream",
            done: true,
            reason: "eof"
          }
        };
      }
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (req.signal?.aborted) {
          throw new Error("OpenAI request aborted by caller");
        }
        throw new Error(
          `OpenAI request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    } finally {
      abortScope.release();
    }
  }
  async generateStructuredResponse(req) {
    if (req.format !== "runtime-plan") {
      return {
        text: "",
        valid: false,
        errors: [`Unsupported structured format: ${String(req.format)}`],
        model: this.options.model
      };
    }
    const payload = await this.requestChatCompletions(
      {
        model: this.options.model,
        messages: this.buildMessages({
          ...req,
          systemPrompt: this.resolveStructuredSystemPrompt(req)
        }),
        response_format: {
          type: "json_schema",
          json_schema: {
            name: "runtime_plan",
            strict: req.strict !== false,
            schema: RUNTIME_PLAN_JSON_SCHEMA
          }
        }
      },
      req.signal
    );
    const output = this.extractOutput(payload);
    if (output.refusal) {
      return {
        text: "",
        valid: false,
        errors: [`OpenAI refusal: ${output.refusal}`],
        tokensUsed: payload.usage?.total_tokens,
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id,
          refusal: output.refusal
        }
      };
    }
    if (output.text.trim().length === 0) {
      return {
        text: "",
        valid: false,
        errors: ["Structured response content is empty"],
        tokensUsed: payload.usage?.total_tokens,
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    const parsed = tryParseJson(output.text);
    if (!parsed.ok) {
      return {
        text: output.text,
        valid: false,
        errors: [`Structured JSON parse failed: ${parsed.error}`],
        tokensUsed: payload.usage?.total_tokens,
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    if (!isRuntimePlan3(parsed.value)) {
      return {
        text: output.text,
        value: parsed.value,
        valid: false,
        errors: ["Structured payload is not a valid RuntimePlan"],
        tokensUsed: payload.usage?.total_tokens,
        model: payload.model ?? this.options.model,
        raw: {
          mode: "structured",
          responseId: payload.id
        }
      };
    }
    return {
      text: output.text,
      value: parsed.value,
      valid: true,
      tokensUsed: payload.usage?.total_tokens,
      model: payload.model ?? this.options.model,
      raw: {
        mode: "structured",
        responseId: payload.id
      }
    };
  }
  setPromptTemplate(templateName, templateContent) {
    this.templates.set(templateName, templateContent);
  }
  getPromptTemplate(templateName) {
    return this.templates.get(templateName);
  }
  async requestChatCompletions(body, signal) {
    const fetchImpl = resolveFetch(
      this.fetchImpl,
      "Global fetch is unavailable. Provide fetchImpl in OpenAILLMInterpreter options."
    );
    const apiKey = this.options.apiKey;
    if (!apiKey || apiKey.trim().length === 0) {
      throw new Error(
        "OpenAI apiKey is missing. Set RENDERIFY_LLM_API_KEY or configure apiKey."
      );
    }
    try {
      return await withTimeoutAbortScope(
        this.options.timeoutMs,
        signal,
        async (timeoutSignal) => {
          const response = await fetchImpl(
            `${this.options.baseUrl.replace(/\/$/, "")}/chat/completions`,
            {
              method: "POST",
              headers: this.createHeaders(apiKey),
              body: JSON.stringify(body),
              signal: timeoutSignal
            }
          );
          if (!response.ok) {
            const details = await readErrorResponse(response);
            throw new Error(
              `OpenAI request failed (${response.status}): ${details}`
            );
          }
          const parsed = await response.json();
          if (parsed.error?.message) {
            throw new Error(`OpenAI error: ${parsed.error.message}`);
          }
          return parsed;
        }
      );
    } catch (error) {
      if (error instanceof Error && error.name === "AbortError") {
        if (signal?.aborted) {
          throw new Error("OpenAI request aborted by caller");
        }
        throw new Error(
          `OpenAI request timed out after ${this.options.timeoutMs}ms`
        );
      }
      throw error;
    }
  }
  buildMessages(req) {
    const messages = [];
    const templateSystem = this.templates.get("default");
    const promptSystem = req.systemPrompt;
    const configuredSystem = this.options.systemPrompt;
    for (const system of [configuredSystem, templateSystem, promptSystem]) {
      if (typeof system === "string" && system.trim().length > 0) {
        messages.push({
          role: "system",
          content: system.trim()
        });
      }
    }
    const contextSnippet = formatContext(req.context);
    const prompt = contextSnippet ? `${req.prompt}

Context:
${contextSnippet}` : req.prompt;
    messages.push({
      role: "user",
      content: prompt
    });
    return messages;
  }
  resolveStructuredSystemPrompt(req) {
    const template = this.templates.get("runtime-plan");
    if (template && template.trim().length > 0) {
      return template;
    }
    const strictHint = req.strict === false ? "false" : "true";
    return [
      "You generate RuntimePlan JSON for Renderify.",
      "Return only JSON with no markdown or explanations.",
      "Schema priority: id/version/root/capabilities must be valid.",
      `Strict mode: ${strictHint}.`
    ].join(" ");
  }
  createHeaders(apiKey) {
    const headers = {
      "content-type": "application/json",
      authorization: `Bearer ${apiKey}`
    };
    if (this.options.organization) {
      headers["OpenAI-Organization"] = this.options.organization;
    }
    if (this.options.project) {
      headers["OpenAI-Project"] = this.options.project;
    }
    return headers;
  }
  extractOutput(payload) {
    const choice = payload.choices?.[0];
    if (!choice || !choice.message) {
      throw new Error("OpenAI response missing assistant choice");
    }
    const refusal = choice.message.refusal;
    if (typeof refusal === "string" && refusal.trim().length > 0) {
      return {
        text: "",
        refusal: refusal.trim()
      };
    }
    const content = choice.message.content;
    if (typeof content === "string") {
      return {
        text: content.trim()
      };
    }
    if (Array.isArray(content)) {
      const combined = content.map((part) => typeof part.text === "string" ? part.text : "").join("").trim();
      return {
        text: combined
      };
    }
    return {
      text: ""
    };
  }
  extractStreamDelta(payload) {
    const choices = payload.choices ?? [];
    let text = "";
    for (const choice of choices) {
      const refusal = choice.delta?.refusal;
      if (typeof refusal === "string" && refusal.trim().length > 0) {
        return {
          text: "",
          refusal: refusal.trim()
        };
      }
      const content = choice.delta?.content;
      if (typeof content === "string") {
        text += content;
        continue;
      }
      if (Array.isArray(content)) {
        text += content.map((part) => typeof part.text === "string" ? part.text : "").join("");
      }
    }
    return {
      text
    };
  }
};
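
// For reference, the structured call above serializes a chat.completions body
// shaped like this sketch (prompt and system text hypothetical; the system
// message is whatever resolveStructuredSystemPrompt returns):
var exampleOpenAIStructuredBody = {
  model: DEFAULT_MODEL3,
  messages: [
    { role: "system", content: "You generate RuntimePlan JSON for Renderify. ..." },
    { role: "user", content: "Plan a todo-list widget." }
  ],
  response_format: {
    type: "json_schema",
    json_schema: {
      name: "runtime_plan",
      strict: true,
      schema: RUNTIME_PLAN_JSON_SCHEMA
    }
  }
};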

// src/registry.ts
var openaiLLMProvider = {
  name: "openai",
  create: (options) => new OpenAILLMInterpreter(options)
};
var anthropicLLMProvider = {
  name: "anthropic",
  create: (options) => new AnthropicLLMInterpreter(options)
};
var googleLLMProvider = {
  name: "google",
  create: (options) => new GoogleLLMInterpreter(options)
};
var LLMProviderRegistry = class {
  providers = /* @__PURE__ */ new Map();
  register(definition) {
    const key = normalizeProviderName(definition.name);
    this.providers.set(key, definition);
    return this;
  }
  unregister(providerName) {
    return this.providers.delete(normalizeProviderName(providerName));
  }
  has(providerName) {
    return this.providers.has(normalizeProviderName(providerName));
  }
  list() {
    return [...this.providers.keys()].sort((a, b) => a.localeCompare(b));
  }
  resolve(providerName) {
    return this.providers.get(normalizeProviderName(providerName));
  }
  create(providerName, options) {
    const provider = this.resolve(providerName);
    if (!provider) {
      const available = this.list();
      const hint = available.length > 0 ? ` Available providers: ${available.join(", ")}.` : " No providers registered.";
      throw new Error(`Unknown LLM provider: ${providerName}.${hint}`);
    }
    return provider.create(options);
  }
};
function createDefaultLLMProviderRegistry() {
  const registry = new LLMProviderRegistry();
  registry.register(anthropicLLMProvider);
  registry.register(googleLLMProvider);
  registry.register(openaiLLMProvider);
  return registry;
}
var defaultLLMProviderRegistry = createDefaultLLMProviderRegistry();
function createLLMInterpreter(options) {
  const provider = normalizeProviderName(options.provider ?? "openai");
  const registry = options.registry ?? defaultLLMProviderRegistry;
  return registry.create(provider, options.providerOptions);
}
function normalizeProviderName(providerName) {
  const normalized = String(providerName).trim().toLowerCase();
  if (normalized.length === 0) {
    return "openai";
  }
  return normalized;
}
export {
  AnthropicLLMInterpreter,
  GoogleLLMInterpreter,
  LLMProviderRegistry,
  OpenAILLMInterpreter,
  anthropicLLMProvider,
  createDefaultLLMProviderRegistry,
  createLLMInterpreter,
  defaultLLMProviderRegistry,
  googleLLMProvider,
  openaiLLMProvider
};
//# sourceMappingURL=llm.esm.js.map
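
// A minimal end-to-end usage sketch for the exported API (hypothetical values;
// provider names are normalized, so "Anthropic" and "anthropic" resolve alike):
//
//   import { createLLMInterpreter, defaultLLMProviderRegistry } from "@renderify/llm";
//
//   const llm = createLLMInterpreter({
//     provider: "anthropic",
//     providerOptions: { apiKey: process.env.RENDERIFY_LLM_API_KEY }
//   });
//   const reply = await llm.generateResponse({ prompt: "Hello" });
//   console.log(reply.text);
//   console.log(defaultLLMProviderRegistry.list()); // ["anthropic", "google", "openai"]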