utilitas 1998.1.16 → 1998.1.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/utilitas.lite.mjs +1 -1
- package/dist/utilitas.lite.mjs.map +1 -1
- package/lib/alan.mjs +4 -13
- package/lib/manifest.mjs +1 -1
- package/package.json +1 -1
package/lib/alan.mjs
CHANGED

```diff
@@ -322,16 +322,7 @@ const init = async (options) => {
             if (options?.apiKey) {
                 const OpenAI = await need('openai');
                 const openai = new OpenAI(options);
-
-                    ? new OpenAI({
-                        ...options,
-                        baseURL: options?.chatGptEndpoint || options?.baseURL,
-                        apiKey: options?.chatGptApiKey || options?.apiKey,
-                    }) : openai;
-                clients[provider] = {
-                    client: openai, clientBeta: openai.beta,
-                    chatGptClient: chatGpt,
-                };
+                clients[provider] = { client: openai, clientBeta: openai.beta };
             }
             break;
         case GEMINI:
```
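Note: this hunk collapses the per-provider cache to a single OpenAI client. The removed lines built an optional second `new OpenAI({ ... })` instance from `chatGptEndpoint` / `chatGptApiKey` and stored it as `chatGptClient`; after the change only `client` and `clientBeta` are kept. Below is a minimal sketch of the resulting shape, assuming the official `openai` npm package (which utilitas loads lazily via `need('openai')`) and a plain `clients` map as in alan.mjs; the `initOpenAI` wrapper name is hypothetical, not part of utilitas.

```js
import OpenAI from 'openai';

const clients = {};

// Hypothetical wrapper illustrating the simplified cache entry built in init():
// one client per provider, no separate chatGptClient override.
const initOpenAI = (provider, options) => {
    const openai = new OpenAI(options);   // e.g. { apiKey, baseURL? }
    clients[provider] = { client: openai, clientBeta: openai.beta };
    return clients[provider];
};

// Usage sketch:
// initOpenAI('OPENAI', { apiKey: process.env.OPENAI_API_KEY });
```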
```diff
@@ -582,11 +573,11 @@ const packGptResp = async (resp, options) => {
 };
 
 const promptChatGPT = async (content, options = {}) => {
-    const {
+    const { client } = await getOpenAIClient(options);
     // https://github.com/openai/openai-node?tab=readme-ov-file#streaming-responses
     // custom api endpoint not supported vision apis @todo by @Leask
     // Structured Outputs: https://openai.com/index/introducing-structured-outputs-in-the-api/
-
+    client.baseURL !== OPENAI_BASE_URL
         && options?.attachments?.length && (options.attachments = []);
     if (options?.model) { } else if (options?.reasoning) {
         options.model = DEFAULT_MODELS[CHATGPT_REASONING];
```
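Note: the rewritten guard derives the "custom endpoint" check from the cached client itself rather than from ChatGPT-specific options: when `client.baseURL` differs from the official endpoint, any attachments are cleared, since custom endpoints are not expected to support the vision APIs (per the `@todo` comment above). A small sketch of the idiom follows; `OPENAI_BASE_URL` is assumed here to be the openai SDK default, and the helper name is hypothetical.

```js
// Assumption: alan.mjs defines OPENAI_BASE_URL; the SDK default is used here.
const OPENAI_BASE_URL = 'https://api.openai.com/v1';

// The `&&` chain is a compact conditional: options.attachments is emptied only
// when the client points at a non-official endpoint AND attachments exist.
const dropAttachmentsForCustomEndpoint = (client, options = {}) => {
    client.baseURL !== OPENAI_BASE_URL
        && options?.attachments?.length && (options.attachments = []);
    return options;
};
```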
```diff
@@ -609,7 +600,7 @@ const promptChatGPT = async (content, options = {}) => {
     [format, options.audioMimeType, options.suffix]
         = options?.stream ? ['pcm16', pcm16, 'pcm.wav'] : [WAV, wav, WAV];
     let [resp, resultText, resultAudio, chunk] = [
-        await
+        await client.chat.completions.create({
             modalities, audio: options?.audio || (
                 modalities?.find?.(x => x === AUDIO) && {
                     voice: DEFAULT_MODELS[OPENAI_VOICE], format
```
package/lib/manifest.mjs
CHANGED