@saltcorn/large-language-model 0.9.2 → 0.9.4

Files changed (2)
  1. package/generate.js +20 -28
  2. package/package.json +1 -1
package/generate.js CHANGED
@@ -207,7 +207,7 @@ const getCompletionOpenAICompatible = async (
  )
  body.temperature = 0.7;
  }
- if (rest.streamCallback) {
+ if (rest.streamCallback && global.fetch) {
  body.stream = true;
  delete body.streamCallback;
  }
@@ -315,7 +315,7 @@ const getCompletionOpenAICompatible = async (
  let streamParts = [];
  let streamToolCalls = null;

- if (rest.streamCallback) {
+ if (rest.streamCallback && body.stream) {
  // https://stackoverflow.com/a/75751803/19839414
  // https://stackoverflow.com/a/57664622/19839414

@@ -359,37 +359,29 @@ const getCompletionOpenAICompatible = async (
  }
  });
  };
- if (global.fetch) {
- const reader = rawResponse.body
- ?.pipeThrough(new TextDecoderStream())
- .getReader();
- if (!reader) return;
- // eslint-disable-next-line no-constant-condition

- while (!dataDone) {
- // eslint-disable-next-line no-await-in-loop
+ const reader = rawResponse.body
+ ?.pipeThrough(new TextDecoderStream())
+ .getReader();
+ if (!reader) return;
+ // eslint-disable-next-line no-constant-condition

- const { value, done } = await reader.read();
+ while (!dataDone) {
+ // eslint-disable-next-line no-await-in-loop

- if (done) {
- dataDone = true;
- break;
- }
- process_stream_data(value);
- if (dataDone) break;
+ const { value, done } = await reader.read();
+
+ if (done) {
+ dataDone = true;
+ break;
  }
- } else {
- await new Promise((resolve, reject) => {
- rawResponse.body.on("readable", () => {
- let chunk;
- while (null !== (chunk = rawResponse.body.read())) {
- let value = chunk.toString();
- process_stream_data(value, resolve);
- if (dataDone) break;
- }
- });
- });
+ if (typeof value === "string" && value.startsWith('{\n "error": {')) {
+ throw new Error(value);
+ }
+ process_stream_data(value);
+ if (dataDone) break;
  }
+
  if (debugCollector) {
  //TODO get the full response
  if (streamToolCalls) debugCollector.response = streamToolCalls;
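Summary of the generate.js changes in 0.9.4: streaming is only requested when `global.fetch` is available (first hunk), the stream parser only runs when `body.stream` was actually set (second hunk), the Node.js `readable`-event fallback is removed so the response is always consumed through a `TextDecoderStream` reader, and a chunk that looks like a JSON error document now throws (third hunk). The sketch below is not code from the package; it only illustrates the same reader-loop pattern under the assumption of an OpenAI-compatible endpoint, with `streamCompletion`, `url`, `apiKey`, `body`, and `onChunk` as hypothetical names.

```js
// Minimal sketch of the streaming pattern the updated code standardizes on:
// it assumes global.fetch exists, so the response body is a WHATWG
// ReadableStream that can be piped through TextDecoderStream and drained
// with a reader loop. All names below are placeholders, not the plugin's API.
async function streamCompletion(url, apiKey, body, onChunk) {
  const rawResponse = await fetch(url, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({ ...body, stream: true }),
  });

  const reader = rawResponse.body
    ?.pipeThrough(new TextDecoderStream())
    .getReader();
  if (!reader) return;

  let dataDone = false;
  while (!dataDone) {
    // eslint-disable-next-line no-await-in-loop
    const { value, done } = await reader.read();
    if (done) {
      dataDone = true;
      break;
    }
    // Some servers answer a failed streaming request with a plain JSON error
    // document instead of SSE "data:" lines; surface it as an exception,
    // mirroring the check added in this release.
    if (typeof value === "string" && value.startsWith('{\n "error": {')) {
      throw new Error(value);
    }
    // Otherwise the decoded chunk carries one or more SSE "data: {...}" lines.
    onChunk(value);
  }
}
```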
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@saltcorn/large-language-model",
- "version": "0.9.2",
+ "version": "0.9.4",
  "description": "Large language models and functionality for Saltcorn",
  "main": "index.js",
  "dependencies": {