@saltcorn/large-language-model 0.9.1 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/generate.js +58 -62
  2. package/package.json +1 -1
package/generate.js CHANGED
@@ -318,82 +318,78 @@ const getCompletionOpenAICompatible = async (
   if (rest.streamCallback) {
     // https://stackoverflow.com/a/75751803/19839414
     // https://stackoverflow.com/a/57664622/19839414
-    await new Promise((resolve, reject) => {
-      let dataDone = false;
-      let stashed = "";

-      const process_stream_data = (value) => {
-        const arr = value.split("\n");
-        arr.forEach((data) => {
-          if (data.length === 0) return; // ignore empty message
-          if (data.startsWith(":")) return; // ignore sse comment message
-          if (data === "data: [DONE]") {
-            dataDone = true;
-            resolve();
-            return;
-          }
-          try {
-            const json = JSON.parse(stashed + data.substring(6));
-            stashed = "";
-            //console.log(json.choices[0]);
-
-            // callback
+    let dataDone = false;
+    let stashed = "";

-            //answer store
-            if (json.choices?.[0]?.content)
-              streamParts.push(json.choices[0].content);
-            if (json.choices?.[0]?.delta?.content)
-              streamParts.push(json.choices[0].delta.content);
-            if (json.choices?.[0]?.delta?.tool_calls) {
-              if (!streamToolCalls) streamToolCalls = json.choices?.[0]?.delta;
-              else
-                json.choices?.[0]?.delta?.tool_calls.forEach((tc, ix) => {
-                  streamToolCalls.tool_calls[ix].function.arguments +=
-                    tc.function.arguments;
-                });
-            }
-            rest.streamCallback(json);
-          } catch (e) {
-            //console.error(e);
-            stashed = data.substring(6);
-          }
-        });
-      };
-      if (global.fetch) {
-        const reader = rawResponse.body
-          ?.pipeThrough(new TextDecoderStream())
-          .getReader();
-        if (!reader) return;
-        // eslint-disable-next-line no-constant-condition
-        (async () => {
-          while (!dataDone) {
-            // eslint-disable-next-line no-await-in-loop
+    const process_stream_data = (value, resolve) => {
+      const arr = value.split("\n");
+      arr.forEach((data) => {
+        if (data.length === 0) return; // ignore empty message
+        if (data.startsWith(":")) return; // ignore sse comment message
+        if (data === "data: [DONE]") {
+          dataDone = true;
+          if (resolve) resolve();
+          return;
+        }
+        try {
+          const json = JSON.parse(stashed + data.substring(6));
+          stashed = "";
+          //console.log(json.choices[0]);

-            const { value, done } = await reader.read();
+          // callback

-            if (done) {
-              dataDone = true;
-              resolve();
-              break;
-            }
-            process_stream_data(value);
-            if (dataDone) break;
+          //answer store
+          if (json.choices?.[0]?.content)
+            streamParts.push(json.choices[0].content);
+          if (json.choices?.[0]?.delta?.content)
+            streamParts.push(json.choices[0].delta.content);
+          if (json.choices?.[0]?.delta?.tool_calls) {
+            if (!streamToolCalls) streamToolCalls = json.choices?.[0]?.delta;
+            else
+              json.choices?.[0]?.delta?.tool_calls.forEach((tc, ix) => {
+                streamToolCalls.tool_calls[ix].function.arguments +=
+                  tc.function.arguments;
+              });
           }
-        })().catch((e) => {
+          rest.streamCallback(json);
+        } catch (e) {
           //console.error(e);
+          stashed = data.substring(6);
+        }
+      });
+    };
+    if (global.fetch) {
+      const reader = rawResponse.body
+        ?.pipeThrough(new TextDecoderStream())
+        .getReader();
+      if (!reader) return;
+      // eslint-disable-next-line no-constant-condition
+
+      while (!dataDone) {
+        // eslint-disable-next-line no-await-in-loop
+
+        const { value, done } = await reader.read();
+
+        if (done) {
           dataDone = true;
-          reject(e);
-        });
-      } else
+          break;
+        }
+        process_stream_data(value);
+        if (dataDone) break;
+      }
+    } else {
+      await new Promise((resolve, reject) => {
         rawResponse.body.on("readable", () => {
           let chunk;
           while (null !== (chunk = rawResponse.body.read())) {
             let value = chunk.toString();
-            process_stream_data(value);
+            process_stream_data(value, resolve);
             if (dataDone) break;
           }
         });
-    });
+      });
+    }
     if (debugCollector) {
       //TODO get the full response
       if (streamToolCalls) debugCollector.response = streamToolCalls;
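
The substance of the change is a restructuring of the SSE (server-sent events) handling: when global fetch is available, the streaming loop now awaits reader.read() directly instead of running inside a wrapper Promise, and only the Node-stream fallback keeps the Promise, passing its resolve into process_stream_data. For reference, a minimal standalone sketch of the same fetch + TextDecoderStream parsing pattern might look like the following; the function name streamChatCompletion, the onChunk callback, and the apiUrl/apiKey/payload parameters are illustrative placeholders, not part of this package.

// Minimal sketch (not from this package): stream an OpenAI-compatible chat
// completion and hand each parsed SSE chunk to a caller-supplied callback.
// Assumes global fetch and TextDecoderStream (Node 18+ or a browser).
async function streamChatCompletion(apiUrl, apiKey, payload, onChunk) {
  const res = await fetch(apiUrl, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${apiKey}`,
    },
    body: JSON.stringify({ ...payload, stream: true }),
  });
  const reader = res.body?.pipeThrough(new TextDecoderStream()).getReader();
  if (!reader) return;
  let buffer = ""; // carries any partial SSE line split across network chunks
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += value;
    const lines = buffer.split("\n");
    buffer = lines.pop() ?? ""; // the last element may be an incomplete line
    for (const line of lines) {
      if (line.length === 0 || line.startsWith(":")) continue; // keep-alives
      if (line === "data: [DONE]") return; // end-of-stream sentinel
      if (!line.startsWith("data: ")) continue;
      onChunk(JSON.parse(line.slice(6))); // strip the "data: " prefix
    }
  }
}

The sketch buffers incomplete lines rather than stashing partial JSON text as generate.js does, but both serve the same purpose: a JSON payload can be split across reads, so leftover text must be carried into the next chunk before parsing.
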
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@saltcorn/large-language-model",
-  "version": "0.9.1",
+  "version": "0.9.2",
   "description": "Large language models and functionality for Saltcorn",
   "main": "index.js",
   "dependencies": {