modelfusion 0.33.1 → 0.35.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/README.md +47 -1
  2. package/{model-function/generate-text → event-source}/AsyncQueue.cjs +11 -10
  3. package/event-source/AsyncQueue.d.ts +8 -0
  4. package/{model-function/generate-text → event-source}/AsyncQueue.js +11 -10
  5. package/event-source/EventSourceParserStream.cjs +34 -0
  6. package/event-source/EventSourceParserStream.d.ts +15 -0
  7. package/event-source/EventSourceParserStream.js +30 -0
  8. package/event-source/convertReadableStreamToAsyncIterable.cjs +19 -0
  9. package/event-source/convertReadableStreamToAsyncIterable.d.ts +1 -0
  10. package/event-source/convertReadableStreamToAsyncIterable.js +15 -0
  11. package/event-source/createEventSourceStream.cjs +15 -0
  12. package/event-source/createEventSourceStream.d.ts +1 -0
  13. package/event-source/createEventSourceStream.js +11 -0
  14. package/event-source/index.cjs +19 -0
  15. package/event-source/index.d.ts +3 -0
  16. package/event-source/index.js +3 -0
  17. package/event-source/parseEventSourceStream.cjs +12 -0
  18. package/event-source/parseEventSourceStream.d.ts +4 -0
  19. package/event-source/parseEventSourceStream.js +8 -0
  20. package/event-source/readEventSourceStream.cjs +33 -0
  21. package/event-source/readEventSourceStream.d.ts +6 -0
  22. package/event-source/readEventSourceStream.js +26 -0
  23. package/index.cjs +1 -0
  24. package/index.d.ts +1 -0
  25. package/index.js +1 -0
  26. package/model-function/AsyncIterableResultPromise.cjs +37 -0
  27. package/model-function/AsyncIterableResultPromise.d.ts +16 -0
  28. package/model-function/AsyncIterableResultPromise.js +33 -0
  29. package/model-function/{generate-text/DeltaEvent.d.ts → DeltaEvent.d.ts} +1 -1
  30. package/model-function/ModelCallEvent.d.ts +3 -2
  31. package/model-function/generate-structure/StructureFromTextGenerationModel.d.ts +1 -1
  32. package/model-function/generate-structure/StructureGenerationModel.d.ts +10 -1
  33. package/model-function/generate-structure/StructureStreamingEvent.cjs +2 -0
  34. package/model-function/generate-structure/StructureStreamingEvent.d.ts +7 -0
  35. package/model-function/generate-structure/StructureStreamingEvent.js +1 -0
  36. package/model-function/generate-structure/fixJson.cjs +215 -0
  37. package/model-function/generate-structure/fixJson.d.ts +1 -0
  38. package/model-function/generate-structure/fixJson.js +211 -0
  39. package/model-function/generate-structure/fixJson.test.cjs +130 -0
  40. package/model-function/generate-structure/fixJson.test.d.ts +1 -0
  41. package/model-function/generate-structure/fixJson.test.js +128 -0
  42. package/model-function/generate-structure/generateStructure.cjs +3 -1
  43. package/model-function/generate-structure/generateStructure.d.ts +1 -1
  44. package/model-function/generate-structure/generateStructure.js +3 -1
  45. package/model-function/generate-structure/parsePartialJson.cjs +29 -0
  46. package/model-function/generate-structure/parsePartialJson.d.ts +1 -0
  47. package/model-function/generate-structure/parsePartialJson.js +22 -0
  48. package/model-function/generate-structure/streamStructure.cjs +167 -0
  49. package/model-function/generate-structure/streamStructure.d.ts +17 -0
  50. package/model-function/generate-structure/streamStructure.js +160 -0
  51. package/model-function/generate-text/TextGenerationModel.d.ts +4 -4
  52. package/model-function/generate-text/streamText.cjs +47 -68
  53. package/model-function/generate-text/streamText.d.ts +3 -18
  54. package/model-function/generate-text/streamText.js +46 -66
  55. package/model-function/index.cjs +3 -2
  56. package/model-function/index.d.ts +3 -2
  57. package/model-function/index.js +3 -2
  58. package/model-provider/cohere/CohereTextGenerationModel.cjs +3 -3
  59. package/model-provider/cohere/CohereTextGenerationModel.d.ts +3 -3
  60. package/model-provider/cohere/CohereTextGenerationModel.js +3 -3
  61. package/model-provider/huggingface/HuggingFaceTextGenerationModel.cjs +0 -12
  62. package/model-provider/huggingface/HuggingFaceTextGenerationModel.d.ts +0 -2
  63. package/model-provider/huggingface/HuggingFaceTextGenerationModel.js +0 -12
  64. package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +23 -23
  65. package/model-provider/llamacpp/LlamaCppTextGenerationModel.d.ts +3 -3
  66. package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +23 -23
  67. package/model-provider/openai/OpenAITextGenerationModel.cjs +27 -25
  68. package/model-provider/openai/OpenAITextGenerationModel.d.ts +3 -3
  69. package/model-provider/openai/OpenAITextGenerationModel.js +27 -25
  70. package/model-provider/openai/chat/OpenAIChatModel.cjs +23 -2
  71. package/model-provider/openai/chat/OpenAIChatModel.d.ts +4 -2
  72. package/model-provider/openai/chat/OpenAIChatModel.js +23 -2
  73. package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +27 -24
  74. package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts +1 -1
  75. package/model-provider/openai/chat/OpenAIChatStreamIterable.js +27 -24
  76. package/package.json +9 -5
  77. package/prompt/PromptFormatTextGenerationModel.d.ts +1 -1
  78. package/tool/useTool.cjs +3 -4
  79. package/tool/useTool.d.ts +1 -1
  80. package/tool/useTool.js +3 -4
  81. package/model-function/generate-text/AsyncQueue.d.ts +0 -17
  82. package/model-function/generate-text/TextDeltaEventSource.cjs +0 -54
  83. package/model-function/generate-text/TextDeltaEventSource.d.ts +0 -5
  84. package/model-function/generate-text/TextDeltaEventSource.js +0 -46
  85. package/model-function/generate-text/extractTextDeltas.cjs +0 -23
  86. package/model-function/generate-text/extractTextDeltas.d.ts +0 -7
  87. package/model-function/generate-text/extractTextDeltas.js +0 -19
  88. package/model-function/generate-text/parseEventSourceReadableStream.cjs +0 -30
  89. package/model-function/generate-text/parseEventSourceReadableStream.d.ts +0 -8
  90. package/model-function/generate-text/parseEventSourceReadableStream.js +0 -26
  91. /package/model-function/{generate-text/DeltaEvent.cjs → DeltaEvent.cjs} +0 -0
  92. /package/model-function/{generate-text/DeltaEvent.js → DeltaEvent.js} +0 -0
package/model-provider/openai/OpenAITextGenerationModel.cjs CHANGED
@@ -6,13 +6,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.OpenAITextResponseFormat = exports.OpenAITextGenerationModel = exports.calculateOpenAITextGenerationCostInMillicents = exports.isOpenAITextGenerationModel = exports.getOpenAITextGenerationModelInformation = exports.OPENAI_TEXT_GENERATION_MODELS = void 0;
  const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
  const zod_1 = __importDefault(require("zod"));
+ const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
+ const postToApi_js_1 = require("../../core/api/postToApi.cjs");
+ const AsyncQueue_js_1 = require("../../event-source/AsyncQueue.cjs");
+ const parseEventSourceStream_js_1 = require("../../event-source/parseEventSourceStream.cjs");
  const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
- const AsyncQueue_js_1 = require("../../model-function/generate-text/AsyncQueue.cjs");
- const parseEventSourceReadableStream_js_1 = require("../../model-function/generate-text/parseEventSourceReadableStream.cjs");
  const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
  const PromptFormatTextGenerationModel_js_1 = require("../../prompt/PromptFormatTextGenerationModel.cjs");
- const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
- const postToApi_js_1 = require("../../core/api/postToApi.cjs");
  const OpenAIApiConfiguration_js_1 = require("./OpenAIApiConfiguration.cjs");
  const OpenAIError_js_1 = require("./OpenAIError.cjs");
  const TikTokenTokenizer_js_1 = require("./TikTokenTokenizer.cjs");
@@ -335,18 +335,15 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  const queue = new AsyncQueue_js_1.AsyncQueue();
  const streamDelta = [];
  // process the stream asynchonously (no 'await' on purpose):
- (0, parseEventSourceReadableStream_js_1.parseEventSourceReadableStream)({
- stream,
- callback: (event) => {
- if (event.type !== "event") {
- return;
- }
- const data = event.data;
- if (data === "[DONE]") {
- queue.close();
- return;
- }
- try {
+ (0, parseEventSourceStream_js_1.parseEventSourceStream)({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
  const json = secure_json_parse_1.default.parse(data);
  const parseResult = textResponseStreamEventSchema.safeParse(json);
  if (!parseResult.success) {
@@ -357,9 +354,9 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  queue.close();
  return;
  }
- const event = parseResult.data;
- for (let i = 0; i < event.choices.length; i++) {
- const eventChoice = event.choices[i];
+ const eventData = parseResult.data;
+ for (let i = 0; i < eventData.choices.length; i++) {
+ const eventChoice = eventData.choices[i];
  const delta = eventChoice.text;
  if (streamDelta[i] == null) {
  streamDelta[i] = {
@@ -383,12 +380,17 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  fullDelta: streamDeltaDeepCopy,
  });
  }
- catch (error) {
- queue.push({ type: "error", error });
- queue.close();
- return;
- }
- },
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
  });
  return queue;
  }
package/model-provider/openai/OpenAITextGenerationModel.d.ts CHANGED
@@ -1,12 +1,12 @@
  import z from "zod";
+ import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+ import { ResponseHandler } from "../../core/api/postToApi.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
+ import { DeltaEvent } from "../../model-function/DeltaEvent.js";
  import { ModelFunctionOptions } from "../../model-function/ModelFunctionOptions.js";
- import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
- import { DeltaEvent } from "../../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "../../prompt/PromptFormat.js";
  import { PromptFormatTextGenerationModel } from "../../prompt/PromptFormatTextGenerationModel.js";
- import { ResponseHandler } from "../../core/api/postToApi.js";
  import { OpenAIImageGenerationCallSettings } from "./OpenAIImageGenerationModel.js";
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
  /**
package/model-provider/openai/OpenAITextGenerationModel.js CHANGED
@@ -1,12 +1,12 @@
  import SecureJSON from "secure-json-parse";
  import z from "zod";
+ import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
+ import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
+ import { AsyncQueue } from "../../event-source/AsyncQueue.js";
+ import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
  import { AbstractModel } from "../../model-function/AbstractModel.js";
- import { AsyncQueue } from "../../model-function/generate-text/AsyncQueue.js";
- import { parseEventSourceReadableStream } from "../../model-function/generate-text/parseEventSourceReadableStream.js";
  import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
  import { PromptFormatTextGenerationModel } from "../../prompt/PromptFormatTextGenerationModel.js";
- import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
- import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
  import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
  import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
  import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
@@ -325,18 +325,15 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  const queue = new AsyncQueue();
  const streamDelta = [];
  // process the stream asynchonously (no 'await' on purpose):
- parseEventSourceReadableStream({
- stream,
- callback: (event) => {
- if (event.type !== "event") {
- return;
- }
- const data = event.data;
- if (data === "[DONE]") {
- queue.close();
- return;
- }
- try {
+ parseEventSourceStream({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
  const json = SecureJSON.parse(data);
  const parseResult = textResponseStreamEventSchema.safeParse(json);
  if (!parseResult.success) {
@@ -347,9 +344,9 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  queue.close();
  return;
  }
- const event = parseResult.data;
- for (let i = 0; i < event.choices.length; i++) {
- const eventChoice = event.choices[i];
+ const eventData = parseResult.data;
+ for (let i = 0; i < eventData.choices.length; i++) {
+ const eventChoice = eventData.choices[i];
  const delta = eventChoice.text;
  if (streamDelta[i] == null) {
  streamDelta[i] = {
@@ -373,12 +370,17 @@ async function createOpenAITextFullDeltaIterableQueue(stream) {
  fullDelta: streamDeltaDeepCopy,
  });
  }
- catch (error) {
- queue.push({ type: "error", error });
- queue.close();
- return;
- }
- },
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
  });
  return queue;
  }
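
Both the .cjs and .js hunks above replace the callback-based parseEventSourceReadableStream with the new parseEventSourceStream, which resolves to an async iterable of server-sent events that is drained into an AsyncQueue; the same refactor is applied to the chat stream files further below. As a rough TypeScript sketch only (import paths as used from model-provider/openai/ in the hunks; the exact parseEventSourceStream signature and the delta event payload type are not fully shown in this diff, so TextDeltaEvent and createDeltaQueue are illustrative names):

import { AsyncQueue } from "../../event-source/AsyncQueue.js";
import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";

// Illustrative delta event shape; only `fullDelta` and `{ type: "error", error }`
// appear verbatim in the diff above.
type TextDeltaEvent =
  | { type: "delta"; fullDelta: unknown }
  | { type: "error"; error: unknown };

function createDeltaQueue(stream: ReadableStream<Uint8Array>): AsyncQueue<TextDeltaEvent> {
  const queue = new AsyncQueue<TextDeltaEvent>();

  // Process the stream without awaiting, as in the refactored code:
  parseEventSourceStream({ stream })
    .then(async (events) => {
      try {
        for await (const event of events) {
          if (event.data === "[DONE]") {
            queue.close();
            return;
          }
          // Parse event.data, merge it into the accumulated deltas, then publish:
          queue.push({ type: "delta", fullDelta: JSON.parse(event.data) });
        }
      } catch (error) {
        queue.push({ type: "error", error });
        queue.close();
      }
    })
    .catch((error) => {
      queue.push({ type: "error", error });
      queue.close();
    });

  return queue;
}
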
package/model-provider/openai/chat/OpenAIChatModel.cjs CHANGED
@@ -9,6 +9,7 @@ const zod_1 = __importDefault(require("zod"));
  const callWithRetryAndThrottle_js_1 = require("../../../core/api/callWithRetryAndThrottle.cjs");
  const postToApi_js_1 = require("../../../core/api/postToApi.cjs");
  const AbstractModel_js_1 = require("../../../model-function/AbstractModel.cjs");
+ const parsePartialJson_js_1 = require("../../../model-function/generate-structure/parsePartialJson.cjs");
  const PromptFormatTextGenerationModel_js_1 = require("../../../prompt/PromptFormatTextGenerationModel.cjs");
  const OpenAIApiConfiguration_js_1 = require("../OpenAIApiConfiguration.cjs");
  const OpenAIError_js_1 = require("../OpenAIError.cjs");
@@ -264,8 +265,28 @@ class OpenAIChatModel extends AbstractModel_js_1.AbstractModel {
  });
  }
  extractStructure(response) {
- const jsonText = response.choices[0].message.function_call.arguments;
- return secure_json_parse_1.default.parse(jsonText);
+ return secure_json_parse_1.default.parse(response.choices[0].message.function_call.arguments);
+ }
+ generateStructureStreamResponse(structureDefinition, prompt, options) {
+ return this.callAPI(prompt, {
+ responseFormat: exports.OpenAIChatResponseFormat.deltaIterable,
+ functionId: options?.functionId,
+ settings: {
+ ...options,
+ functionCall: { name: structureDefinition.name },
+ functions: [
+ {
+ name: structureDefinition.name,
+ description: structureDefinition.description,
+ parameters: structureDefinition.schema.getJsonSchema(),
+ },
+ ],
+ },
+ run: options?.run,
+ });
+ }
+ extractPartialStructure(fullDelta) {
+ return (0, parsePartialJson_js_1.parsePartialJson)(fullDelta[0]?.function_call?.arguments);
  }
  generateStructureOrTextResponse(structureDefinitions, prompt, options) {
  return this.callAPI(prompt, {
package/model-provider/openai/chat/OpenAIChatModel.d.ts CHANGED
@@ -3,10 +3,10 @@ import { ApiConfiguration } from "../../../core/api/ApiConfiguration.js";
  import { ResponseHandler } from "../../../core/api/postToApi.js";
  import { StructureDefinition } from "../../../core/structure/StructureDefinition.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
+ import { DeltaEvent } from "../../../model-function/DeltaEvent.js";
  import { ModelFunctionOptions } from "../../../model-function/ModelFunctionOptions.js";
  import { StructureGenerationModel } from "../../../model-function/generate-structure/StructureGenerationModel.js";
  import { StructureOrTextGenerationModel } from "../../../model-function/generate-structure/StructureOrTextGenerationModel.js";
- import { DeltaEvent } from "../../../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../../../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "../../../prompt/PromptFormat.js";
  import { PromptFormatTextGenerationModel } from "../../../prompt/PromptFormatTextGenerationModel.js";
@@ -132,7 +132,7 @@ export interface OpenAIChatSettings extends TextGenerationModelSettings, Omit<Op
  * ),
  * ]);
  */
- export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatSettings> {
+ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> implements TextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatDelta, OpenAIChatSettings>, StructureOrTextGenerationModel<OpenAIChatMessage[], OpenAIChatResponse, OpenAIChatSettings> {
  constructor(settings: OpenAIChatSettings);
  readonly provider: "openai";
  get modelName(): OpenAIChatModelType;
@@ -185,6 +185,8 @@ export declare class OpenAIChatModel extends AbstractModel<OpenAIChatSettings> i
  */
  generateStructureResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
  extractStructure(response: OpenAIChatResponse): unknown;
+ generateStructureStreamResponse(structureDefinition: StructureDefinition<string, unknown>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings>): Promise<AsyncIterable<DeltaEvent<OpenAIChatDelta>>>;
+ extractPartialStructure(fullDelta: OpenAIChatDelta): unknown | undefined;
  generateStructureOrTextResponse(structureDefinitions: Array<StructureDefinition<string, unknown>>, prompt: OpenAIChatMessage[], options?: ModelFunctionOptions<OpenAIChatSettings> | undefined): PromiseLike<OpenAIChatResponse>;
  extractStructureAndText(response: OpenAIChatResponse): {
  structure: null;
package/model-provider/openai/chat/OpenAIChatModel.js CHANGED
@@ -3,6 +3,7 @@ import z from "zod";
  import { callWithRetryAndThrottle } from "../../../core/api/callWithRetryAndThrottle.js";
  import { createJsonResponseHandler, postJsonToApi, } from "../../../core/api/postToApi.js";
  import { AbstractModel } from "../../../model-function/AbstractModel.js";
+ import { parsePartialJson } from "../../../model-function/generate-structure/parsePartialJson.js";
  import { PromptFormatTextGenerationModel } from "../../../prompt/PromptFormatTextGenerationModel.js";
  import { OpenAIApiConfiguration } from "../OpenAIApiConfiguration.js";
  import { failedOpenAICallResponseHandler } from "../OpenAIError.js";
@@ -255,8 +256,28 @@ export class OpenAIChatModel extends AbstractModel {
  });
  }
  extractStructure(response) {
- const jsonText = response.choices[0].message.function_call.arguments;
- return SecureJSON.parse(jsonText);
+ return SecureJSON.parse(response.choices[0].message.function_call.arguments);
+ }
+ generateStructureStreamResponse(structureDefinition, prompt, options) {
+ return this.callAPI(prompt, {
+ responseFormat: OpenAIChatResponseFormat.deltaIterable,
+ functionId: options?.functionId,
+ settings: {
+ ...options,
+ functionCall: { name: structureDefinition.name },
+ functions: [
+ {
+ name: structureDefinition.name,
+ description: structureDefinition.description,
+ parameters: structureDefinition.schema.getJsonSchema(),
+ },
+ ],
+ },
+ run: options?.run,
+ });
+ }
+ extractPartialStructure(fullDelta) {
+ return parsePartialJson(fullDelta[0]?.function_call?.arguments);
  }
  generateStructureOrTextResponse(structureDefinitions, prompt, options) {
  return this.callAPI(prompt, {
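
The new extractPartialStructure above hands the accumulated function_call.arguments text to the new parsePartialJson helper (files 45–47 in the list), whose body is not part of this excerpt. Given the companion fixJson utility added in the same release, a plausible minimal sketch, stated as an assumption rather than the actual implementation:

import SecureJSON from "secure-json-parse";
import { fixJson } from "./fixJson.js"; // export name assumed; only the file is visible in the diff

export function parsePartialJson(jsonText: string | undefined): unknown | undefined {
  if (jsonText == null) {
    return undefined;
  }
  try {
    // The accumulated arguments may already form complete JSON.
    return SecureJSON.parse(jsonText);
  } catch {
    try {
      // Otherwise repair the truncated JSON (close open strings, arrays, objects) and retry.
      return SecureJSON.parse(fixJson(jsonText));
    } catch {
      return undefined;
    }
  }
}
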
package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs CHANGED
@@ -6,8 +6,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.createOpenAIChatFullDeltaIterableQueue = void 0;
  const secure_json_parse_1 = __importDefault(require("secure-json-parse"));
  const zod_1 = require("zod");
- const AsyncQueue_js_1 = require("../../../model-function/generate-text/AsyncQueue.cjs");
- const parseEventSourceReadableStream_js_1 = require("../../../model-function/generate-text/parseEventSourceReadableStream.cjs");
+ const AsyncQueue_js_1 = require("../../../event-source/AsyncQueue.cjs");
+ const parseEventSourceStream_js_1 = require("../../../event-source/parseEventSourceStream.cjs");
  const chatResponseStreamEventSchema = zod_1.z.object({
  choices: zod_1.z.array(zod_1.z.object({
  delta: zod_1.z.object({
@@ -32,18 +32,15 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
  const queue = new AsyncQueue_js_1.AsyncQueue();
  const streamDelta = [];
  // process the stream asynchonously (no 'await' on purpose):
- (0, parseEventSourceReadableStream_js_1.parseEventSourceReadableStream)({
- stream,
- callback: (event) => {
- if (event.type !== "event") {
- return;
- }
- const data = event.data;
- if (data === "[DONE]") {
- queue.close();
- return;
- }
- try {
+ (0, parseEventSourceStream_js_1.parseEventSourceStream)({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
  const json = secure_json_parse_1.default.parse(data);
  const parseResult = chatResponseStreamEventSchema.safeParse(json);
  if (!parseResult.success) {
@@ -54,9 +51,9 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
  queue.close();
  return;
  }
- const event = parseResult.data;
- for (let i = 0; i < event.choices.length; i++) {
- const eventChoice = event.choices[i];
+ const eventData = parseResult.data;
+ for (let i = 0; i < eventData.choices.length; i++) {
+ const eventChoice = eventData.choices[i];
  const delta = eventChoice.delta;
  if (streamDelta[i] == null) {
  streamDelta[i] = {
@@ -85,7 +82,8 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
  choice.function_call.name += delta.function_call.name;
  }
  if (delta.function_call.arguments != undefined) {
- choice.function_call.arguments += delta.function_call.arguments;
+ choice.function_call.arguments +=
+ delta.function_call.arguments;
  }
  }
  if (delta.role != undefined) {
@@ -100,12 +98,17 @@ async function createOpenAIChatFullDeltaIterableQueue(stream) {
  fullDelta: streamDeltaDeepCopy,
  });
  }
- catch (error) {
- queue.push({ type: "error", error });
- queue.close();
- return;
- }
- },
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
  });
  return queue;
  }
package/model-provider/openai/chat/OpenAIChatStreamIterable.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { DeltaEvent } from "../../../model-function/generate-text/DeltaEvent.js";
+ import { DeltaEvent } from "../../../model-function/DeltaEvent.js";
  export type OpenAIChatDelta = Array<{
  role: "assistant" | "user" | undefined;
  content: string;
package/model-provider/openai/chat/OpenAIChatStreamIterable.js CHANGED
@@ -1,7 +1,7 @@
  import SecureJSON from "secure-json-parse";
  import { z } from "zod";
- import { AsyncQueue } from "../../../model-function/generate-text/AsyncQueue.js";
- import { parseEventSourceReadableStream } from "../../../model-function/generate-text/parseEventSourceReadableStream.js";
+ import { AsyncQueue } from "../../../event-source/AsyncQueue.js";
+ import { parseEventSourceStream } from "../../../event-source/parseEventSourceStream.js";
  const chatResponseStreamEventSchema = z.object({
  choices: z.array(z.object({
  delta: z.object({
@@ -26,18 +26,15 @@ export async function createOpenAIChatFullDeltaIterableQueue(stream) {
  const queue = new AsyncQueue();
  const streamDelta = [];
  // process the stream asynchonously (no 'await' on purpose):
- parseEventSourceReadableStream({
- stream,
- callback: (event) => {
- if (event.type !== "event") {
- return;
- }
- const data = event.data;
- if (data === "[DONE]") {
- queue.close();
- return;
- }
- try {
+ parseEventSourceStream({ stream })
+ .then(async (events) => {
+ try {
+ for await (const event of events) {
+ const data = event.data;
+ if (data === "[DONE]") {
+ queue.close();
+ return;
+ }
  const json = SecureJSON.parse(data);
  const parseResult = chatResponseStreamEventSchema.safeParse(json);
  if (!parseResult.success) {
@@ -48,9 +45,9 @@ export async function createOpenAIChatFullDeltaIterableQueue(stream) {
  queue.close();
  return;
  }
- const event = parseResult.data;
- for (let i = 0; i < event.choices.length; i++) {
- const eventChoice = event.choices[i];
+ const eventData = parseResult.data;
+ for (let i = 0; i < eventData.choices.length; i++) {
+ const eventChoice = eventData.choices[i];
  const delta = eventChoice.delta;
  if (streamDelta[i] == null) {
  streamDelta[i] = {
@@ -79,7 +76,8 @@ export async function createOpenAIChatFullDeltaIterableQueue(stream) {
  choice.function_call.name += delta.function_call.name;
  }
  if (delta.function_call.arguments != undefined) {
- choice.function_call.arguments += delta.function_call.arguments;
+ choice.function_call.arguments +=
+ delta.function_call.arguments;
  }
  }
  if (delta.role != undefined) {
@@ -94,12 +92,17 @@ export async function createOpenAIChatFullDeltaIterableQueue(stream) {
  fullDelta: streamDeltaDeepCopy,
  });
  }
- catch (error) {
- queue.push({ type: "error", error });
- queue.close();
- return;
- }
- },
+ }
+ catch (error) {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
+ }
+ })
+ .catch((error) => {
+ queue.push({ type: "error", error });
+ queue.close();
+ return;
  });
  return queue;
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "modelfusion",
  "description": "Build multimodal applications, chatbots, and agents with JavaScript and TypeScript.",
- "version": "0.33.1",
+ "version": "0.35.0",
  "author": "Lars Grammel",
  "license": "MIT",
  "keywords": [
@@ -47,11 +47,13 @@
  "build": "npm run build:esm && npm run build:cjs",
  "build:esm": "tsc --outDir dist/",
  "build:cjs": "tsc --outDir build/cjs/ -p tsconfig.cjs.json && node bin/prepare-cjs.js",
- "dist:copy-files": "copyfiles package.json README.md LICENSE dist",
- "dist": "npm run clean && npm run lint && npm run build && npm run dist:copy-files"
+ "test": "vitest",
+ "dist": "npm run clean && npm run lint && npm run build && npm run dist:copy-files",
+ "dist:copy-files": "copyfiles package.json README.md LICENSE dist"
  },
  "dependencies": {
- "eventsource-parser": "1.0.0",
+ "deep-equal": "2.2.2",
+ "eventsource-parser": "1.1.1",
  "js-tiktoken": "1.0.7",
  "nanoid": "3.3.6",
  "secure-json-parse": "2.7.0",
@@ -61,6 +63,7 @@
  "devDependencies": {
  "@pinecone-database/pinecone": "^0.1.6",
  "@tsconfig/recommended": "1.0.3",
+ "@types/deep-equal": "^1.0.2",
  "@types/node": "18.11.9",
  "@typescript-eslint/eslint-plugin": "^6.1.0",
  "@typescript-eslint/parser": "^6.1.0",
@@ -71,7 +74,8 @@
  "lint-staged": "14.0.1",
  "prettier": "3.0.3",
  "rimraf": "5.0.1",
- "typescript": "5.2.2"
+ "typescript": "5.2.2",
+ "vitest": "^0.34.5"
  },
  "peerDependencies": {
  "@pinecone-database/pinecone": "0.1.6"
package/prompt/PromptFormatTextGenerationModel.d.ts CHANGED
@@ -1,5 +1,5 @@
+ import { DeltaEvent } from "../model-function/DeltaEvent.js";
  import { ModelFunctionOptions } from "../model-function/ModelFunctionOptions.js";
- import { DeltaEvent } from "../model-function/generate-text/DeltaEvent.js";
  import { TextGenerationModel, TextGenerationModelSettings } from "../model-function/generate-text/TextGenerationModel.js";
  import { PromptFormat } from "./PromptFormat.js";
  export declare class PromptFormatTextGenerationModel<PROMPT, MODEL_PROMPT, RESPONSE, FULL_DELTA, SETTINGS extends TextGenerationModelSettings, MODEL extends TextGenerationModel<MODEL_PROMPT, RESPONSE, FULL_DELTA, SETTINGS>> implements TextGenerationModel<PROMPT, RESPONSE, FULL_DELTA, SETTINGS> {
package/tool/useTool.cjs CHANGED
@@ -3,9 +3,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.useTool = void 0;
  const generateStructure_js_1 = require("../model-function/generate-structure/generateStructure.cjs");
  const executeTool_js_1 = require("./executeTool.cjs");
- // In this file, using 'any' is required to allow for flexibility in the inputs. The actual types are
- // retrieved through lookups such as TOOL["name"], such that any does not affect any client.
- /* eslint-disable @typescript-eslint/no-explicit-any */
  /**
  * `useTool` uses `generateStructure` to generate parameters for a tool and then executes the tool with the parameters.
  *
@@ -13,7 +10,9 @@ const executeTool_js_1 = require("./executeTool.cjs");
  * the parameters (`parameters` property, typed),
  * and the result of the tool execution (`result` property, typed).
  */
- async function useTool(model, tool, prompt, options) {
+ async function useTool(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ model, tool, prompt, options) {
  // Note: PROMPT must not be a function.
  const expandedPrompt = typeof prompt === "function"
  ? prompt(tool)
package/tool/useTool.d.ts CHANGED
@@ -8,7 +8,7 @@ import { Tool } from "./Tool.js";
  * the parameters (`parameters` property, typed),
  * and the result of the tool execution (`result` property, typed).
  */
- export declare function useTool<PROMPT, RESPONSE, SETTINGS extends StructureGenerationModelSettings, TOOL extends Tool<any, any, any>>(model: StructureGenerationModel<PROMPT, RESPONSE, SETTINGS>, tool: TOOL, prompt: PROMPT | ((tool: TOOL) => PROMPT), options?: ModelFunctionOptions<SETTINGS>): Promise<{
+ export declare function useTool<PROMPT, RESPONSE, SETTINGS extends StructureGenerationModelSettings, TOOL extends Tool<any, any, any>>(model: StructureGenerationModel<PROMPT, RESPONSE, any, SETTINGS>, tool: TOOL, prompt: PROMPT | ((tool: TOOL) => PROMPT), options?: ModelFunctionOptions<SETTINGS>): Promise<{
  tool: TOOL["name"];
  parameters: TOOL["inputSchema"];
  result: Awaited<ReturnType<TOOL["execute"]>>;
package/tool/useTool.js CHANGED
@@ -1,8 +1,5 @@
  import { generateStructure } from "../model-function/generate-structure/generateStructure.js";
  import { executeTool } from "./executeTool.js";
- // In this file, using 'any' is required to allow for flexibility in the inputs. The actual types are
- // retrieved through lookups such as TOOL["name"], such that any does not affect any client.
- /* eslint-disable @typescript-eslint/no-explicit-any */
  /**
  * `useTool` uses `generateStructure` to generate parameters for a tool and then executes the tool with the parameters.
  *
@@ -10,7 +7,9 @@ import { executeTool } from "./executeTool.js";
  * the parameters (`parameters` property, typed),
  * and the result of the tool execution (`result` property, typed).
  */
- export async function useTool(model, tool, prompt, options) {
+ export async function useTool(
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ model, tool, prompt, options) {
  // Note: PROMPT must not be a function.
  const expandedPrompt = typeof prompt === "function"
  ? prompt(tool)
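
Per the declaration in useTool.d.ts above, useTool takes a structure-generation-capable model, a Tool, and a prompt (or a prompt factory that receives the tool), and resolves to the tool name, the generated parameters, and the execution result. A hypothetical call site, assuming useTool is re-exported from the package entry point and that `model` and `calculator` are defined elsewhere in the application (neither is part of this diff):

import { useTool } from "modelfusion"; // assumed re-export from the package entry point

// `model` is a structure-generation-capable model (e.g. an OpenAIChatModel) and
// `calculator` is a Tool instance; both are hypothetical placeholders here.
async function runCalculator(model: any, calculator: any) {
  const { tool, parameters, result } = await useTool(
    model,
    calculator,
    (t: any) => [/* chat messages asking the model to use tool `t` */]
  );
  console.log({ tool, parameters, result });
}
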
package/model-function/generate-text/AsyncQueue.d.ts DELETED
@@ -1,17 +0,0 @@
- /**
- * @internal
- */
- export declare class AsyncQueue<T> implements AsyncIterable<T | undefined> {
- queue: T[];
- resolvers: Array<(options: {
- value: T | undefined;
- done: boolean;
- }) => void>;
- closed: boolean;
- constructor();
- push(value: T): void;
- close(): void;
- [Symbol.asyncIterator](): {
- next: () => Promise<IteratorResult<T | undefined, T | undefined>>;
- };
- }
- }