modelfusion 0.54.0 → 0.55.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -2
- package/browser/readEventSourceStream.cjs +1 -1
- package/browser/readEventSourceStream.js +1 -1
- package/index.cjs +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/model-function/embed/EmbeddingModel.d.ts +4 -0
- package/model-function/embed/embed.cjs +12 -1
- package/model-function/embed/embed.js +12 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +1 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +1 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +6 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.js +6 -0
- package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -50
- package/model-provider/cohere/CohereTextGenerationModel.js +31 -50
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +6 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +6 -0
- package/model-provider/index.cjs +1 -0
- package/model-provider/index.d.ts +1 -0
- package/model-provider/index.js +1 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +3 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +2 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +3 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +1 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +1 -1
- package/model-provider/ollama/OllamaApiConfiguration.cjs +15 -0
- package/model-provider/ollama/OllamaApiConfiguration.d.ts +10 -0
- package/model-provider/ollama/OllamaApiConfiguration.js +11 -0
- package/model-provider/ollama/OllamaError.cjs +29 -0
- package/model-provider/ollama/OllamaError.d.ts +22 -0
- package/model-provider/ollama/OllamaError.js +24 -0
- package/model-provider/ollama/OllamaTextGenerationModel.cjs +216 -0
- package/model-provider/ollama/OllamaTextGenerationModel.d.ts +134 -0
- package/model-provider/ollama/OllamaTextGenerationModel.js +212 -0
- package/model-provider/ollama/index.cjs +21 -0
- package/model-provider/ollama/index.d.ts +3 -0
- package/model-provider/ollama/index.js +3 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +2 -2
- package/model-provider/openai/OpenAICompletionModel.js +2 -2
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +6 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.js +6 -0
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +1 -1
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +1 -1
- package/package.json +1 -1
- package/util/index.cjs +1 -0
- package/util/index.d.ts +1 -0
- package/util/index.js +1 -0
- package/util/streaming/parseJsonStream.cjs +35 -0
- package/util/streaming/parseJsonStream.d.ts +6 -0
- package/util/streaming/parseJsonStream.js +31 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.cjs +0 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.d.ts +0 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.js +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.cjs +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.d.ts +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.js +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.cjs +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.d.ts +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.js +0 -0
- /package/{event-source → util/streaming}/index.cjs +0 -0
- /package/{event-source → util/streaming}/index.d.ts +0 -0
- /package/{event-source → util/streaming}/index.js +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.cjs +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.d.ts +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.js +0 -0
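The headline change in this release is the new Ollama provider (OllamaApiConfiguration, OllamaError, OllamaTextGenerationModel). Its usage is not shown in this diff; the sketch below is only a guess based on the streamText helper modelfusion exposes for its other text generation models, and the model name and settings are assumptions, not values confirmed by the diff:

```ts
import { OllamaTextGenerationModel, streamText } from "modelfusion";

// Sketch only: assumes streamText accepts the new Ollama model like any other
// text generation model, and that "model"/"temperature" are valid
// OllamaTextGenerationModel settings. "llama2" is a placeholder model name.
const textStream = await streamText(
  new OllamaTextGenerationModel({
    model: "llama2",
    temperature: 0.7,
  }),
  "Explain in one paragraph why streaming responses improve perceived latency."
);

for await (const textFragment of textStream) {
  process.stdout.write(textFragment);
}
```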
package/model-provider/openai/OpenAICompletionModel.js CHANGED
@@ -1,13 +1,13 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import { AsyncQueue } from "../../util/AsyncQueue.js";
-import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { mapChatPromptToTextFormat, mapInstructionPromptToTextFormat, } from "../../model-function/generate-text/TextPromptFormat.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
+import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { parseJsonWithZod } from "../../util/parseJSON.js";
+import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
 import { OpenAIApiConfiguration } from "./OpenAIApiConfiguration.js";
 import { failedOpenAICallResponseHandler } from "./OpenAIError.js";
 import { TikTokenTokenizer } from "./TikTokenTokenizer.js";
package/model-provider/openai/OpenAITextEmbeddingModel.cjs CHANGED
@@ -57,6 +57,12 @@ class OpenAITextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: 2048
         });
+        Object.defineProperty(this, "isParallizable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         Object.defineProperty(this, "embeddingDimensions", {
             enumerable: true,
             configurable: true,
package/model-provider/openai/OpenAITextEmbeddingModel.d.ts CHANGED
@@ -41,6 +41,7 @@ export declare class OpenAITextEmbeddingModel extends AbstractModel<OpenAITextEm
     readonly provider: "openai";
     get modelName(): "text-embedding-ada-002";
     readonly maxValuesPerCall = 2048;
+    readonly isParallizable = true;
     readonly embeddingDimensions: number;
     readonly tokenizer: TikTokenTokenizer;
     readonly contextWindowSize: number;
package/model-provider/openai/OpenAITextEmbeddingModel.js CHANGED
@@ -52,6 +52,12 @@ export class OpenAITextEmbeddingModel extends AbstractModel {
             writable: true,
             value: 2048
         });
+        Object.defineProperty(this, "isParallizable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         Object.defineProperty(this, "embeddingDimensions", {
             enumerable: true,
             configurable: true,
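Alongside OpenAI, the Cohere, HuggingFace, and Llama.cpp embedding models also gain an isParallizable flag, and embed grows by about a dozen lines, which suggests the embedding helpers now use the flag to decide whether per-batch requests may run concurrently. That logic is not shown in this diff; the sketch below only illustrates the idea, and embedChunk is a hypothetical helper, not part of the package:

```ts
// Illustrative sketch of how an isParallizable flag could gate concurrency.
// embedChunk is a hypothetical stand-in for "call the provider's embedding API
// for one batch of values"; this is not modelfusion's actual embed code.
async function embedAllChunks<VALUE>(
  model: { isParallizable: boolean },
  chunks: VALUE[][],
  embedChunk: (chunk: VALUE[]) => Promise<number[][]>
): Promise<number[][]> {
  if (model.isParallizable) {
    // Hosted APIs that tolerate concurrent requests: send all batches at once.
    const results = await Promise.all(chunks.map(embedChunk));
    return results.flat();
  }

  // Models that must be called sequentially (e.g. a single local server):
  // one batch at a time, preserving order.
  const embeddings: number[][] = [];
  for (const chunk of chunks) {
    embeddings.push(...(await embedChunk(chunk)));
  }
  return embeddings;
}
```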
package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.createOpenAIChatDeltaIterableQueue = void 0;
 const zod_1 = require("zod");
 const AsyncQueue_js_1 = require("../../../util/AsyncQueue.cjs");
-const parseEventSourceStream_js_1 = require("../../../event-source/parseEventSourceStream.cjs");
+const parseEventSourceStream_js_1 = require("../../../util/streaming/parseEventSourceStream.cjs");
 const parseJSON_js_1 = require("../../../util/parseJSON.cjs");
 const chatResponseStreamEventSchema = zod_1.z.object({
     choices: zod_1.z.array(zod_1.z.object({
package/model-provider/openai/chat/OpenAIChatStreamIterable.js CHANGED
@@ -1,6 +1,6 @@
 import { z } from "zod";
 import { AsyncQueue } from "../../../util/AsyncQueue.js";
-import { parseEventSourceStream } from "../../../event-source/parseEventSourceStream.js";
+import { parseEventSourceStream } from "../../../util/streaming/parseEventSourceStream.js";
 import { safeParseJsonWithZod } from "../../../util/parseJSON.js";
 const chatResponseStreamEventSchema = z.object({
     choices: z.array(z.object({
package/package.json CHANGED
package/util/index.cjs CHANGED
@@ -20,3 +20,4 @@ __exportStar(require("./cosineSimilarity.cjs"), exports);
 __exportStar(require("./delay.cjs"), exports);
 __exportStar(require("./getAudioFileExtension.cjs"), exports);
 __exportStar(require("./parseJSON.cjs"), exports);
+__exportStar(require("./streaming/index.cjs"), exports);
package/util/index.d.ts CHANGED
package/util/index.js CHANGED
package/util/streaming/parseJsonStream.cjs ADDED
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseJsonStream = void 0;
+const parseJSON_js_1 = require("../parseJSON.cjs");
+function parseJsonStream({ schema, stream, process, onDone, }) {
+    function processLine(line) {
+        process((0, parseJSON_js_1.parseJsonWithZod)(line, schema));
+    }
+    return (async () => {
+        try {
+            let unprocessedText = "";
+            const reader = new ReadableStreamDefaultReader(stream);
+            const utf8Decoder = new TextDecoder("utf-8");
+            // eslint-disable-next-line no-constant-condition
+            while (true) {
+                const { value: chunk, done } = await reader.read();
+                if (done) {
+                    break;
+                }
+                unprocessedText += utf8Decoder.decode(chunk, { stream: true });
+                const processableLines = unprocessedText.split(/\r\n|\n|\r/g);
+                unprocessedText = processableLines.pop() || "";
+                processableLines.forEach(processLine);
+            }
+            // processing remaining text:
+            if (unprocessedText) {
+                processLine(unprocessedText);
+            }
+        }
+        finally {
+            onDone?.();
+        }
+    })();
+}
+exports.parseJsonStream = parseJsonStream;
package/util/streaming/parseJsonStream.js ADDED
@@ -0,0 +1,31 @@
+import { parseJsonWithZod } from "../parseJSON.js";
+export function parseJsonStream({ schema, stream, process, onDone, }) {
+    function processLine(line) {
+        process(parseJsonWithZod(line, schema));
+    }
+    return (async () => {
+        try {
+            let unprocessedText = "";
+            const reader = new ReadableStreamDefaultReader(stream);
+            const utf8Decoder = new TextDecoder("utf-8");
+            // eslint-disable-next-line no-constant-condition
+            while (true) {
+                const { value: chunk, done } = await reader.read();
+                if (done) {
+                    break;
+                }
+                unprocessedText += utf8Decoder.decode(chunk, { stream: true });
+                const processableLines = unprocessedText.split(/\r\n|\n|\r/g);
+                unprocessedText = processableLines.pop() || "";
+                processableLines.forEach(processLine);
+            }
+            // processing remaining text:
+            if (unprocessedText) {
+                processLine(unprocessedText);
+            }
+        }
+        finally {
+            onDone?.();
+        }
+    })();
+}
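The new parseJsonStream utility reads a byte ReadableStream, decodes it as UTF-8, splits it on line breaks, parses each line with a Zod schema, and passes every parsed object to a process callback, calling onDone when the stream ends; this matches the newline-delimited JSON that the new Ollama provider presumably streams. A usage sketch based on the code above (the endpoint, schema fields, and root import are illustrative assumptions):

```ts
import { z } from "zod";
// Assumed import path; parseJsonStream may require a deeper import if it is
// not re-exported from the package root.
import { parseJsonStream } from "modelfusion";

// Simplified schema for one newline-delimited JSON event (illustrative only).
const eventSchema = z.object({
  response: z.string(),
  done: z.boolean(),
});

const response = await fetch("http://127.0.0.1:11434/api/generate", {
  method: "POST",
  body: JSON.stringify({ model: "llama2", prompt: "Hello" }),
});

let fullText = "";
await parseJsonStream({
  schema: eventSchema,
  stream: response.body!, // ReadableStream<Uint8Array> from fetch
  process: (event) => {
    fullText += event.response;
  },
  onDone: () => {
    console.log(fullText);
  },
});
```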
Files moved from package/event-source/ to package/util/streaming/ contain no content changes.