modelfusion 0.54.0 → 0.55.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -2
- package/browser/readEventSourceStream.cjs +1 -1
- package/browser/readEventSourceStream.js +1 -1
- package/index.cjs +0 -1
- package/index.d.ts +0 -1
- package/index.js +0 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +1 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +1 -1
- package/model-provider/cohere/CohereTextGenerationModel.cjs +31 -50
- package/model-provider/cohere/CohereTextGenerationModel.js +31 -50
- package/model-provider/index.cjs +1 -0
- package/model-provider/index.d.ts +1 -0
- package/model-provider/index.js +1 -0
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs +1 -1
- package/model-provider/llamacpp/LlamaCppTextGenerationModel.js +1 -1
- package/model-provider/ollama/OllamaApiConfiguration.cjs +15 -0
- package/model-provider/ollama/OllamaApiConfiguration.d.ts +10 -0
- package/model-provider/ollama/OllamaApiConfiguration.js +11 -0
- package/model-provider/ollama/OllamaError.cjs +29 -0
- package/model-provider/ollama/OllamaError.d.ts +22 -0
- package/model-provider/ollama/OllamaError.js +24 -0
- package/model-provider/ollama/OllamaTextGenerationModel.cjs +216 -0
- package/model-provider/ollama/OllamaTextGenerationModel.d.ts +134 -0
- package/model-provider/ollama/OllamaTextGenerationModel.js +212 -0
- package/model-provider/ollama/index.cjs +21 -0
- package/model-provider/ollama/index.d.ts +3 -0
- package/model-provider/ollama/index.js +3 -0
- package/model-provider/openai/OpenAICompletionModel.cjs +2 -2
- package/model-provider/openai/OpenAICompletionModel.js +2 -2
- package/model-provider/openai/chat/OpenAIChatStreamIterable.cjs +1 -1
- package/model-provider/openai/chat/OpenAIChatStreamIterable.js +1 -1
- package/package.json +1 -1
- package/util/index.cjs +1 -0
- package/util/index.d.ts +1 -0
- package/util/index.js +1 -0
- package/util/streaming/parseJsonStream.cjs +35 -0
- package/util/streaming/parseJsonStream.d.ts +6 -0
- package/util/streaming/parseJsonStream.js +31 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.cjs +0 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.d.ts +0 -0
- /package/{event-source → util/streaming}/EventSourceParserStream.js +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.cjs +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.d.ts +0 -0
- /package/{event-source → util/streaming}/convertReadableStreamToAsyncIterable.js +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.cjs +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.d.ts +0 -0
- /package/{event-source → util/streaming}/createEventSourceStream.js +0 -0
- /package/{event-source → util/streaming}/index.cjs +0 -0
- /package/{event-source → util/streaming}/index.d.ts +0 -0
- /package/{event-source → util/streaming}/index.js +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.cjs +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.d.ts +0 -0
- /package/{event-source → util/streaming}/parseEventSourceStream.js +0 -0
package/README.md
CHANGED
@@ -20,6 +20,7 @@
 - **Type inference and validation**: ModelFusion infers TypeScript types wherever possible and validates model responses.
 - **Observability and logging**: ModelFusion provides an observer framework and out-of-the-box logging support.
 - **Resilience and Robustness**: ModelFusion ensures seamless operation through automatic retries, throttling, and error handling mechanisms.
+- **Server**: ModelFusion provides a Fastify plugin that exposes a ModelFusion flow as a REST endpoint that uses server-sent events.

 ## Quick Install

@@ -53,7 +54,7 @@ const text = await generateText(
 );
 ```

-Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Hugging Face](https://modelfusion.dev/integration/model-provider/huggingface)
+Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Ollama](https://modelfusion.dev/integration/model-provider/ollama), [Hugging Face](https://modelfusion.dev/integration/model-provider/huggingface)

 #### streamText

@@ -70,7 +71,7 @@ for await (const textPart of textStream) {
 }
 ```

-Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp)
+Providers: [OpenAI](https://modelfusion.dev/integration/model-provider/openai), [Anthropic](https://modelfusion.dev/integration/model-provider/anthropic), [Cohere](https://modelfusion.dev/integration/model-provider/cohere), [Llama.cpp](https://modelfusion.dev/integration/model-provider/llamacpp), [Ollama](https://modelfusion.dev/integration/model-provider/ollama)

 ### [Generate Image](https://modelfusion.dev/guide/function/generate-image)

@@ -543,6 +544,71 @@ ModelFusion provides an [observer framework](https://modelfusion.dev/guide/util/
 setGlobalFunctionLogging("detailed-object"); // log full events
 ```

+### [Server](https://modelfusion.dev/guide/server/)
+
+> [!WARNING]
+> ModelFusion Server is in its initial development phase and not feature-complete. The API is experimental and breaking changes are likely. Feedback and suggestions are welcome.
+
+ModelFusion Server is designed for running multi-modal generative AI flows that take up to several minutes to complete. It provides the following benefits:
+
+- 🔄 Real-time progress updates via custom server-sent events
+- 🔒 Type-safety with Zod-schema for inputs/events
+- 📦 Efficient handling of dynamically created binary assets (images, audio)
+- 📜 Auto-logging for AI model interactions within flows
+
+ModelFusion provides a [Fastify](https://fastify.dev/) plugin that allows you to set up a server that exposes your ModelFusion flows as REST endpoints using server-sent events.
+
+```ts
+import {
+  FileSystemAssetStorage,
+  FileSystemLogger,
+  modelFusionFastifyPlugin,
+} from "modelfusion/fastify-server"; // '/fastify-server' import path
+
+// configurable logging for all runs using ModelFusion observability:
+const logger = new FileSystemLogger({
+  path: (run) => path.join(fsBasePath, run.runId, "logs"),
+});
+
+// configurable storage for large files like images and audio files:
+const assetStorage = new FileSystemAssetStorage({
+  path: (run) => path.join(fsBasePath, run.runId, "assets"),
+  logger,
+});
+
+fastify.register(modelFusionFastifyPlugin, {
+  baseUrl,
+  basePath: "/myFlow",
+  logger,
+  assetStorage,
+  flow: exampleFlow,
+});
+```
+
+Using `invokeFlow`, you can easily connect your client to a ModelFusion flow endpoint:
+
+```ts
+import { invokeFlow } from "modelfusion/browser"; // '/browser' import path
+
+invokeFlow({
+  url: `${BASE_URL}/myFlow`,
+  schema: myFlowSchema,
+  input: { prompt },
+  onEvent(event) {
+    switch (event.type) {
+      case "my-event": {
+        // do something with the event
+        break;
+      }
+      // more events...
+    }
+  },
+  onStop() {
+    // flow finished
+  },
+});
+```
+
 ## Documentation

 ### [Guide](https://modelfusion.dev/guide)
@@ -563,6 +629,7 @@ setGlobalFunctionLogging("detailed-object"); // log full events
 - [Retrieve](https://modelfusion.dev/guide/vector-index/retrieve)
 - [Text Chunks](https://modelfusion.dev/guide/text-chunk/)
 - [Split Text](https://modelfusion.dev/guide/text-chunk/split)
+- [Server](https://modelfusion.dev/guide/server/)
 - [Utilities](https://modelfusion.dev/guide/util/)
 - [API Configuration](https://modelfusion.dev/guide/util/api-configuration)
 - [Retry strategies](https://modelfusion.dev/guide/util/api-configuration/retry)
package/browser/readEventSourceStream.cjs
CHANGED
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.readEventSourceStream = void 0;
 const parseJSON_js_1 = require("../util/parseJSON.cjs");
 const AsyncQueue_js_1 = require("../util/AsyncQueue.cjs");
-const parseEventSourceStream_js_1 = require("../event-source/parseEventSourceStream.cjs");
+const parseEventSourceStream_js_1 = require("../util/streaming/parseEventSourceStream.cjs");
 function readEventSourceStream({ stream, schema, errorHandler, }) {
     const queue = new AsyncQueue_js_1.AsyncQueue();
     // run async (no await on purpose):
package/browser/readEventSourceStream.js
CHANGED
@@ -1,6 +1,6 @@
 import { safeParseJsonWithSchema } from "../util/parseJSON.js";
 import { AsyncQueue } from "../util/AsyncQueue.js";
-import { parseEventSourceStream } from "../event-source/parseEventSourceStream.js";
+import { parseEventSourceStream } from "../util/streaming/parseEventSourceStream.js";
 export function readEventSourceStream({ stream, schema, errorHandler, }) {
     const queue = new AsyncQueue();
     // run async (no await on purpose):
package/index.cjs
CHANGED
@@ -17,7 +17,6 @@ Object.defineProperty(exports, "__esModule", { value: true });
 __exportStar(require("./composed-function/index.cjs"), exports);
 __exportStar(require("./core/index.cjs"), exports);
 __exportStar(require("./cost/index.cjs"), exports);
-__exportStar(require("./event-source/index.cjs"), exports);
 __exportStar(require("./guard/index.cjs"), exports);
 __exportStar(require("./model-function/index.cjs"), exports);
 __exportStar(require("./model-provider/index.cjs"), exports);
package/index.d.ts
CHANGED
@@ -1,7 +1,6 @@
 export * from "./composed-function/index.js";
 export * from "./core/index.js";
 export * from "./cost/index.js";
-export * from "./event-source/index.js";
 export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
package/index.js
CHANGED
@@ -1,7 +1,6 @@
 export * from "./composed-function/index.js";
 export * from "./core/index.js";
 export * from "./cost/index.js";
-export * from "./event-source/index.js";
 export * from "./guard/index.js";
 export * from "./model-function/index.js";
 export * from "./model-provider/index.js";
package/model-provider/anthropic/AnthropicTextGenerationModel.cjs
CHANGED
@@ -4,7 +4,7 @@ exports.AnthropicTextGenerationResponseFormat = exports.AnthropicTextGenerationM
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
-const parseEventSourceStream_js_1 = require("../../event-source/parseEventSourceStream.cjs");
+const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
package/model-provider/anthropic/AnthropicTextGenerationModel.js
CHANGED
@@ -1,7 +1,7 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
+import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
package/model-provider/cohere/CohereTextGenerationModel.cjs
CHANGED
@@ -4,12 +4,12 @@ exports.CohereTextGenerationResponseFormat = exports.CohereTextGenerationModel =
 const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
-const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
 const TextPromptFormat_js_1 = require("../../model-function/generate-text/TextPromptFormat.cjs");
 const countTokens_js_1 = require("../../model-function/tokenize-text/countTokens.cjs");
-const
+const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
+const parseJsonStream_js_1 = require("../../util/streaming/parseJsonStream.cjs");
 const CohereApiConfiguration_js_1 = require("./CohereApiConfiguration.cjs");
 const CohereError_js_1 = require("./CohereError.cjs");
 const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
@@ -216,58 +216,39 @@ const cohereTextStreamingResponseSchema = zod_1.z.discriminatedUnion("is_finished", [
 async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue_js_1.AsyncQueue();
     let accumulatedText = "";
-    function processLine(line) {
-        const event = (0, parseJSON_js_1.parseJsonWithZod)(line, cohereTextStreamingResponseSchema);
-        if (event.is_finished === true) {
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content: accumulatedText,
-                    isComplete: true,
-                    delta: "",
-                },
-                valueDelta: "",
-            });
-        }
-        else {
-            accumulatedText += event.text;
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content: accumulatedText,
-                    isComplete: false,
-                    delta: event.text,
-                },
-                valueDelta: event.text,
-            });
-        }
-    }
     // process the stream asynchonously (no 'await' on purpose):
-    (
-            processableLines.forEach(processLine);
+    (0, parseJsonStream_js_1.parseJsonStream)({
+        stream,
+        schema: cohereTextStreamingResponseSchema,
+        process(event) {
+            if (event.is_finished === true) {
+                queue.push({
+                    type: "delta",
+                    fullDelta: {
+                        content: accumulatedText,
+                        isComplete: true,
+                        delta: "",
+                    },
+                    valueDelta: "",
+                });
             }
+            else {
+                accumulatedText += event.text;
+                queue.push({
+                    type: "delta",
+                    fullDelta: {
+                        content: accumulatedText,
+                        isComplete: false,
+                        delta: event.text,
+                    },
+                    valueDelta: event.text,
+                });
             }
-        }
+        },
+        onDone() {
             queue.close();
-        }
-    })
+        },
+    });
     return queue;
 }
 exports.CohereTextGenerationResponseFormat = {
package/model-provider/cohere/CohereTextGenerationModel.js
CHANGED
@@ -1,12 +1,12 @@
 import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
-import { AsyncQueue } from "../../util/AsyncQueue.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { mapChatPromptToTextFormat, mapInstructionPromptToTextFormat, } from "../../model-function/generate-text/TextPromptFormat.js";
 import { countTokens } from "../../model-function/tokenize-text/countTokens.js";
-import {
+import { AsyncQueue } from "../../util/AsyncQueue.js";
+import { parseJsonStream } from "../../util/streaming/parseJsonStream.js";
 import { CohereApiConfiguration } from "./CohereApiConfiguration.js";
 import { failedCohereCallResponseHandler } from "./CohereError.js";
 import { CohereTokenizer } from "./CohereTokenizer.js";
@@ -212,58 +212,39 @@ const cohereTextStreamingResponseSchema = z.discriminatedUnion("is_finished", [
 async function createCohereTextGenerationFullDeltaIterableQueue(stream) {
     const queue = new AsyncQueue();
     let accumulatedText = "";
-    function processLine(line) {
-        const event = parseJsonWithZod(line, cohereTextStreamingResponseSchema);
-        if (event.is_finished === true) {
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content: accumulatedText,
-                    isComplete: true,
-                    delta: "",
-                },
-                valueDelta: "",
-            });
-        }
-        else {
-            accumulatedText += event.text;
-            queue.push({
-                type: "delta",
-                fullDelta: {
-                    content: accumulatedText,
-                    isComplete: false,
-                    delta: event.text,
-                },
-                valueDelta: event.text,
-            });
-        }
-    }
     // process the stream asynchonously (no 'await' on purpose):
-    (
-            processableLines.forEach(processLine);
+    parseJsonStream({
+        stream,
+        schema: cohereTextStreamingResponseSchema,
+        process(event) {
+            if (event.is_finished === true) {
+                queue.push({
+                    type: "delta",
+                    fullDelta: {
+                        content: accumulatedText,
+                        isComplete: true,
+                        delta: "",
+                    },
+                    valueDelta: "",
+                });
             }
+            else {
+                accumulatedText += event.text;
+                queue.push({
+                    type: "delta",
+                    fullDelta: {
+                        content: accumulatedText,
+                        isComplete: false,
+                        delta: event.text,
+                    },
+                    valueDelta: event.text,
+                });
             }
-        }
+        },
+        onDone() {
             queue.close();
-        }
-    })
+        },
+    });
     return queue;
 }
 export const CohereTextGenerationResponseFormat = {
package/model-provider/index.cjs
CHANGED
@@ -21,5 +21,6 @@ __exportStar(require("./elevenlabs/index.cjs"), exports);
 __exportStar(require("./huggingface/index.cjs"), exports);
 __exportStar(require("./llamacpp/index.cjs"), exports);
 __exportStar(require("./lmnt/index.cjs"), exports);
+__exportStar(require("./ollama/index.cjs"), exports);
 __exportStar(require("./openai/index.cjs"), exports);
 __exportStar(require("./stability/index.cjs"), exports);
package/model-provider/index.js
CHANGED
package/model-provider/llamacpp/LlamaCppTextGenerationModel.cjs
CHANGED
@@ -5,7 +5,7 @@ const zod_1 = require("zod");
 const callWithRetryAndThrottle_js_1 = require("../../core/api/callWithRetryAndThrottle.cjs");
 const postToApi_js_1 = require("../../core/api/postToApi.cjs");
 const AsyncQueue_js_1 = require("../../util/AsyncQueue.cjs");
-const parseEventSourceStream_js_1 = require("../../event-source/parseEventSourceStream.cjs");
+const parseEventSourceStream_js_1 = require("../../util/streaming/parseEventSourceStream.cjs");
 const AbstractModel_js_1 = require("../../model-function/AbstractModel.cjs");
 const PromptFormatTextStreamingModel_js_1 = require("../../model-function/generate-text/PromptFormatTextStreamingModel.cjs");
 const parseJSON_js_1 = require("../../util/parseJSON.cjs");
package/model-provider/llamacpp/LlamaCppTextGenerationModel.js
CHANGED
@@ -2,7 +2,7 @@ import { z } from "zod";
 import { callWithRetryAndThrottle } from "../../core/api/callWithRetryAndThrottle.js";
 import { createJsonResponseHandler, postJsonToApi, } from "../../core/api/postToApi.js";
 import { AsyncQueue } from "../../util/AsyncQueue.js";
-import { parseEventSourceStream } from "../../event-source/parseEventSourceStream.js";
+import { parseEventSourceStream } from "../../util/streaming/parseEventSourceStream.js";
 import { AbstractModel } from "../../model-function/AbstractModel.js";
 import { PromptFormatTextStreamingModel } from "../../model-function/generate-text/PromptFormatTextStreamingModel.js";
 import { parseJsonWithZod } from "../../util/parseJSON.js";
package/model-provider/ollama/OllamaApiConfiguration.cjs
ADDED
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OllamaApiConfiguration = void 0;
+const BaseUrlApiConfiguration_js_1 = require("../../core/api/BaseUrlApiConfiguration.cjs");
+class OllamaApiConfiguration extends BaseUrlApiConfiguration_js_1.BaseUrlApiConfiguration {
+    constructor({ baseUrl = "http://127.0.0.1:11434", retry, throttle, } = {}) {
+        super({
+            baseUrl,
+            headers: {},
+            retry,
+            throttle,
+        });
+    }
+}
+exports.OllamaApiConfiguration = OllamaApiConfiguration;
package/model-provider/ollama/OllamaApiConfiguration.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
+import { RetryFunction } from "../../core/api/RetryFunction.js";
+import { ThrottleFunction } from "../../core/api/ThrottleFunction.js";
+export declare class OllamaApiConfiguration extends BaseUrlApiConfiguration {
+    constructor({ baseUrl, retry, throttle, }?: {
+        baseUrl?: string;
+        retry?: RetryFunction;
+        throttle?: ThrottleFunction;
+    });
+}
package/model-provider/ollama/OllamaApiConfiguration.js
ADDED
@@ -0,0 +1,11 @@
+import { BaseUrlApiConfiguration } from "../../core/api/BaseUrlApiConfiguration.js";
+export class OllamaApiConfiguration extends BaseUrlApiConfiguration {
+    constructor({ baseUrl = "http://127.0.0.1:11434", retry, throttle, } = {}) {
+        super({
+            baseUrl,
+            headers: {},
+            retry,
+            throttle,
+        });
+    }
+}
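The new `OllamaApiConfiguration` defaults to a local Ollama server at `http://127.0.0.1:11434` and accepts the usual `retry` and `throttle` options. A minimal usage sketch follows; the `OllamaTextGenerationModel` settings are declared in `OllamaTextGenerationModel.d.ts`, which is not shown in this excerpt, so the `model` setting below is an assumption rather than documented API:

```ts
import { OllamaApiConfiguration, OllamaTextGenerationModel } from "modelfusion";

// Point the provider at a non-default Ollama host
// (the configuration defaults to http://127.0.0.1:11434).
const api = new OllamaApiConfiguration({
  baseUrl: "http://localhost:11434",
});

// Assumed settings shape: the OllamaTextGenerationModel options are not part
// of this diff excerpt; "llama2" stands in for whichever local model was pulled.
const ollamaModel = new OllamaTextGenerationModel({
  api,
  model: "llama2",
});
```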
package/model-provider/ollama/OllamaError.cjs
ADDED
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.failedOllamaCallResponseHandler = exports.OllamaError = exports.ollamaErrorDataSchema = void 0;
+const zod_1 = require("zod");
+const ApiCallError_js_1 = require("../../core/api/ApiCallError.cjs");
+const parseJSON_js_1 = require("../../util/parseJSON.cjs");
+exports.ollamaErrorDataSchema = zod_1.z.object({
+    error: zod_1.z.string(),
+});
+class OllamaError extends ApiCallError_js_1.ApiCallError {
+    constructor({ data, statusCode, url, requestBodyValues, message = data.error, }) {
+        super({ message, statusCode, requestBodyValues, url });
+        Object.defineProperty(this, "data", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.data = data;
+    }
+}
+exports.OllamaError = OllamaError;
+const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValues }) => new OllamaError({
+    url,
+    requestBodyValues,
+    statusCode: response.status,
+    data: (0, parseJSON_js_1.parseJsonWithZod)(await response.text(), exports.ollamaErrorDataSchema),
+});
+exports.failedOllamaCallResponseHandler = failedOllamaCallResponseHandler;
package/model-provider/ollama/OllamaError.d.ts
ADDED
@@ -0,0 +1,22 @@
+import { z } from "zod";
+import { ApiCallError } from "../../core/api/ApiCallError.js";
+import { ResponseHandler } from "../../core/api/postToApi.js";
+export declare const ollamaErrorDataSchema: z.ZodObject<{
+    error: z.ZodString;
+}, "strip", z.ZodTypeAny, {
+    error: string;
+}, {
+    error: string;
+}>;
+export type OllamaErrorData = z.infer<typeof ollamaErrorDataSchema>;
+export declare class OllamaError extends ApiCallError {
+    readonly data: OllamaErrorData;
+    constructor({ data, statusCode, url, requestBodyValues, message, }: {
+        message?: string;
+        statusCode: number;
+        url: string;
+        requestBodyValues: unknown;
+        data: OllamaErrorData;
+    });
+}
+export declare const failedOllamaCallResponseHandler: ResponseHandler<ApiCallError>;
package/model-provider/ollama/OllamaError.js
ADDED
@@ -0,0 +1,24 @@
+import { z } from "zod";
+import { ApiCallError } from "../../core/api/ApiCallError.js";
+import { parseJsonWithZod } from "../../util/parseJSON.js";
+export const ollamaErrorDataSchema = z.object({
+    error: z.string(),
+});
+export class OllamaError extends ApiCallError {
+    constructor({ data, statusCode, url, requestBodyValues, message = data.error, }) {
+        super({ message, statusCode, requestBodyValues, url });
+        Object.defineProperty(this, "data", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.data = data;
+    }
+}
+export const failedOllamaCallResponseHandler = async ({ response, url, requestBodyValues }) => new OllamaError({
+    url,
+    requestBodyValues,
+    statusCode: response.status,
+    data: parseJsonWithZod(await response.text(), ollamaErrorDataSchema),
+});
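`OllamaError` follows the same pattern as the other provider error classes: a failed HTTP response body is parsed with `ollamaErrorDataSchema` (`{ error: string }`) and wrapped into an `ApiCallError` subclass by `failedOllamaCallResponseHandler`. A hedged sketch of handling it on the caller side, assuming the root package re-exports the class the way it does for the other providers:

```ts
import { OllamaError } from "modelfusion";

try {
  // ... invoke an Ollama-backed text generation call here ...
} catch (error) {
  if (error instanceof OllamaError) {
    // data is the parsed { error: string } body returned by the Ollama server
    console.error("Ollama request failed:", error.data.error);
  } else {
    throw error;
  }
}
```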