modelfusion 0.74.1 → 0.76.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -33
- package/guard/fixStructure.cjs +1 -1
- package/guard/fixStructure.d.ts +1 -1
- package/guard/fixStructure.js +1 -1
- package/model-function/embed/EmbeddingModel.d.ts +1 -1
- package/model-function/embed/embed.cjs +1 -1
- package/model-function/embed/embed.d.ts +2 -2
- package/model-function/embed/embed.js +1 -1
- package/model-function/generate-image/generateImage.d.ts +1 -1
- package/model-function/generate-speech/generateSpeech.d.ts +1 -1
- package/model-function/generate-speech/streamSpeech.d.ts +1 -1
- package/model-function/generate-structure/generateStructure.d.ts +1 -1
- package/model-function/generate-structure/streamStructure.d.ts +1 -1
- package/model-function/generate-text/generateText.d.ts +1 -1
- package/model-function/generate-text/streamText.d.ts +1 -1
- package/model-function/generate-transcription/generateTranscription.d.ts +1 -1
- package/model-provider/anthropic/AnthropicFacade.cjs +15 -0
- package/model-provider/anthropic/AnthropicFacade.d.ts +9 -0
- package/model-provider/anthropic/AnthropicFacade.js +11 -0
- package/model-provider/anthropic/AnthropicPromptFormat.cjs +2 -5
- package/model-provider/anthropic/AnthropicPromptFormat.js +2 -5
- package/model-provider/anthropic/AnthropicTextGenerationModel.cjs +4 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.d.ts +4 -1
- package/model-provider/anthropic/AnthropicTextGenerationModel.js +4 -1
- package/model-provider/anthropic/index.cjs +2 -1
- package/model-provider/anthropic/index.d.ts +1 -0
- package/model-provider/anthropic/index.js +1 -0
- package/model-provider/automatic1111/Automatic1111Facade.cjs +15 -0
- package/model-provider/automatic1111/Automatic1111Facade.d.ts +9 -0
- package/model-provider/automatic1111/Automatic1111Facade.js +11 -0
- package/model-provider/automatic1111/index.cjs +14 -1
- package/model-provider/automatic1111/index.d.ts +1 -0
- package/model-provider/automatic1111/index.js +1 -0
- package/model-provider/cohere/CohereFacade.cjs +71 -0
- package/model-provider/cohere/CohereFacade.d.ts +59 -0
- package/model-provider/cohere/CohereFacade.js +65 -0
- package/model-provider/cohere/CohereTextEmbeddingModel.cjs +1 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.d.ts +1 -1
- package/model-provider/cohere/CohereTextEmbeddingModel.js +1 -1
- package/model-provider/cohere/index.cjs +14 -1
- package/model-provider/cohere/index.d.ts +1 -0
- package/model-provider/cohere/index.js +1 -0
- package/model-provider/elevenlabs/ElevenLabsFacade.cjs +18 -0
- package/model-provider/elevenlabs/ElevenLabsFacade.d.ts +12 -0
- package/model-provider/elevenlabs/ElevenLabsFacade.js +14 -0
- package/model-provider/elevenlabs/index.cjs +14 -0
- package/model-provider/elevenlabs/index.d.ts +1 -0
- package/model-provider/elevenlabs/index.js +1 -0
- package/model-provider/huggingface/HuggingFaceFacade.cjs +55 -0
- package/model-provider/huggingface/HuggingFaceFacade.d.ts +46 -0
- package/model-provider/huggingface/HuggingFaceFacade.js +50 -0
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.cjs +1 -1
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.d.ts +1 -1
- package/model-provider/huggingface/HuggingFaceTextEmbeddingModel.js +1 -1
- package/model-provider/huggingface/index.cjs +14 -2
- package/model-provider/huggingface/index.d.ts +1 -1
- package/model-provider/huggingface/index.js +1 -1
- package/model-provider/llamacpp/LlamaCppFacade.cjs +19 -0
- package/model-provider/llamacpp/LlamaCppFacade.d.ts +7 -0
- package/model-provider/llamacpp/LlamaCppFacade.js +13 -0
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.cjs +2 -2
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.d.ts +2 -2
- package/model-provider/llamacpp/LlamaCppTextEmbeddingModel.js +2 -2
- package/model-provider/llamacpp/index.cjs +2 -1
- package/model-provider/llamacpp/index.d.ts +1 -0
- package/model-provider/llamacpp/index.js +1 -0
- package/model-provider/lmnt/LmntFacade.cjs +15 -0
- package/model-provider/lmnt/LmntFacade.d.ts +9 -0
- package/model-provider/lmnt/LmntFacade.js +11 -0
- package/model-provider/lmnt/index.cjs +14 -0
- package/model-provider/lmnt/index.d.ts +1 -0
- package/model-provider/lmnt/index.js +1 -0
- package/model-provider/ollama/OllamaFacade.cjs +13 -0
- package/model-provider/ollama/OllamaFacade.d.ts +4 -0
- package/model-provider/ollama/OllamaFacade.js +8 -0
- package/model-provider/ollama/OllamaTextEmbeddingModel.cjs +2 -2
- package/model-provider/ollama/OllamaTextEmbeddingModel.d.ts +2 -2
- package/model-provider/ollama/OllamaTextEmbeddingModel.js +2 -2
- package/model-provider/ollama/index.cjs +14 -1
- package/model-provider/ollama/index.d.ts +1 -0
- package/model-provider/ollama/index.js +1 -0
- package/model-provider/openai/OpenAIFacade.cjs +148 -0
- package/model-provider/openai/OpenAIFacade.d.ts +124 -0
- package/model-provider/openai/OpenAIFacade.js +138 -0
- package/model-provider/openai/OpenAITextEmbeddingModel.cjs +1 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.d.ts +1 -1
- package/model-provider/openai/OpenAITextEmbeddingModel.js +1 -1
- package/model-provider/openai/TikTokenTokenizer.cjs +2 -2
- package/model-provider/openai/TikTokenTokenizer.d.ts +4 -3
- package/model-provider/openai/TikTokenTokenizer.js +2 -2
- package/model-provider/openai/index.cjs +2 -1
- package/model-provider/openai/index.d.ts +1 -0
- package/model-provider/openai/index.js +1 -0
- package/model-provider/stability/StabilityFacade.cjs +32 -0
- package/model-provider/stability/StabilityFacade.d.ts +26 -0
- package/model-provider/stability/StabilityFacade.js +28 -0
- package/model-provider/stability/index.cjs +14 -1
- package/model-provider/stability/index.d.ts +1 -0
- package/model-provider/stability/index.js +1 -0
- package/package.json +1 -1
- package/model-provider/huggingface/HuggingFaceImageDescriptionModel.cjs +0 -94
- package/model-provider/huggingface/HuggingFaceImageDescriptionModel.d.ts +0 -44
- package/model-provider/huggingface/HuggingFaceImageDescriptionModel.js +0 -90
@@ -1,4 +1,5 @@
 export * from "./Automatic1111ApiConfiguration.js";
 export { Automatic1111Error, } from "./Automatic1111Error.js";
+export * as automatic1111 from "./Automatic1111Facade.js";
 export * from "./Automatic1111ImageGenerationModel.js";
 export * from "./Automatic1111ImageGenerationPrompt.js";
@@ -0,0 +1,71 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Tokenizer = exports.TextEmbedder = exports.TextGenerator = void 0;
+const CohereTextEmbeddingModel_js_1 = require("./CohereTextEmbeddingModel.cjs");
+const CohereTextGenerationModel_js_1 = require("./CohereTextGenerationModel.cjs");
+const CohereTokenizer_js_1 = require("./CohereTokenizer.cjs");
+/**
+ * Create a text generation model that calls the Cohere Co.Generate API.
+ *
+ * @see https://docs.cohere.com/reference/generate
+ *
+ * @example
+ * const model = cohere.TextGenerator({
+ *   model: "command-nightly",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link CohereTextGenerationModel}.
+ */
+function TextGenerator(settings) {
+    return new CohereTextGenerationModel_js_1.CohereTextGenerationModel(settings);
+}
+exports.TextGenerator = TextGenerator;
+/**
+ * Create a text embedding model that calls the Cohere Co.Embed API.
+ *
+ * @see https://docs.cohere.com/reference/embed
+ *
+ * @example
+ * const embeddings = await embedMany(
+ *   cohere.TextEmbedder({ model: "embed-english-light-v2.0" }),
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link CohereTextEmbeddingModel}.
+ */
+function TextEmbedder(settings) {
+    return new CohereTextEmbeddingModel_js_1.CohereTextEmbeddingModel(settings);
+}
+exports.TextEmbedder = TextEmbedder;
+/**
+ * Tokenizer for the Cohere models. It uses the Co.Tokenize and Co.Detokenize APIs.
+ *
+ * @see https://docs.cohere.com/reference/tokenize
+ * @see https://docs.cohere.com/reference/detokenize-1
+ *
+ * @example
+ * const tokenizer = cohere.Tokenizer({ model: "command-nightly" });
+ *
+ * const text = "At first, Nox didn't know what to do with the pup.";
+ *
+ * const tokenCount = await countTokens(tokenizer, text);
+ * const tokens = await tokenizer.tokenize(text);
+ * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
+ * const reconstructedText = await tokenizer.detokenize(tokens);
+ *
+ * @returns A new instance of {@link CohereTokenizer}.
+ */
+function Tokenizer(settings) {
+    return new CohereTokenizer_js_1.CohereTokenizer(settings);
+}
+exports.Tokenizer = Tokenizer;
@@ -0,0 +1,59 @@
+import { CohereTextEmbeddingModel, CohereTextEmbeddingModelSettings } from "./CohereTextEmbeddingModel.js";
+import { CohereTextGenerationModel, CohereTextGenerationModelSettings } from "./CohereTextGenerationModel.js";
+import { CohereTokenizer, CohereTokenizerSettings } from "./CohereTokenizer.js";
+/**
+ * Create a text generation model that calls the Cohere Co.Generate API.
+ *
+ * @see https://docs.cohere.com/reference/generate
+ *
+ * @example
+ * const model = cohere.TextGenerator({
+ *   model: "command-nightly",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link CohereTextGenerationModel}.
+ */
+export declare function TextGenerator(settings: CohereTextGenerationModelSettings): CohereTextGenerationModel;
+/**
+ * Create a text embedding model that calls the Cohere Co.Embed API.
+ *
+ * @see https://docs.cohere.com/reference/embed
+ *
+ * @example
+ * const embeddings = await embedMany(
+ *   cohere.TextEmbedder({ model: "embed-english-light-v2.0" }),
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link CohereTextEmbeddingModel}.
+ */
+export declare function TextEmbedder(settings: CohereTextEmbeddingModelSettings): CohereTextEmbeddingModel;
+/**
+ * Tokenizer for the Cohere models. It uses the Co.Tokenize and Co.Detokenize APIs.
+ *
+ * @see https://docs.cohere.com/reference/tokenize
+ * @see https://docs.cohere.com/reference/detokenize-1
+ *
+ * @example
+ * const tokenizer = cohere.Tokenizer({ model: "command-nightly" });
+ *
+ * const text = "At first, Nox didn't know what to do with the pup.";
+ *
+ * const tokenCount = await countTokens(tokenizer, text);
+ * const tokens = await tokenizer.tokenize(text);
+ * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
+ * const reconstructedText = await tokenizer.detokenize(tokens);
+ *
+ * @returns A new instance of {@link CohereTokenizer}.
+ */
+export declare function Tokenizer(settings: CohereTokenizerSettings): CohereTokenizer;
@@ -0,0 +1,65 @@
+import { CohereTextEmbeddingModel, } from "./CohereTextEmbeddingModel.js";
+import { CohereTextGenerationModel, } from "./CohereTextGenerationModel.js";
+import { CohereTokenizer } from "./CohereTokenizer.js";
+/**
+ * Create a text generation model that calls the Cohere Co.Generate API.
+ *
+ * @see https://docs.cohere.com/reference/generate
+ *
+ * @example
+ * const model = cohere.TextGenerator({
+ *   model: "command-nightly",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link CohereTextGenerationModel}.
+ */
+export function TextGenerator(settings) {
+    return new CohereTextGenerationModel(settings);
+}
+/**
+ * Create a text embedding model that calls the Cohere Co.Embed API.
+ *
+ * @see https://docs.cohere.com/reference/embed
+ *
+ * @example
+ * const embeddings = await embedMany(
+ *   cohere.TextEmbedder({ model: "embed-english-light-v2.0" }),
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link CohereTextEmbeddingModel}.
+ */
+export function TextEmbedder(settings) {
+    return new CohereTextEmbeddingModel(settings);
+}
+/**
+ * Tokenizer for the Cohere models. It uses the Co.Tokenize and Co.Detokenize APIs.
+ *
+ * @see https://docs.cohere.com/reference/tokenize
+ * @see https://docs.cohere.com/reference/detokenize-1
+ *
+ * @example
+ * const tokenizer = cohere.Tokenizer({ model: "command-nightly" });
+ *
+ * const text = "At first, Nox didn't know what to do with the pup.";
+ *
+ * const tokenCount = await countTokens(tokenizer, text);
+ * const tokens = await tokenizer.tokenize(text);
+ * const tokensAndTokenTexts = await tokenizer.tokenizeWithTexts(text);
+ * const reconstructedText = await tokenizer.detokenize(tokens);
+ *
+ * @returns A new instance of {@link CohereTokenizer}.
+ */
+export function Tokenizer(settings) {
+    return new CohereTokenizer(settings);
+}
@@ -67,7 +67,7 @@ class CohereTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: 96
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "isParallelizable", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -60,7 +60,7 @@ export declare class CohereTextEmbeddingModel extends AbstractModel<CohereTextEm
     readonly provider: "cohere";
     get modelName(): "embed-english-light-v2.0" | "embed-english-v2.0" | "embed-multilingual-v2.0" | "embed-english-v3.0" | "embed-english-light-v3.0" | "embed-multilingual-v3.0" | "embed-multilingual-light-v3.0";
     readonly maxValuesPerCall = 96;
-    readonly
+    readonly isParallelizable = true;
     readonly embeddingDimensions: number;
     readonly contextWindowSize: number;
     private readonly tokenizer;
@@ -64,7 +64,7 @@ export class CohereTextEmbeddingModel extends AbstractModel {
             writable: true,
             value: 96
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "isParallelizable", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -10,14 +10,27 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.CohereError = void 0;
+exports.cohere = exports.CohereError = void 0;
 __exportStar(require("./CohereApiConfiguration.cjs"), exports);
 var CohereError_js_1 = require("./CohereError.cjs");
 Object.defineProperty(exports, "CohereError", { enumerable: true, get: function () { return CohereError_js_1.CohereError; } });
+exports.cohere = __importStar(require("./CohereFacade.cjs"));
 __exportStar(require("./CohereTextEmbeddingModel.cjs"), exports);
 __exportStar(require("./CohereTextGenerationModel.cjs"), exports);
 __exportStar(require("./CohereTokenizer.cjs"), exports);
@@ -1,5 +1,6 @@
 export * from "./CohereApiConfiguration.js";
 export { CohereError, CohereErrorData } from "./CohereError.js";
+export * as cohere from "./CohereFacade.js";
 export * from "./CohereTextEmbeddingModel.js";
 export * from "./CohereTextGenerationModel.js";
 export * from "./CohereTokenizer.js";
@@ -1,5 +1,6 @@
 export * from "./CohereApiConfiguration.js";
 export { CohereError } from "./CohereError.js";
+export * as cohere from "./CohereFacade.js";
 export * from "./CohereTextEmbeddingModel.js";
 export * from "./CohereTextGenerationModel.js";
 export * from "./CohereTokenizer.js";
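The three CohereFacade files above, together with the new `export * as cohere` re-exports, add a namespace-style entry point for the Cohere provider. Below is a minimal usage sketch assembled from the JSDoc examples in those hunks; importing `cohere`, `generateText`, `embedMany`, and `countTokens` from the package root assumes that the `modelfusion` root index re-exports the provider index shown here.

```ts
import { cohere, generateText, embedMany, countTokens } from "modelfusion";

// Text generation via the Co.Generate API (settings copied from the JSDoc example).
const text = await generateText(
  cohere.TextGenerator({
    model: "command-nightly",
    temperature: 0.7,
    maxCompletionTokens: 500,
  }),
  "Write a short story about a robot learning to love:\n\n"
);

// Text embeddings via the Co.Embed API.
const embeddings = await embedMany(
  cohere.TextEmbedder({ model: "embed-english-light-v2.0" }),
  ["At first, Nox didn't know what to do with the pup."]
);

// Tokenization via the Co.Tokenize / Co.Detokenize APIs.
const tokenizer = cohere.Tokenizer({ model: "command-nightly" });
const tokenCount = await countTokens(
  tokenizer,
  "At first, Nox didn't know what to do with the pup."
);
```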
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Speech = void 0;
+const ElevenLabsSpeechModel_js_1 = require("./ElevenLabsSpeechModel.cjs");
+/**
+ * Synthesize speech using the ElevenLabs Text to Speech API.
+ *
+ * Both regular text-to-speech and full duplex text-to-speech streaming are supported.
+ *
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech-websockets
+ *
+ * @returns A new instance of {@link ElevenLabsSpeechModel}.
+ */
+function Speech(settings) {
+    return new ElevenLabsSpeechModel_js_1.ElevenLabsSpeechModel(settings);
+}
+exports.Speech = Speech;
@@ -0,0 +1,12 @@
+import { ElevenLabsSpeechModel, ElevenLabsSpeechModelSettings } from "./ElevenLabsSpeechModel.js";
+/**
+ * Synthesize speech using the ElevenLabs Text to Speech API.
+ *
+ * Both regular text-to-speech and full duplex text-to-speech streaming are supported.
+ *
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech-websockets
+ *
+ * @returns A new instance of {@link ElevenLabsSpeechModel}.
+ */
+export declare function Speech(settings: ElevenLabsSpeechModelSettings): ElevenLabsSpeechModel;
@@ -0,0 +1,14 @@
+import { ElevenLabsSpeechModel, } from "./ElevenLabsSpeechModel.js";
+/**
+ * Synthesize speech using the ElevenLabs Text to Speech API.
+ *
+ * Both regular text-to-speech and full duplex text-to-speech streaming are supported.
+ *
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech
+ * @see https://docs.elevenlabs.io/api-reference/text-to-speech-websockets
+ *
+ * @returns A new instance of {@link ElevenLabsSpeechModel}.
+ */
+export function Speech(settings) {
+    return new ElevenLabsSpeechModel(settings);
+}
@@ -10,9 +10,23 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.elevenlabs = void 0;
 __exportStar(require("./ElevenLabsApiConfiguration.cjs"), exports);
+exports.elevenlabs = __importStar(require("./ElevenLabsFacade.cjs"));
 __exportStar(require("./ElevenLabsSpeechModel.cjs"), exports);
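The new ElevenLabs facade plugs into the existing `generateSpeech` / `streamSpeech` functions (both appear in the file list above). A minimal sketch follows; the root "modelfusion" import path and the `voice` setting (a placeholder voice id) are assumptions, since `ElevenLabsSpeechModelSettings` is not shown in this diff.

```ts
import { elevenlabs, generateSpeech } from "modelfusion";

// Text-to-speech via the ElevenLabs API; returns audio data for the given text.
const speech = await generateSpeech(
  elevenlabs.Speech({ voice: "<voice-id>" }), // hypothetical settings, see note above
  "Good evening. It's time for the weather report."
);
```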
@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TextEmbedder = exports.TextGenerator = void 0;
+const HuggingFaceTextEmbeddingModel_js_1 = require("./HuggingFaceTextEmbeddingModel.cjs");
+const HuggingFaceTextGenerationModel_js_1 = require("./HuggingFaceTextGenerationModel.cjs");
+/**
+ * Create a text generation model that calls a Hugging Face Inference API Text Generation Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task
+ *
+ * @example
+ * const model = huggingface.TextGenerator({
+ *   model: "tiiuae/falcon-7b",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextGenerationModel}.
+ */
+function TextGenerator(settings) {
+    return new HuggingFaceTextGenerationModel_js_1.HuggingFaceTextGenerationModel(settings);
+}
+exports.TextGenerator = TextGenerator;
+/**
+ * Create a text embedding model that calls a Hugging Face Inference API Feature Extraction Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#feature-extraction-task
+ *
+ * @example
+ * const model = huggingface.TextEmbedder({
+ *   model: "intfloat/e5-base-v2",
+ *   maxTexstsPerCall: 5,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const embeddings = await embedMany(
+ *   model,
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextEmbeddingModel}.
+ */
+function TextEmbedder(settings) {
+    return new HuggingFaceTextEmbeddingModel_js_1.HuggingFaceTextEmbeddingModel(settings);
+}
+exports.TextEmbedder = TextEmbedder;
@@ -0,0 +1,46 @@
+import { HuggingFaceTextEmbeddingModel, HuggingFaceTextEmbeddingModelSettings } from "./HuggingFaceTextEmbeddingModel.js";
+import { HuggingFaceTextGenerationModel, HuggingFaceTextGenerationModelSettings } from "./HuggingFaceTextGenerationModel.js";
+/**
+ * Create a text generation model that calls a Hugging Face Inference API Text Generation Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task
+ *
+ * @example
+ * const model = huggingface.TextGenerator({
+ *   model: "tiiuae/falcon-7b",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextGenerationModel}.
+ */
+export declare function TextGenerator(settings: HuggingFaceTextGenerationModelSettings): HuggingFaceTextGenerationModel;
+/**
+ * Create a text embedding model that calls a Hugging Face Inference API Feature Extraction Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#feature-extraction-task
+ *
+ * @example
+ * const model = huggingface.TextEmbedder({
+ *   model: "intfloat/e5-base-v2",
+ *   maxTexstsPerCall: 5,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const embeddings = await embedMany(
+ *   model,
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextEmbeddingModel}.
+ */
+export declare function TextEmbedder(settings: HuggingFaceTextEmbeddingModelSettings): HuggingFaceTextEmbeddingModel;
@@ -0,0 +1,50 @@
+import { HuggingFaceTextEmbeddingModel, } from "./HuggingFaceTextEmbeddingModel.js";
+import { HuggingFaceTextGenerationModel, } from "./HuggingFaceTextGenerationModel.js";
+/**
+ * Create a text generation model that calls a Hugging Face Inference API Text Generation Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#text-generation-task
+ *
+ * @example
+ * const model = huggingface.TextGenerator({
+ *   model: "tiiuae/falcon-7b",
+ *   temperature: 0.7,
+ *   maxCompletionTokens: 500,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const text = await generateText(
+ *   model,
+ *   "Write a short story about a robot learning to love:\n\n"
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextGenerationModel}.
+ */
+export function TextGenerator(settings) {
+    return new HuggingFaceTextGenerationModel(settings);
+}
+/**
+ * Create a text embedding model that calls a Hugging Face Inference API Feature Extraction Task.
+ *
+ * @see https://huggingface.co/docs/api-inference/detailed_parameters#feature-extraction-task
+ *
+ * @example
+ * const model = huggingface.TextEmbedder({
+ *   model: "intfloat/e5-base-v2",
+ *   maxTexstsPerCall: 5,
+ *   retry: retryWithExponentialBackoff({ maxTries: 5 }),
+ * });
+ *
+ * const embeddings = await embedMany(
+ *   model,
+ *   [
+ *     "At first, Nox didn't know what to do with the pup.",
+ *     "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
+ *   ]
+ * );
+ *
+ * @returns A new instance of {@link HuggingFaceTextEmbeddingModel}.
+ */
+export function TextEmbedder(settings) {
+    return new HuggingFaceTextEmbeddingModel(settings);
+}
@@ -42,7 +42,7 @@ class HuggingFaceTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "isParallelizable", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -38,7 +38,7 @@ export declare class HuggingFaceTextEmbeddingModel extends AbstractModel<Hugging
     readonly provider = "huggingface";
     get modelName(): string;
     readonly maxValuesPerCall: number;
-    readonly
+    readonly isParallelizable = true;
     readonly contextWindowSize: undefined;
     readonly embeddingDimensions: number | undefined;
     readonly tokenizer: undefined;
@@ -39,7 +39,7 @@ export class HuggingFaceTextEmbeddingModel extends AbstractModel {
             writable: true,
             value: void 0
         });
-        Object.defineProperty(this, "
+        Object.defineProperty(this, "isParallelizable", {
             enumerable: true,
             configurable: true,
             writable: true,
@@ -10,14 +10,26 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.HuggingFaceError = void 0;
+exports.huggingface = exports.HuggingFaceError = void 0;
 __exportStar(require("./HuggingFaceApiConfiguration.cjs"), exports);
 var HuggingFaceError_js_1 = require("./HuggingFaceError.cjs");
 Object.defineProperty(exports, "HuggingFaceError", { enumerable: true, get: function () { return HuggingFaceError_js_1.HuggingFaceError; } });
-
+exports.huggingface = __importStar(require("./HuggingFaceFacade.cjs"));
 __exportStar(require("./HuggingFaceTextEmbeddingModel.cjs"), exports);
 __exportStar(require("./HuggingFaceTextGenerationModel.cjs"), exports);
@@ -1,5 +1,5 @@
 export * from "./HuggingFaceApiConfiguration.js";
 export { HuggingFaceError, HuggingFaceErrorData } from "./HuggingFaceError.js";
-export * from "./
+export * as huggingface from "./HuggingFaceFacade.js";
 export * from "./HuggingFaceTextEmbeddingModel.js";
 export * from "./HuggingFaceTextGenerationModel.js";
@@ -1,5 +1,5 @@
 export * from "./HuggingFaceApiConfiguration.js";
 export { HuggingFaceError } from "./HuggingFaceError.js";
-export * from "./
+export * as huggingface from "./HuggingFaceFacade.js";
 export * from "./HuggingFaceTextEmbeddingModel.js";
 export * from "./HuggingFaceTextGenerationModel.js";
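The Hugging Face facade follows the same pattern as the Cohere one; the sketch below lifts the JSDoc examples from the new facade files, with the root "modelfusion" import path again assumed from the re-export chain.

```ts
import { huggingface, generateText, embedMany } from "modelfusion";

// Text generation via the Hugging Face Inference API text-generation task.
const text = await generateText(
  huggingface.TextGenerator({
    model: "tiiuae/falcon-7b",
    temperature: 0.7,
    maxCompletionTokens: 500,
  }),
  "Write a short story about a robot learning to love:\n\n"
);

// Embeddings via the feature-extraction task.
const embeddings = await embedMany(
  huggingface.TextEmbedder({ model: "intfloat/e5-base-v2" }),
  ["At first, Nox didn't know what to do with the pup."]
);
```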
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Tokenizer = exports.TextEmbedder = exports.TextGenerator = void 0;
+const LlamaCppApiConfiguration_js_1 = require("./LlamaCppApiConfiguration.cjs");
+const LlamaCppTextEmbeddingModel_js_1 = require("./LlamaCppTextEmbeddingModel.cjs");
+const LlamaCppTextGenerationModel_js_1 = require("./LlamaCppTextGenerationModel.cjs");
+const LlamaCppTokenizer_js_1 = require("./LlamaCppTokenizer.cjs");
+function TextGenerator(settings = {}) {
+    return new LlamaCppTextGenerationModel_js_1.LlamaCppTextGenerationModel(settings);
+}
+exports.TextGenerator = TextGenerator;
+function TextEmbedder(settings = {}) {
+    return new LlamaCppTextEmbeddingModel_js_1.LlamaCppTextEmbeddingModel(settings);
+}
+exports.TextEmbedder = TextEmbedder;
+function Tokenizer(api = new LlamaCppApiConfiguration_js_1.LlamaCppApiConfiguration()) {
+    return new LlamaCppTokenizer_js_1.LlamaCppTokenizer(api);
+}
+exports.Tokenizer = Tokenizer;
@@ -0,0 +1,7 @@
+import { ApiConfiguration } from "../../core/api/ApiConfiguration.js";
+import { LlamaCppTextEmbeddingModel, LlamaCppTextEmbeddingModelSettings } from "./LlamaCppTextEmbeddingModel.js";
+import { LlamaCppTextGenerationModel, LlamaCppTextGenerationModelSettings } from "./LlamaCppTextGenerationModel.js";
+import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
+export declare function TextGenerator<CONTEXT_WINDOW_SIZE extends number>(settings?: LlamaCppTextGenerationModelSettings<CONTEXT_WINDOW_SIZE>): LlamaCppTextGenerationModel<CONTEXT_WINDOW_SIZE>;
+export declare function TextEmbedder(settings?: LlamaCppTextEmbeddingModelSettings): LlamaCppTextEmbeddingModel;
+export declare function Tokenizer(api?: ApiConfiguration): LlamaCppTokenizer;
@@ -0,0 +1,13 @@
+import { LlamaCppApiConfiguration } from "./LlamaCppApiConfiguration.js";
+import { LlamaCppTextEmbeddingModel, } from "./LlamaCppTextEmbeddingModel.js";
+import { LlamaCppTextGenerationModel, } from "./LlamaCppTextGenerationModel.js";
+import { LlamaCppTokenizer } from "./LlamaCppTokenizer.js";
+export function TextGenerator(settings = {}) {
+    return new LlamaCppTextGenerationModel(settings);
+}
+export function TextEmbedder(settings = {}) {
+    return new LlamaCppTextEmbeddingModel(settings);
+}
+export function Tokenizer(api = new LlamaCppApiConfiguration()) {
+    return new LlamaCppTokenizer(api);
+}
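Unlike the hosted providers, the llama.cpp factories default their arguments (`settings = {}`, `api = new LlamaCppApiConfiguration()`), so they can be called without configuration against a locally running llama.cpp server. A sketch follows; the `llamacpp` namespace name and the root "modelfusion" import are assumed from the re-export pattern the other providers use, since the llamacpp index hunks are not shown in this excerpt.

```ts
import { llamacpp, generateText, countTokens } from "modelfusion";

// Text generation against a local llama.cpp server using the default API configuration.
const text = await generateText(
  llamacpp.TextGenerator(),
  "Write a short story about a robot learning to love:\n\n"
);

// Tokenization against the same server.
const tokenizer = llamacpp.Tokenizer();
const tokenCount = await countTokens(
  tokenizer,
  "At first, Nox didn't know what to do with the pup."
);
```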
@@ -40,8 +40,8 @@ class LlamaCppTextEmbeddingModel extends AbstractModel_js_1.AbstractModel {
     get modelName() {
         return null;
     }
-    get
-        return this.settings.
+    get isParallelizable() {
+        return this.settings.isParallelizable ?? false;
     }
     get embeddingDimensions() {
         return this.settings.embeddingDimensions;
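The `isParallelizable` property introduced across the embedding models in this release is a readonly `true` for the hosted APIs (Cohere, Hugging Face) and, as the hunk above shows, a setting that defaults to `false` for llama.cpp. It presumably tells `embedMany` whether it may issue multiple embedding API calls in parallel when the input exceeds `maxValuesPerCall`. Below is a hedged sketch of opting a local llama.cpp embedder into parallel calls, with the same import-path assumption as before.

```ts
import { llamacpp, embedMany } from "modelfusion";

// Opt the local embedder into parallel requests; per the getter above it defaults to false.
const embeddings = await embedMany(
  llamacpp.TextEmbedder({ isParallelizable: true }),
  [
    "At first, Nox didn't know what to do with the pup.",
    "He keenly observed and absorbed everything around him, from the birds in the sky to the trees in the forest.",
  ]
);
```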