@ai-sdk/togetherai 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,9 @@
+ # @ai-sdk/togetherai
+
+ ## 0.0.1
+
+ ### Patch Changes
+
+ - c24b6df: feat (provider/togetherai): Add togetherai provider.
+ - Updated dependencies [962978b]
+ - @ai-sdk/openai-compatible@0.0.1
package/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright 2023 Vercel, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
package/README.md ADDED
@@ -0,0 +1,35 @@
+ # AI SDK - Together.ai Provider
+
+ The **[Together.ai provider](https://sdk.vercel.ai/providers/ai-sdk-providers/togetherai)** for the [AI SDK](https://sdk.vercel.ai/docs) contains language model support for the [Together.ai](https://together.ai) platform.
+
+ ## Setup
+
+ The Together.ai provider is available in the `@ai-sdk/togetherai` module. You can install it with:
+
+ ```bash
+ npm i @ai-sdk/togetherai
+ ```
+
+ ## Provider Instance
+
+ You can import the default provider instance `togetherai` from `@ai-sdk/togetherai`:
+
+ ```ts
+ import { togetherai } from '@ai-sdk/togetherai';
+ ```
+
+ ## Example
+
+ ```ts
+ import { togetherai } from '@ai-sdk/togetherai';
+ import { generateText } from 'ai';
+
+ const { text } = await generateText({
+   model: togetherai('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
+   prompt: 'Write a Python function that sorts a list:',
+ });
+ ```
+
+ ## Documentation
+
+ Please check out the **[Together.ai provider documentation](https://sdk.vercel.ai/providers/ai-sdk-providers/togetherai)** for more information.
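The README's example is non-streaming. A minimal companion sketch using `streamText` from the `ai` package (the streaming API is assumed from the AI SDK, not shown in this diff; the model ID and prompt are illustrative):

```ts
import { togetherai } from '@ai-sdk/togetherai';
import { streamText } from 'ai';

// Illustrative streaming call; streamText comes from the `ai` package.
const result = streamText({
  model: togetherai('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
  prompt: 'Explain bubble sort in two sentences.',
});

// Print text chunks as they arrive.
for await (const textPart of result.textStream) {
  process.stdout.write(textPart);
}
```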
package/dist/index.d.mts ADDED
@@ -0,0 +1,57 @@
+ import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
+ import { FetchFunction } from '@ai-sdk/provider-utils';
+ import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
+
+ type TogetherAIChatModelId = 'databricks/dbrx-instruct' | 'deepseek-ai/deepseek-llm-67b-chat' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'google/gemma-2b-it' | 'Gryphe/MythoMax-L2-13b' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-3-70b-chat-hf' | 'meta-llama/Llama-3-8b-chat-hf' | 'meta-llama/Llama-3.2-3B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3-70B-Instruct-Lite' | 'meta-llama/Meta-Llama-3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3-8B-Instruct-Lite' | 'meta-llama/Meta-Llama-3-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'microsoft/WizardLM-2-8x22B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct-Turbo' | 'Qwen/Qwen2.5-7B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'togethercomputer/StripedHyena-Nous-7B' | 'upstage/SOLAR-10.7B-Instruct-v1.0' | (string & {});
+ interface TogetherAIChatSettings extends OpenAICompatibleChatSettings {
+ }
+
+ type TogetherAIEmbeddingModelId = 'BAAI/bge-base-en-v1.5' | 'BAAI/bge-large-en-v1.5' | 'bert-base-uncased' | 'sentence-transformers/msmarco-bert-base-dot-v5' | 'togethercomputer/m2-bert-80M-2k-retrieval' | 'togethercomputer/m2-bert-80M-32k-retrieval' | 'togethercomputer/m2-bert-80M-8k-retrieval' | 'WhereIsAI/UAE-Large-V1' | (string & {});
+ interface TogetherAIEmbeddingSettings extends OpenAICompatibleEmbeddingSettings {
+ }
+
+ type TogetherAICompletionModelId = 'codellama/CodeLlama-34b-Instruct-hf' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | (string & {});
+ interface TogetherAICompletionSettings extends OpenAICompatibleCompletionSettings {
+ }
+
+ interface TogetherAIProviderSettings {
+ /**
+ TogetherAI API key.
+ */
+ apiKey?: string;
+ /**
+ Base URL for the API calls.
+ */
+ baseURL?: string;
+ /**
+ Custom headers to include in the requests.
+ */
+ headers?: Record<string, string>;
+ /**
+ Custom fetch implementation. You can use it as a middleware to intercept requests,
+ or to provide a custom fetch implementation for e.g. testing.
+ */
+ fetch?: FetchFunction;
+ }
+ interface TogetherAIProvider {
+ /**
+ Creates a model for text generation.
+ */
+ (modelId: TogetherAIChatModelId, settings?: TogetherAIChatSettings): LanguageModelV1;
+ /**
+ Creates a chat model for text generation.
+ */
+ chatModel(modelId: TogetherAIChatModelId, settings?: TogetherAIChatSettings): LanguageModelV1;
+ /**
+ Creates a completion model for text generation.
+ */
+ completionModel(modelId: TogetherAICompletionModelId, settings?: TogetherAICompletionSettings): LanguageModelV1;
+ /**
+ Creates a text embedding model for text generation.
+ */
+ textEmbeddingModel(modelId: TogetherAIEmbeddingModelId, settings?: TogetherAIEmbeddingSettings): EmbeddingModelV1<string>;
+ }
+ declare function createTogetherAI(options?: TogetherAIProviderSettings): TogetherAIProvider;
+ declare const togetherai: TogetherAIProvider;
+
+ export { type TogetherAIProvider, type TogetherAIProviderSettings, createTogetherAI, togetherai };
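These declarations are the package's public surface. A minimal sketch of how they compose, assuming the `ai` package's `generateText` and `embed` helpers; the option values, the `MY_TOGETHER_KEY` environment variable name, and the model choices are illustrative (when `apiKey` is omitted, the bundled code later in this diff falls back to `TOGETHER_AI_API_KEY`):

```ts
import { createTogetherAI } from '@ai-sdk/togetherai';
import { embed, generateText } from 'ai';

// Illustrative options; every field of TogetherAIProviderSettings is optional.
const together = createTogetherAI({
  apiKey: process.env.MY_TOGETHER_KEY, // hypothetical variable name
  baseURL: 'https://api.together.xyz/v1/',
});

// Chat model via the callable provider (equivalent to together.chatModel(...)).
const { text } = await generateText({
  model: together('meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo'),
  prompt: 'Summarize the Apache-2.0 license in one sentence.',
});

// Embedding model via textEmbeddingModel.
const { embedding } = await embed({
  model: together.textEmbeddingModel('togethercomputer/m2-bert-80M-8k-retrieval'),
  value: 'sunny day at the beach',
});
```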
package/dist/index.d.ts ADDED
@@ -0,0 +1,57 @@
+ import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
+ import { FetchFunction } from '@ai-sdk/provider-utils';
+ import { OpenAICompatibleChatSettings, OpenAICompatibleEmbeddingSettings, OpenAICompatibleCompletionSettings } from '@ai-sdk/openai-compatible';
+
+ type TogetherAIChatModelId = 'databricks/dbrx-instruct' | 'deepseek-ai/deepseek-llm-67b-chat' | 'google/gemma-2-27b-it' | 'google/gemma-2-9b-it' | 'google/gemma-2b-it' | 'Gryphe/MythoMax-L2-13b' | 'meta-llama/Llama-2-13b-chat-hf' | 'meta-llama/Llama-3-70b-chat-hf' | 'meta-llama/Llama-3-8b-chat-hf' | 'meta-llama/Llama-3.2-3B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3-70B-Instruct-Lite' | 'meta-llama/Meta-Llama-3-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3-8B-Instruct-Lite' | 'meta-llama/Meta-Llama-3-8B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo' | 'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo' | 'microsoft/WizardLM-2-8x22B' | 'mistralai/Mistral-7B-Instruct-v0.1' | 'mistralai/Mistral-7B-Instruct-v0.2' | 'mistralai/Mistral-7B-Instruct-v0.3' | 'mistralai/Mixtral-8x22B-Instruct-v0.1' | 'mistralai/Mixtral-8x7B-Instruct-v0.1' | 'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO' | 'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF' | 'Qwen/Qwen2-72B-Instruct' | 'Qwen/Qwen2.5-72B-Instruct-Turbo' | 'Qwen/Qwen2.5-7B-Instruct-Turbo' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | 'togethercomputer/StripedHyena-Nous-7B' | 'upstage/SOLAR-10.7B-Instruct-v1.0' | (string & {});
+ interface TogetherAIChatSettings extends OpenAICompatibleChatSettings {
+ }
+
+ type TogetherAIEmbeddingModelId = 'BAAI/bge-base-en-v1.5' | 'BAAI/bge-large-en-v1.5' | 'bert-base-uncased' | 'sentence-transformers/msmarco-bert-base-dot-v5' | 'togethercomputer/m2-bert-80M-2k-retrieval' | 'togethercomputer/m2-bert-80M-32k-retrieval' | 'togethercomputer/m2-bert-80M-8k-retrieval' | 'WhereIsAI/UAE-Large-V1' | (string & {});
+ interface TogetherAIEmbeddingSettings extends OpenAICompatibleEmbeddingSettings {
+ }
+
+ type TogetherAICompletionModelId = 'codellama/CodeLlama-34b-Instruct-hf' | 'Qwen/Qwen2.5-Coder-32B-Instruct' | (string & {});
+ interface TogetherAICompletionSettings extends OpenAICompatibleCompletionSettings {
+ }
+
+ interface TogetherAIProviderSettings {
+ /**
+ TogetherAI API key.
+ */
+ apiKey?: string;
+ /**
+ Base URL for the API calls.
+ */
+ baseURL?: string;
+ /**
+ Custom headers to include in the requests.
+ */
+ headers?: Record<string, string>;
+ /**
+ Custom fetch implementation. You can use it as a middleware to intercept requests,
+ or to provide a custom fetch implementation for e.g. testing.
+ */
+ fetch?: FetchFunction;
+ }
+ interface TogetherAIProvider {
+ /**
+ Creates a model for text generation.
+ */
+ (modelId: TogetherAIChatModelId, settings?: TogetherAIChatSettings): LanguageModelV1;
+ /**
+ Creates a chat model for text generation.
+ */
+ chatModel(modelId: TogetherAIChatModelId, settings?: TogetherAIChatSettings): LanguageModelV1;
+ /**
+ Creates a completion model for text generation.
+ */
+ completionModel(modelId: TogetherAICompletionModelId, settings?: TogetherAICompletionSettings): LanguageModelV1;
+ /**
+ Creates a text embedding model for text generation.
+ */
+ textEmbeddingModel(modelId: TogetherAIEmbeddingModelId, settings?: TogetherAIEmbeddingSettings): EmbeddingModelV1<string>;
+ }
+ declare function createTogetherAI(options?: TogetherAIProviderSettings): TogetherAIProvider;
+ declare const togetherai: TogetherAIProvider;
+
+ export { type TogetherAIProvider, type TogetherAIProviderSettings, createTogetherAI, togetherai };
package/dist/index.js ADDED
@@ -0,0 +1,78 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/index.ts
+ var src_exports = {};
+ __export(src_exports, {
+ createTogetherAI: () => createTogetherAI,
+ togetherai: () => togetherai
+ });
+ module.exports = __toCommonJS(src_exports);
+
+ // src/togetherai-provider.ts
+ var import_openai_compatible = require("@ai-sdk/openai-compatible");
+ var import_provider_utils = require("@ai-sdk/provider-utils");
+ function createTogetherAI(options = {}) {
+ var _a;
+ const baseURL = (0, import_provider_utils.withoutTrailingSlash)(
+ (_a = options.baseURL) != null ? _a : "https://api.together.xyz/v1/"
+ );
+ const getHeaders = () => ({
+ Authorization: `Bearer ${(0, import_provider_utils.loadApiKey)({
+ apiKey: options.apiKey,
+ environmentVariableName: "TOGETHER_AI_API_KEY",
+ description: "TogetherAI's API key"
+ })}`,
+ ...options.headers
+ });
+ const getCommonModelConfig = (modelType) => ({
+ provider: `togetherai.${modelType}`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch
+ });
+ const createChatModel = (modelId, settings = {}) => {
+ return new import_openai_compatible.OpenAICompatibleChatLanguageModel(modelId, settings, {
+ ...getCommonModelConfig("chat"),
+ defaultObjectGenerationMode: "tool"
+ });
+ };
+ const createCompletionModel = (modelId, settings = {}) => new import_openai_compatible.OpenAICompatibleCompletionLanguageModel(
+ modelId,
+ settings,
+ getCommonModelConfig("completion")
+ );
+ const createTextEmbeddingModel = (modelId, settings = {}) => new import_openai_compatible.OpenAICompatibleEmbeddingModel(
+ modelId,
+ settings,
+ getCommonModelConfig("embedding")
+ );
+ const provider = (modelId, settings) => createChatModel(modelId, settings);
+ provider.completionModel = createCompletionModel;
+ provider.chatModel = createChatModel;
+ provider.textEmbeddingModel = createTextEmbeddingModel;
+ return provider;
+ }
+ var togetherai = createTogetherAI();
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ createTogetherAI,
+ togetherai
+ });
+ //# sourceMappingURL=index.js.map
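Worth noting in the bundle above: chat models are configured with `defaultObjectGenerationMode: "tool"`, which is the mode the AI SDK's structured-output helpers rely on. A hedged sketch of that path, assuming `generateObject` from the `ai` package and `zod` (the declared peer dependency); the schema, model ID, and prompt are illustrative:

```ts
import { togetherai } from '@ai-sdk/togetherai';
import { generateObject } from 'ai';
import { z } from 'zod';

// Structured output is requested via tool calling because the chat model's
// default object generation mode is "tool". Schema below is illustrative.
const { object } = await generateObject({
  model: togetherai('meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo'),
  schema: z.object({
    name: z.string(),
    ingredients: z.array(z.string()),
  }),
  prompt: 'Generate a simple pasta recipe.',
});
```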
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/index.ts","../src/togetherai-provider.ts"],"sourcesContent":["export { createTogetherAI, togetherai } from './togetherai-provider';\nexport type {\n TogetherAIProvider,\n TogetherAIProviderSettings,\n} from './togetherai-provider';\n","import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n TogetherAIChatModelId,\n TogetherAIChatSettings,\n} from './togetherai-chat-settings';\nimport {\n TogetherAIEmbeddingModelId,\n TogetherAIEmbeddingSettings,\n} from './togetherai-embedding-settings';\nimport {\n TogetherAICompletionModelId,\n TogetherAICompletionSettings,\n} from './togetherai-completion-settings';\n\nexport interface TogetherAIProviderSettings {\n /**\nTogetherAI API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface TogetherAIProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: TogetherAICompletionModelId,\n settings?: TogetherAICompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: TogetherAIEmbeddingModelId,\n settings?: TogetherAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createTogetherAI(\n options: TogetherAIProviderSettings = {},\n): TogetherAIProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.together.xyz/v1/',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'TOGETHER_AI_API_KEY',\n description: \"TogetherAI's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `togetherai.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: TogetherAIChatModelId,\n settings: TogetherAIChatSettings = {},\n ) => {\n // TODO(shaper): Likely need a registry of model to object generation mode.\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n };\n\n const createCompletionModel = (\n modelId: TogetherAICompletionModelId,\n settings: TogetherAICompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: TogetherAIEmbeddingModelId,\n settings: TogetherAIEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as TogetherAIProvider;\n}\n\nexport const togetherai = createTogetherAI();\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACCA,+BAIO;AACP,4BAIO;AAoEA,SAAS,iBACd,UAAsC,CAAC,GACnB;AAhFtB;AAiFE,QAAM,cAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,cAAU,kCAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,cAAc,SAAS;AAAA,IACjC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AAEH,WAAO,IAAI,2DAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MAE1C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,aAAa,iBAAiB;","names":[]}
package/dist/index.mjs ADDED
@@ -0,0 +1,57 @@
+ // src/togetherai-provider.ts
+ import {
+ OpenAICompatibleChatLanguageModel,
+ OpenAICompatibleCompletionLanguageModel,
+ OpenAICompatibleEmbeddingModel
+ } from "@ai-sdk/openai-compatible";
+ import {
+ loadApiKey,
+ withoutTrailingSlash
+ } from "@ai-sdk/provider-utils";
+ function createTogetherAI(options = {}) {
+ var _a;
+ const baseURL = withoutTrailingSlash(
+ (_a = options.baseURL) != null ? _a : "https://api.together.xyz/v1/"
+ );
+ const getHeaders = () => ({
+ Authorization: `Bearer ${loadApiKey({
+ apiKey: options.apiKey,
+ environmentVariableName: "TOGETHER_AI_API_KEY",
+ description: "TogetherAI's API key"
+ })}`,
+ ...options.headers
+ });
+ const getCommonModelConfig = (modelType) => ({
+ provider: `togetherai.${modelType}`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch
+ });
+ const createChatModel = (modelId, settings = {}) => {
+ return new OpenAICompatibleChatLanguageModel(modelId, settings, {
+ ...getCommonModelConfig("chat"),
+ defaultObjectGenerationMode: "tool"
+ });
+ };
+ const createCompletionModel = (modelId, settings = {}) => new OpenAICompatibleCompletionLanguageModel(
+ modelId,
+ settings,
+ getCommonModelConfig("completion")
+ );
+ const createTextEmbeddingModel = (modelId, settings = {}) => new OpenAICompatibleEmbeddingModel(
+ modelId,
+ settings,
+ getCommonModelConfig("embedding")
+ );
+ const provider = (modelId, settings) => createChatModel(modelId, settings);
+ provider.completionModel = createCompletionModel;
+ provider.chatModel = createChatModel;
+ provider.textEmbeddingModel = createTextEmbeddingModel;
+ return provider;
+ }
+ var togetherai = createTogetherAI();
+ export {
+ createTogetherAI,
+ togetherai
+ };
+ //# sourceMappingURL=index.mjs.map
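The ESM bundle is functionally identical to the CJS build: the base URL defaults to `https://api.together.xyz/v1/` and the key falls back to `TOGETHER_AI_API_KEY` when `apiKey` is not passed. The provider settings also document a `fetch` option for intercepting requests; a minimal sketch, assuming a fetch-compatible wrapper (the `loggingFetch` name is illustrative, not part of the package):

```ts
import { createTogetherAI } from '@ai-sdk/togetherai';

// Wrap the global fetch to observe outgoing requests before they are sent.
const loggingFetch: typeof globalThis.fetch = async (input, init) => {
  console.log('togetherai request:', input);
  return globalThis.fetch(input, init);
};

export const togetherai = createTogetherAI({ fetch: loggingFetch });
```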
package/dist/index.mjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/togetherai-provider.ts"],"sourcesContent":["import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';\nimport {\n OpenAICompatibleChatLanguageModel,\n OpenAICompatibleCompletionLanguageModel,\n OpenAICompatibleEmbeddingModel,\n} from '@ai-sdk/openai-compatible';\nimport {\n FetchFunction,\n loadApiKey,\n withoutTrailingSlash,\n} from '@ai-sdk/provider-utils';\nimport {\n TogetherAIChatModelId,\n TogetherAIChatSettings,\n} from './togetherai-chat-settings';\nimport {\n TogetherAIEmbeddingModelId,\n TogetherAIEmbeddingSettings,\n} from './togetherai-embedding-settings';\nimport {\n TogetherAICompletionModelId,\n TogetherAICompletionSettings,\n} from './togetherai-completion-settings';\n\nexport interface TogetherAIProviderSettings {\n /**\nTogetherAI API key.\n*/\n apiKey?: string;\n /**\nBase URL for the API calls.\n*/\n baseURL?: string;\n /**\nCustom headers to include in the requests.\n*/\n headers?: Record<string, string>;\n /**\nCustom fetch implementation. You can use it as a middleware to intercept requests,\nor to provide a custom fetch implementation for e.g. testing.\n*/\n fetch?: FetchFunction;\n}\n\nexport interface TogetherAIProvider {\n /**\nCreates a model for text generation.\n*/\n (\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a chat model for text generation.\n*/\n chatModel(\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ): LanguageModelV1;\n\n /**\nCreates a completion model for text generation.\n*/\n completionModel(\n modelId: TogetherAICompletionModelId,\n settings?: TogetherAICompletionSettings,\n ): LanguageModelV1;\n\n /**\nCreates a text embedding model for text generation.\n*/\n textEmbeddingModel(\n modelId: TogetherAIEmbeddingModelId,\n settings?: TogetherAIEmbeddingSettings,\n ): EmbeddingModelV1<string>;\n}\n\nexport function createTogetherAI(\n options: TogetherAIProviderSettings = {},\n): TogetherAIProvider {\n const baseURL = withoutTrailingSlash(\n options.baseURL ?? 
'https://api.together.xyz/v1/',\n );\n const getHeaders = () => ({\n Authorization: `Bearer ${loadApiKey({\n apiKey: options.apiKey,\n environmentVariableName: 'TOGETHER_AI_API_KEY',\n description: \"TogetherAI's API key\",\n })}`,\n ...options.headers,\n });\n\n interface CommonModelConfig {\n provider: string;\n url: ({ path }: { path: string }) => string;\n headers: () => Record<string, string>;\n fetch?: FetchFunction;\n }\n\n const getCommonModelConfig = (modelType: string): CommonModelConfig => ({\n provider: `togetherai.${modelType}`,\n url: ({ path }) => `${baseURL}${path}`,\n headers: getHeaders,\n fetch: options.fetch,\n });\n\n const createChatModel = (\n modelId: TogetherAIChatModelId,\n settings: TogetherAIChatSettings = {},\n ) => {\n // TODO(shaper): Likely need a registry of model to object generation mode.\n return new OpenAICompatibleChatLanguageModel(modelId, settings, {\n ...getCommonModelConfig('chat'),\n defaultObjectGenerationMode: 'tool',\n });\n };\n\n const createCompletionModel = (\n modelId: TogetherAICompletionModelId,\n settings: TogetherAICompletionSettings = {},\n ) =>\n new OpenAICompatibleCompletionLanguageModel(\n modelId,\n settings,\n getCommonModelConfig('completion'),\n );\n\n const createTextEmbeddingModel = (\n modelId: TogetherAIEmbeddingModelId,\n settings: TogetherAIEmbeddingSettings = {},\n ) =>\n new OpenAICompatibleEmbeddingModel(\n modelId,\n settings,\n getCommonModelConfig('embedding'),\n );\n\n const provider = (\n modelId: TogetherAIChatModelId,\n settings?: TogetherAIChatSettings,\n ) => createChatModel(modelId, settings);\n\n provider.completionModel = createCompletionModel;\n provider.chatModel = createChatModel;\n provider.textEmbeddingModel = createTextEmbeddingModel;\n\n return provider as TogetherAIProvider;\n}\n\nexport const togetherai = createTogetherAI();\n"],"mappings":";AACA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP;AAAA,EAEE;AAAA,EACA;AAAA,OACK;AAoEA,SAAS,iBACd,UAAsC,CAAC,GACnB;AAhFtB;AAiFE,QAAM,UAAU;AAAA,KACd,aAAQ,YAAR,YAAmB;AAAA,EACrB;AACA,QAAM,aAAa,OAAO;AAAA,IACxB,eAAe,UAAU,WAAW;AAAA,MAClC,QAAQ,QAAQ;AAAA,MAChB,yBAAyB;AAAA,MACzB,aAAa;AAAA,IACf,CAAC,CAAC;AAAA,IACF,GAAG,QAAQ;AAAA,EACb;AASA,QAAM,uBAAuB,CAAC,eAA0C;AAAA,IACtE,UAAU,cAAc,SAAS;AAAA,IACjC,KAAK,CAAC,EAAE,KAAK,MAAM,GAAG,OAAO,GAAG,IAAI;AAAA,IACpC,SAAS;AAAA,IACT,OAAO,QAAQ;AAAA,EACjB;AAEA,QAAM,kBAAkB,CACtB,SACA,WAAmC,CAAC,MACjC;AAEH,WAAO,IAAI,kCAAkC,SAAS,UAAU;AAAA,MAC9D,GAAG,qBAAqB,MAAM;AAAA,MAC9B,6BAA6B;AAAA,IAC/B,CAAC;AAAA,EACH;AAEA,QAAM,wBAAwB,CAC5B,SACA,WAAyC,CAAC,MAE1C,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,YAAY;AAAA,EACnC;AAEF,QAAM,2BAA2B,CAC/B,SACA,WAAwC,CAAC,MAEzC,IAAI;AAAA,IACF;AAAA,IACA;AAAA,IACA,qBAAqB,WAAW;AAAA,EAClC;AAEF,QAAM,WAAW,CACf,SACA,aACG,gBAAgB,SAAS,QAAQ;AAEtC,WAAS,kBAAkB;AAC3B,WAAS,YAAY;AACrB,WAAS,qBAAqB;AAE9B,SAAO;AACT;AAEO,IAAM,aAAa,iBAAiB;","names":[]}
package/package.json ADDED
@@ -0,0 +1,71 @@
+ {
+ "name": "@ai-sdk/togetherai",
+ "version": "0.0.1",
+ "license": "Apache-2.0",
+ "sideEffects": false,
+ "main": "./dist/index.js",
+ "module": "./dist/index.mjs",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist/**/*",
+ "internal/dist/**/*",
+ "CHANGELOG.md"
+ ],
+ "exports": {
+ "./package.json": "./package.json",
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.mjs",
+ "require": "./dist/index.js"
+ },
+ "./internal": {
+ "types": "./internal/dist/index.d.ts",
+ "import": "./internal/dist/index.mjs",
+ "module": "./internal/dist/index.mjs",
+ "require": "./internal/dist/index.js"
+ }
+ },
+ "dependencies": {
+ "@ai-sdk/openai-compatible": "0.0.1",
+ "@ai-sdk/provider": "1.0.0",
+ "@ai-sdk/provider-utils": "2.0.0"
+ },
+ "devDependencies": {
+ "@types/node": "^18",
+ "tsup": "^8",
+ "typescript": "5.6.3",
+ "zod": "3.23.8",
+ "@vercel/ai-tsconfig": "0.0.0"
+ },
+ "peerDependencies": {
+ "zod": "^3.0.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "publishConfig": {
+ "access": "public"
+ },
+ "homepage": "https://sdk.vercel.ai/docs",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/vercel/ai.git"
+ },
+ "bugs": {
+ "url": "https://github.com/vercel/ai/issues"
+ },
+ "keywords": [
+ "ai"
+ ],
+ "scripts": {
+ "build": "tsup",
+ "build:watch": "tsup --watch",
+ "clean": "rm -rf dist && rm -rf internal/dist",
+ "lint": "eslint \"./**/*.ts*\"",
+ "type-check": "tsc --noEmit",
+ "prettier-check": "prettier --check \"./**/*.ts*\"",
+ "test": "pnpm test:node && pnpm test:edge",
+ "test:edge": "vitest --config vitest.edge.config.js --run",
+ "test:node": "vitest --config vitest.node.config.js --run"
+ }
+ }