notdiamond 1.0.1 → 1.0.3

Note: the compiled `dist` bundles in this diff embed `version = "1.0.2"` while `package.json` is bumped to `"1.0.3"` — the dist output appears to have been built before the final version bump, so the two version strings in the published package disagree.

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -30,7 +30,7 @@ const axios__default = /*#__PURE__*/_interopDefaultCompat(axios);
30
30
 
31
31
  const name = "notdiamond";
32
32
  const type = "module";
33
- const version = "1.0.1";
33
+ const version = "1.0.2";
34
34
  const author = "not-diamond";
35
35
  const license = "MIT";
36
36
  const description = "TS/JS client for the NotDiamond API";
@@ -231,7 +231,7 @@ const SupportedProvider = {
231
231
  MISTRAL: "mistral",
232
232
  PERPLEXITY: "perplexity",
233
233
  COHERE: "cohere",
234
- TOGETHER: "together"
234
+ TOGETHERAI: "togetherai"
235
235
  };
236
236
  const SupportedModel = {
237
237
  GPT_3_5_TURBO: "gpt-3.5-turbo",
@@ -339,7 +339,7 @@ const SupportedModel = {
339
339
  SupportedModel.COMMAND_R,
340
340
  SupportedModel.COMMAND_R_PLUS
341
341
  ],
342
- [SupportedProvider.TOGETHER]: [
342
+ [SupportedProvider.TOGETHERAI]: [
343
343
  SupportedModel.MISTRAL_7B_INSTRUCT_V0_2,
344
344
  SupportedModel.MIXTRAL_8X7B_INSTRUCT_V0_1,
345
345
  SupportedModel.MIXTRAL_8X22B_INSTRUCT_V0_1,
@@ -353,7 +353,7 @@ const SupportedModel = {
353
353
  });
354
354
 
355
355
  function getLangChainModel(provider, llmKeys, responseModel) {
356
- const { OPENAI, ANTHROPIC, GOOGLE, MISTRAL, PERPLEXITY, COHERE, TOGETHER } = SupportedProvider;
356
+ const { OPENAI, ANTHROPIC, GOOGLE, MISTRAL, PERPLEXITY, COHERE, TOGETHERAI } = SupportedProvider;
357
357
  switch (provider.provider) {
358
358
  case OPENAI:
359
359
  if (responseModel) {
@@ -421,15 +421,15 @@ function getLangChainModel(provider, llmKeys, responseModel) {
421
421
  apiKey: process.env.COHERE_API_KEY || llmKeys.cohere,
422
422
  model: provider.model
423
423
  });
424
- case TOGETHER:
424
+ case TOGETHERAI:
425
425
  if (responseModel) {
426
426
  return new togetherai.ChatTogetherAI({
427
- apiKey: process.env.TOGETHER_API_KEY || llmKeys.together,
427
+ apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
428
428
  model: provider.model
429
429
  }).withStructuredOutput(responseModel);
430
430
  }
431
431
  return new togetherai.ChatTogetherAI({
432
- apiKey: process.env.TOGETHER_API_KEY || llmKeys.together,
432
+ apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
433
433
  model: provider.model
434
434
  });
435
435
  default:
package/dist/index.d.cts CHANGED
@@ -7,7 +7,7 @@ declare const SupportedProvider: {
7
7
  readonly MISTRAL: "mistral";
8
8
  readonly PERPLEXITY: "perplexity";
9
9
  readonly COHERE: "cohere";
10
- readonly TOGETHER: "together";
10
+ readonly TOGETHERAI: "togetherai";
11
11
  };
12
12
  declare const SupportedModel: {
13
13
  readonly GPT_3_5_TURBO: "gpt-3.5-turbo";
@@ -68,7 +68,7 @@ declare const ProviderModelMap: {
68
68
  readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
69
69
  readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
70
70
  readonly cohere: readonly ["command-r", "command-r-plus"];
71
- readonly together: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
71
+ readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
72
72
  };
73
73
  type ProviderModelMapType = typeof ProviderModelMap;
74
74
  type SupportedProviderType = keyof ProviderModelMapType;
@@ -181,7 +181,7 @@ declare class NotDiamond {
181
181
  providers: Provider[];
182
182
  }) => void): Promise<{
183
183
  content: string;
184
- providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">[];
184
+ providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">[];
185
185
  }> | undefined;
186
186
  /**
187
187
  * Streams the results of the model asynchronously.
@@ -199,7 +199,7 @@ declare class NotDiamond {
199
199
  provider: Provider;
200
200
  chunk?: string;
201
201
  }) => void): Promise<{
202
- provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">;
202
+ provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">;
203
203
  stream: AsyncIterable<string>;
204
204
  }> | undefined;
205
205
  }
package/dist/index.d.mts CHANGED
@@ -7,7 +7,7 @@ declare const SupportedProvider: {
7
7
  readonly MISTRAL: "mistral";
8
8
  readonly PERPLEXITY: "perplexity";
9
9
  readonly COHERE: "cohere";
10
- readonly TOGETHER: "together";
10
+ readonly TOGETHERAI: "togetherai";
11
11
  };
12
12
  declare const SupportedModel: {
13
13
  readonly GPT_3_5_TURBO: "gpt-3.5-turbo";
@@ -68,7 +68,7 @@ declare const ProviderModelMap: {
68
68
  readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
69
69
  readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
70
70
  readonly cohere: readonly ["command-r", "command-r-plus"];
71
- readonly together: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
71
+ readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
72
72
  };
73
73
  type ProviderModelMapType = typeof ProviderModelMap;
74
74
  type SupportedProviderType = keyof ProviderModelMapType;
@@ -181,7 +181,7 @@ declare class NotDiamond {
181
181
  providers: Provider[];
182
182
  }) => void): Promise<{
183
183
  content: string;
184
- providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">[];
184
+ providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">[];
185
185
  }> | undefined;
186
186
  /**
187
187
  * Streams the results of the model asynchronously.
@@ -199,7 +199,7 @@ declare class NotDiamond {
199
199
  provider: Provider;
200
200
  chunk?: string;
201
201
  }) => void): Promise<{
202
- provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">;
202
+ provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">;
203
203
  stream: AsyncIterable<string>;
204
204
  }> | undefined;
205
205
  }
package/dist/index.d.ts CHANGED
@@ -7,7 +7,7 @@ declare const SupportedProvider: {
7
7
  readonly MISTRAL: "mistral";
8
8
  readonly PERPLEXITY: "perplexity";
9
9
  readonly COHERE: "cohere";
10
- readonly TOGETHER: "together";
10
+ readonly TOGETHERAI: "togetherai";
11
11
  };
12
12
  declare const SupportedModel: {
13
13
  readonly GPT_3_5_TURBO: "gpt-3.5-turbo";
@@ -68,7 +68,7 @@ declare const ProviderModelMap: {
68
68
  readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
69
69
  readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
70
70
  readonly cohere: readonly ["command-r", "command-r-plus"];
71
- readonly together: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
71
+ readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
72
72
  };
73
73
  type ProviderModelMapType = typeof ProviderModelMap;
74
74
  type SupportedProviderType = keyof ProviderModelMapType;
@@ -181,7 +181,7 @@ declare class NotDiamond {
181
181
  providers: Provider[];
182
182
  }) => void): Promise<{
183
183
  content: string;
184
- providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">[];
184
+ providers: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">[];
185
185
  }> | undefined;
186
186
  /**
187
187
  * Streams the results of the model asynchronously.
@@ -199,7 +199,7 @@ declare class NotDiamond {
199
199
  provider: Provider;
200
200
  chunk?: string;
201
201
  }) => void): Promise<{
202
- provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "together">;
202
+ provider: Provider<"openai" | "anthropic" | "perplexity" | "google" | "mistral" | "cohere" | "togetherai">;
203
203
  stream: AsyncIterable<string>;
204
204
  }> | undefined;
205
205
  }
package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import { ChatTogetherAI } from '@langchain/community/chat_models/togetherai';
11
11
 
12
12
  const name = "notdiamond";
13
13
  const type = "module";
14
- const version = "1.0.1";
14
+ const version = "1.0.2";
15
15
  const author = "not-diamond";
16
16
  const license = "MIT";
17
17
  const description = "TS/JS client for the NotDiamond API";
@@ -212,7 +212,7 @@ const SupportedProvider = {
212
212
  MISTRAL: "mistral",
213
213
  PERPLEXITY: "perplexity",
214
214
  COHERE: "cohere",
215
- TOGETHER: "together"
215
+ TOGETHERAI: "togetherai"
216
216
  };
217
217
  const SupportedModel = {
218
218
  GPT_3_5_TURBO: "gpt-3.5-turbo",
@@ -320,7 +320,7 @@ const SupportedModel = {
320
320
  SupportedModel.COMMAND_R,
321
321
  SupportedModel.COMMAND_R_PLUS
322
322
  ],
323
- [SupportedProvider.TOGETHER]: [
323
+ [SupportedProvider.TOGETHERAI]: [
324
324
  SupportedModel.MISTRAL_7B_INSTRUCT_V0_2,
325
325
  SupportedModel.MIXTRAL_8X7B_INSTRUCT_V0_1,
326
326
  SupportedModel.MIXTRAL_8X22B_INSTRUCT_V0_1,
@@ -334,7 +334,7 @@ const SupportedModel = {
334
334
  });
335
335
 
336
336
  function getLangChainModel(provider, llmKeys, responseModel) {
337
- const { OPENAI, ANTHROPIC, GOOGLE, MISTRAL, PERPLEXITY, COHERE, TOGETHER } = SupportedProvider;
337
+ const { OPENAI, ANTHROPIC, GOOGLE, MISTRAL, PERPLEXITY, COHERE, TOGETHERAI } = SupportedProvider;
338
338
  switch (provider.provider) {
339
339
  case OPENAI:
340
340
  if (responseModel) {
@@ -402,15 +402,15 @@ function getLangChainModel(provider, llmKeys, responseModel) {
402
402
  apiKey: process.env.COHERE_API_KEY || llmKeys.cohere,
403
403
  model: provider.model
404
404
  });
405
- case TOGETHER:
405
+ case TOGETHERAI:
406
406
  if (responseModel) {
407
407
  return new ChatTogetherAI({
408
- apiKey: process.env.TOGETHER_API_KEY || llmKeys.together,
408
+ apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
409
409
  model: provider.model
410
410
  }).withStructuredOutput(responseModel);
411
411
  }
412
412
  return new ChatTogetherAI({
413
- apiKey: process.env.TOGETHER_API_KEY || llmKeys.together,
413
+ apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
414
414
  model: provider.model
415
415
  });
416
416
  default:
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "notdiamond",
3
3
  "type": "module",
4
- "version": "1.0.1",
4
+ "version": "1.0.3",
5
5
  "author": "not-diamond",
6
6
  "license": "MIT",
7
7
  "description": "TS/JS client for the NotDiamond API",