notdiamond 1.0.8 → 1.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -30,7 +30,7 @@ const axios__default = /*#__PURE__*/_interopDefaultCompat(axios);
 
 const name = "notdiamond";
 const type = "module";
-const version = "1.0.7";
+const version = "1.0.9";
 const author = "not-diamond";
 const license = "MIT";
 const description = "TS/JS client for the NotDiamond API";
@@ -248,6 +248,7 @@ const SupportedModel = {
   GPT_4O_MINI_2024_07_18: "gpt-4o-mini-2024-07-18",
   GPT_4O_MINI: "gpt-4o-mini",
   GPT_4_0125_PREVIEW: "gpt-4-0125-preview",
+  CHATGPT_4O_LATEST: "chatgpt-4o-latest",
   O1_PREVIEW: "o1-preview",
   O1_PREVIEW_2024_09_12: "o1-preview-2024-09-12",
   O1_MINI: "o1-mini",
@@ -256,7 +257,10 @@ const SupportedModel = {
   CLAUDE_3_OPUS_20240229: "claude-3-opus-20240229",
   CLAUDE_3_SONNET_20240229: "claude-3-sonnet-20240229",
   CLAUDE_3_5_SONNET_20240620: "claude-3-5-sonnet-20240620",
+  CLAUDE_3_5_SONNET_20241022: "claude-3-5-sonnet-20241022",
+  CLAUDE_3_5_SONNET_LATEST: "claude-3-5-sonnet-latest",
   CLAUDE_3_HAIKU_20240307: "claude-3-haiku-20240307",
+  CLAUDE_3_5_HAIKU_20241022: "claude-3-5-haiku-20241022",
   GEMINI_PRO: "gemini-pro",
   GEMINI_1_PRO_LATEST: "gemini-1.0-pro-latest",
   GEMINI_15_PRO_LATEST: "gemini-1.5-pro-latest",
@@ -283,7 +287,8 @@ const SupportedModel = {
   LLAMA_3_1_70B_INSTRUCT_TURBO: "Meta-Llama-3.1-70B-Instruct-Turbo",
   LLAMA_3_1_405B_INSTRUCT_TURBO: "Meta-Llama-3.1-405B-Instruct-Turbo",
   LLAMA_3_1_SONAR_LARGE_128K_ONLINE: "llama-3.1-sonar-large-128k-online",
-  OPEN_MISTRAL_NEMO: "open-mistral-nemo"
+  OPEN_MISTRAL_NEMO: "open-mistral-nemo",
+  DEEPSEEK_R1: "DeepSeek-R1"
 };
 ({
   [SupportedProvider.OPENAI]: [
@@ -304,14 +309,18 @@ const SupportedModel = {
     SupportedModel.O1_PREVIEW,
     SupportedModel.O1_PREVIEW_2024_09_12,
     SupportedModel.O1_MINI,
-    SupportedModel.O1_MINI_2024_09_12
+    SupportedModel.O1_MINI_2024_09_12,
+    SupportedModel.CHATGPT_4O_LATEST
   ],
   [SupportedProvider.ANTHROPIC]: [
     SupportedModel.CLAUDE_2_1,
     SupportedModel.CLAUDE_3_OPUS_20240229,
     SupportedModel.CLAUDE_3_SONNET_20240229,
     SupportedModel.CLAUDE_3_5_SONNET_20240620,
-    SupportedModel.CLAUDE_3_HAIKU_20240307
+    SupportedModel.CLAUDE_3_5_SONNET_20241022,
+    SupportedModel.CLAUDE_3_5_SONNET_LATEST,
+    SupportedModel.CLAUDE_3_HAIKU_20240307,
+    SupportedModel.CLAUDE_3_5_HAIKU_20241022
   ],
   [SupportedProvider.GOOGLE]: [
     SupportedModel.GEMINI_PRO,
@@ -348,7 +357,8 @@ const SupportedModel = {
     SupportedModel.QWEN2_72B_INSTRUCT,
     SupportedModel.LLAMA_3_1_8B_INSTRUCT_TURBO,
     SupportedModel.LLAMA_3_1_70B_INSTRUCT_TURBO,
-    SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO
+    SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO,
+    SupportedModel.DEEPSEEK_R1
   ]
 });
 
@@ -672,20 +682,14 @@ class NotDiamond {
     const { providers } = selectedModel;
     const stream = await Promise.resolve(
       callLLMStream(
-        providers?.[0] || {
-          provider: "openai",
-          model: "gpt-3.5-turbo"
-        },
+        providers?.[0] || options.default,
         options,
         this.llmKeys,
         runtimeArgs
       )
     );
     return {
-      provider: providers?.[0] || {
-        provider: "openai",
-        model: "gpt-3.5-turbo"
-      },
+      provider: providers?.[0] || options.default,
       stream
     };
   }
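
The runtime change in this bundle is the stream fallback: when routing returns no providers, the client now falls back to the caller-supplied options.default instead of a hardcoded { provider: "openai", model: "gpt-3.5-turbo" }. A minimal TypeScript sketch of that pattern follows; Provider, StreamOptions, and pickProvider are illustrative stand-ins rather than the package's internal names, and only the fallback expression is taken from the diff.

// Illustrative types; the real internal shapes are not shown in this diff.
interface Provider {
  provider: string;
  model: string;
}

interface StreamOptions {
  default: Provider; // caller-supplied fallback used when routing returns nothing
}

// New behavior: prefer the routed provider, otherwise use options.default.
function pickProvider(providers: Provider[] | undefined, options: StreamOptions): Provider {
  return providers?.[0] || options.default;
}

const chosen = pickProvider(undefined, {
  default: { provider: "openai", model: "gpt-4o-mini" },
});
console.log(chosen); // { provider: "openai", model: "gpt-4o-mini" }

One practical consequence: if options.default is not supplied, the fallback value is now undefined rather than the old implicit gpt-3.5-turbo, so callers relying on that implicit default presumably need to pass one explicitly.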
package/dist/index.d.cts CHANGED
@@ -24,6 +24,7 @@ declare const SupportedModel: {
     readonly GPT_4O_MINI_2024_07_18: "gpt-4o-mini-2024-07-18";
     readonly GPT_4O_MINI: "gpt-4o-mini";
     readonly GPT_4_0125_PREVIEW: "gpt-4-0125-preview";
+    readonly CHATGPT_4O_LATEST: "chatgpt-4o-latest";
     readonly O1_PREVIEW: "o1-preview";
     readonly O1_PREVIEW_2024_09_12: "o1-preview-2024-09-12";
     readonly O1_MINI: "o1-mini";
@@ -32,7 +33,10 @@ declare const SupportedModel: {
     readonly CLAUDE_3_OPUS_20240229: "claude-3-opus-20240229";
     readonly CLAUDE_3_SONNET_20240229: "claude-3-sonnet-20240229";
     readonly CLAUDE_3_5_SONNET_20240620: "claude-3-5-sonnet-20240620";
+    readonly CLAUDE_3_5_SONNET_20241022: "claude-3-5-sonnet-20241022";
+    readonly CLAUDE_3_5_SONNET_LATEST: "claude-3-5-sonnet-latest";
     readonly CLAUDE_3_HAIKU_20240307: "claude-3-haiku-20240307";
+    readonly CLAUDE_3_5_HAIKU_20241022: "claude-3-5-haiku-20241022";
     readonly GEMINI_PRO: "gemini-pro";
     readonly GEMINI_1_PRO_LATEST: "gemini-1.0-pro-latest";
     readonly GEMINI_15_PRO_LATEST: "gemini-1.5-pro-latest";
@@ -60,15 +64,16 @@ declare const SupportedModel: {
     readonly LLAMA_3_1_405B_INSTRUCT_TURBO: "Meta-Llama-3.1-405B-Instruct-Turbo";
     readonly LLAMA_3_1_SONAR_LARGE_128K_ONLINE: "llama-3.1-sonar-large-128k-online";
     readonly OPEN_MISTRAL_NEMO: "open-mistral-nemo";
+    readonly DEEPSEEK_R1: "DeepSeek-R1";
 };
 declare const ProviderModelMap: {
-    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12"];
-    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-haiku-20240307"];
+    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12", "chatgpt-4o-latest"];
+    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-5-sonnet-20241022", "claude-3-5-sonnet-latest", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022"];
     readonly google: readonly ["gemini-pro", "gemini-1.0-pro-latest", "gemini-1.5-pro-latest", "gemini-1.5-pro-exp-0801", "gemini-1.5-flash-latest"];
     readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
     readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
     readonly cohere: readonly ["command-r", "command-r-plus"];
-    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
+    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo", "DeepSeek-R1"];
 };
 type ProviderModelMapType = typeof ProviderModelMap;
 type SupportedProviderType = keyof ProviderModelMapType;
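
Because ProviderModelMap is declared with readonly tuples, the new model strings are also visible at the type level. The sketch below derives per-provider model unions from these declarations; it assumes ProviderModelMapType and SupportedProviderType are exported from the package entry point, which this diff does not show.

import type { ProviderModelMapType, SupportedProviderType } from "notdiamond";

// Union of model strings for a given provider key, derived from the tuples above.
type ModelsFor<P extends SupportedProviderType> = ProviderModelMapType[P][number];

// These assignments type-check against the 1.0.9/1.0.10 declarations but not the previous ones:
const openaiModel: ModelsFor<"openai"> = "chatgpt-4o-latest";
const anthropicModel: ModelsFor<"anthropic"> = "claude-3-5-haiku-20241022";
const togetherModel: ModelsFor<"togetherai"> = "DeepSeek-R1";
console.log(openaiModel, anthropicModel, togetherModel);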
package/dist/index.d.mts CHANGED
@@ -24,6 +24,7 @@ declare const SupportedModel: {
     readonly GPT_4O_MINI_2024_07_18: "gpt-4o-mini-2024-07-18";
     readonly GPT_4O_MINI: "gpt-4o-mini";
     readonly GPT_4_0125_PREVIEW: "gpt-4-0125-preview";
+    readonly CHATGPT_4O_LATEST: "chatgpt-4o-latest";
     readonly O1_PREVIEW: "o1-preview";
     readonly O1_PREVIEW_2024_09_12: "o1-preview-2024-09-12";
     readonly O1_MINI: "o1-mini";
@@ -32,7 +33,10 @@ declare const SupportedModel: {
     readonly CLAUDE_3_OPUS_20240229: "claude-3-opus-20240229";
     readonly CLAUDE_3_SONNET_20240229: "claude-3-sonnet-20240229";
     readonly CLAUDE_3_5_SONNET_20240620: "claude-3-5-sonnet-20240620";
+    readonly CLAUDE_3_5_SONNET_20241022: "claude-3-5-sonnet-20241022";
+    readonly CLAUDE_3_5_SONNET_LATEST: "claude-3-5-sonnet-latest";
     readonly CLAUDE_3_HAIKU_20240307: "claude-3-haiku-20240307";
+    readonly CLAUDE_3_5_HAIKU_20241022: "claude-3-5-haiku-20241022";
     readonly GEMINI_PRO: "gemini-pro";
     readonly GEMINI_1_PRO_LATEST: "gemini-1.0-pro-latest";
     readonly GEMINI_15_PRO_LATEST: "gemini-1.5-pro-latest";
@@ -60,15 +64,16 @@ declare const SupportedModel: {
     readonly LLAMA_3_1_405B_INSTRUCT_TURBO: "Meta-Llama-3.1-405B-Instruct-Turbo";
     readonly LLAMA_3_1_SONAR_LARGE_128K_ONLINE: "llama-3.1-sonar-large-128k-online";
     readonly OPEN_MISTRAL_NEMO: "open-mistral-nemo";
+    readonly DEEPSEEK_R1: "DeepSeek-R1";
 };
 declare const ProviderModelMap: {
-    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12"];
-    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-haiku-20240307"];
+    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12", "chatgpt-4o-latest"];
+    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-5-sonnet-20241022", "claude-3-5-sonnet-latest", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022"];
     readonly google: readonly ["gemini-pro", "gemini-1.0-pro-latest", "gemini-1.5-pro-latest", "gemini-1.5-pro-exp-0801", "gemini-1.5-flash-latest"];
     readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
     readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
     readonly cohere: readonly ["command-r", "command-r-plus"];
-    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
+    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo", "DeepSeek-R1"];
 };
 type ProviderModelMapType = typeof ProviderModelMap;
 type SupportedProviderType = keyof ProviderModelMapType;
package/dist/index.d.ts CHANGED
@@ -24,6 +24,7 @@ declare const SupportedModel: {
     readonly GPT_4O_MINI_2024_07_18: "gpt-4o-mini-2024-07-18";
     readonly GPT_4O_MINI: "gpt-4o-mini";
     readonly GPT_4_0125_PREVIEW: "gpt-4-0125-preview";
+    readonly CHATGPT_4O_LATEST: "chatgpt-4o-latest";
     readonly O1_PREVIEW: "o1-preview";
     readonly O1_PREVIEW_2024_09_12: "o1-preview-2024-09-12";
     readonly O1_MINI: "o1-mini";
@@ -32,7 +33,10 @@ declare const SupportedModel: {
     readonly CLAUDE_3_OPUS_20240229: "claude-3-opus-20240229";
     readonly CLAUDE_3_SONNET_20240229: "claude-3-sonnet-20240229";
     readonly CLAUDE_3_5_SONNET_20240620: "claude-3-5-sonnet-20240620";
+    readonly CLAUDE_3_5_SONNET_20241022: "claude-3-5-sonnet-20241022";
+    readonly CLAUDE_3_5_SONNET_LATEST: "claude-3-5-sonnet-latest";
     readonly CLAUDE_3_HAIKU_20240307: "claude-3-haiku-20240307";
+    readonly CLAUDE_3_5_HAIKU_20241022: "claude-3-5-haiku-20241022";
     readonly GEMINI_PRO: "gemini-pro";
     readonly GEMINI_1_PRO_LATEST: "gemini-1.0-pro-latest";
     readonly GEMINI_15_PRO_LATEST: "gemini-1.5-pro-latest";
@@ -60,15 +64,16 @@ declare const SupportedModel: {
     readonly LLAMA_3_1_405B_INSTRUCT_TURBO: "Meta-Llama-3.1-405B-Instruct-Turbo";
     readonly LLAMA_3_1_SONAR_LARGE_128K_ONLINE: "llama-3.1-sonar-large-128k-online";
     readonly OPEN_MISTRAL_NEMO: "open-mistral-nemo";
+    readonly DEEPSEEK_R1: "DeepSeek-R1";
 };
 declare const ProviderModelMap: {
-    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12"];
-    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-haiku-20240307"];
+    readonly openai: readonly ["gpt-3.5-turbo", "gpt-3.5-turbo-0125", "gpt-4", "gpt-4-0613", "gpt-4-1106-preview", "gpt-4-turbo", "gpt-4-turbo-preview", "gpt-4-turbo-2024-04-09", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o", "gpt-4o-mini-2024-07-18", "gpt-4o-mini", "gpt-4-0125-preview", "o1-preview", "o1-preview-2024-09-12", "o1-mini", "o1-mini-2024-09-12", "chatgpt-4o-latest"];
+    readonly anthropic: readonly ["claude-2.1", "claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-5-sonnet-20240620", "claude-3-5-sonnet-20241022", "claude-3-5-sonnet-latest", "claude-3-haiku-20240307", "claude-3-5-haiku-20241022"];
     readonly google: readonly ["gemini-pro", "gemini-1.0-pro-latest", "gemini-1.5-pro-latest", "gemini-1.5-pro-exp-0801", "gemini-1.5-flash-latest"];
     readonly mistral: readonly ["mistral-large-latest", "mistral-large-2407", "mistral-large-2402", "mistral-medium-latest", "mistral-small-latest", "codestral-latest", "open-mistral-7b", "open-mixtral-8x7b", "open-mixtral-8x22b", "open-mistral-nemo"];
     readonly perplexity: readonly ["llama-3.1-sonar-large-128k-online"];
     readonly cohere: readonly ["command-r", "command-r-plus"];
-    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo"];
+    readonly togetherai: readonly ["Mistral-7B-Instruct-v0.2", "Mixtral-8x7B-Instruct-v0.1", "Mixtral-8x22B-Instruct-v0.1", "Llama-3-70b-chat-hf", "Llama-3-8b-chat-hf", "Qwen2-72B-Instruct", "Meta-Llama-3.1-8B-Instruct-Turbo", "Meta-Llama-3.1-70B-Instruct-Turbo", "Meta-Llama-3.1-405B-Instruct-Turbo", "DeepSeek-R1"];
 };
 type ProviderModelMapType = typeof ProviderModelMap;
 type SupportedProviderType = keyof ProviderModelMapType;
package/dist/index.mjs CHANGED
@@ -11,7 +11,7 @@ import { ChatTogetherAI } from '@langchain/community/chat_models/togetherai';
 
 const name = "notdiamond";
 const type = "module";
-const version = "1.0.7";
+const version = "1.0.9";
 const author = "not-diamond";
 const license = "MIT";
 const description = "TS/JS client for the NotDiamond API";
@@ -229,6 +229,7 @@ const SupportedModel = {
   GPT_4O_MINI_2024_07_18: "gpt-4o-mini-2024-07-18",
   GPT_4O_MINI: "gpt-4o-mini",
   GPT_4_0125_PREVIEW: "gpt-4-0125-preview",
+  CHATGPT_4O_LATEST: "chatgpt-4o-latest",
   O1_PREVIEW: "o1-preview",
   O1_PREVIEW_2024_09_12: "o1-preview-2024-09-12",
   O1_MINI: "o1-mini",
@@ -237,7 +238,10 @@ const SupportedModel = {
   CLAUDE_3_OPUS_20240229: "claude-3-opus-20240229",
   CLAUDE_3_SONNET_20240229: "claude-3-sonnet-20240229",
   CLAUDE_3_5_SONNET_20240620: "claude-3-5-sonnet-20240620",
+  CLAUDE_3_5_SONNET_20241022: "claude-3-5-sonnet-20241022",
+  CLAUDE_3_5_SONNET_LATEST: "claude-3-5-sonnet-latest",
   CLAUDE_3_HAIKU_20240307: "claude-3-haiku-20240307",
+  CLAUDE_3_5_HAIKU_20241022: "claude-3-5-haiku-20241022",
   GEMINI_PRO: "gemini-pro",
   GEMINI_1_PRO_LATEST: "gemini-1.0-pro-latest",
   GEMINI_15_PRO_LATEST: "gemini-1.5-pro-latest",
@@ -264,7 +268,8 @@ const SupportedModel = {
   LLAMA_3_1_70B_INSTRUCT_TURBO: "Meta-Llama-3.1-70B-Instruct-Turbo",
   LLAMA_3_1_405B_INSTRUCT_TURBO: "Meta-Llama-3.1-405B-Instruct-Turbo",
   LLAMA_3_1_SONAR_LARGE_128K_ONLINE: "llama-3.1-sonar-large-128k-online",
-  OPEN_MISTRAL_NEMO: "open-mistral-nemo"
+  OPEN_MISTRAL_NEMO: "open-mistral-nemo",
+  DEEPSEEK_R1: "DeepSeek-R1"
 };
 ({
   [SupportedProvider.OPENAI]: [
@@ -285,14 +290,18 @@ const SupportedModel = {
     SupportedModel.O1_PREVIEW,
     SupportedModel.O1_PREVIEW_2024_09_12,
     SupportedModel.O1_MINI,
-    SupportedModel.O1_MINI_2024_09_12
+    SupportedModel.O1_MINI_2024_09_12,
+    SupportedModel.CHATGPT_4O_LATEST
   ],
   [SupportedProvider.ANTHROPIC]: [
     SupportedModel.CLAUDE_2_1,
     SupportedModel.CLAUDE_3_OPUS_20240229,
     SupportedModel.CLAUDE_3_SONNET_20240229,
     SupportedModel.CLAUDE_3_5_SONNET_20240620,
-    SupportedModel.CLAUDE_3_HAIKU_20240307
+    SupportedModel.CLAUDE_3_5_SONNET_20241022,
+    SupportedModel.CLAUDE_3_5_SONNET_LATEST,
+    SupportedModel.CLAUDE_3_HAIKU_20240307,
+    SupportedModel.CLAUDE_3_5_HAIKU_20241022
   ],
   [SupportedProvider.GOOGLE]: [
     SupportedModel.GEMINI_PRO,
@@ -329,7 +338,8 @@ const SupportedModel = {
     SupportedModel.QWEN2_72B_INSTRUCT,
     SupportedModel.LLAMA_3_1_8B_INSTRUCT_TURBO,
     SupportedModel.LLAMA_3_1_70B_INSTRUCT_TURBO,
-    SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO
+    SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO,
+    SupportedModel.DEEPSEEK_R1
   ]
 });
 
@@ -653,20 +663,14 @@ class NotDiamond {
     const { providers } = selectedModel;
     const stream = await Promise.resolve(
       callLLMStream(
-        providers?.[0] || {
-          provider: "openai",
-          model: "gpt-3.5-turbo"
-        },
+        providers?.[0] || options.default,
         options,
         this.llmKeys,
         runtimeArgs
      )
    );
    return {
-      provider: providers?.[0] || {
-        provider: "openai",
-        model: "gpt-3.5-turbo"
-      },
+      provider: providers?.[0] || options.default,
      stream
    };
  }
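
For quick reference, the sketch below lists the model constants added in this release as they appear in both bundles; it assumes SupportedModel is exported from the package entry point, which this diff does not show.

import { SupportedModel } from "notdiamond";

// Entries new in 1.0.9/1.0.10:
const addedModels = [
  SupportedModel.CHATGPT_4O_LATEST,          // "chatgpt-4o-latest" (openai)
  SupportedModel.CLAUDE_3_5_SONNET_20241022, // "claude-3-5-sonnet-20241022" (anthropic)
  SupportedModel.CLAUDE_3_5_SONNET_LATEST,   // "claude-3-5-sonnet-latest" (anthropic)
  SupportedModel.CLAUDE_3_5_HAIKU_20241022,  // "claude-3-5-haiku-20241022" (anthropic)
  SupportedModel.DEEPSEEK_R1,                // "DeepSeek-R1" (togetherai)
] as const;

console.log(addedModels);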
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "notdiamond",
   "type": "module",
-  "version": "1.0.8",
+  "version": "1.0.10",
   "author": "not-diamond",
   "license": "MIT",
   "description": "TS/JS client for the NotDiamond API",