notdiamond 1.0.3 → 1.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +15 -3
- package/dist/index.mjs +15 -3
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -30,7 +30,7 @@ const axios__default = /*#__PURE__*/_interopDefaultCompat(axios);
|
|
|
30
30
|
|
|
31
31
|
const name = "notdiamond";
|
|
32
32
|
const type = "module";
|
|
33
|
-
const version = "1.0.3";
|
|
33
|
+
const version = "1.0.6";
|
|
34
34
|
const author = "not-diamond";
|
|
35
35
|
const license = "MIT";
|
|
36
36
|
const description = "TS/JS client for the NotDiamond API";
|
|
@@ -425,17 +425,29 @@ function getLangChainModel(provider, llmKeys, responseModel) {
|
|
|
425
425
|
if (responseModel) {
|
|
426
426
|
return new togetherai.ChatTogetherAI({
|
|
427
427
|
apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
|
|
428
|
-
model: provider.model
|
|
428
|
+
model: getTogetheraiModel(provider.model)
|
|
429
429
|
}).withStructuredOutput(responseModel);
|
|
430
430
|
}
|
|
431
431
|
return new togetherai.ChatTogetherAI({
|
|
432
432
|
apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
|
|
433
|
-
model: provider.model
|
|
433
|
+
model: getTogetheraiModel(provider.model)
|
|
434
434
|
});
|
|
435
435
|
default:
|
|
436
436
|
throw new Error(`Unsupported provider: ${provider.provider}`);
|
|
437
437
|
}
|
|
438
438
|
}
|
|
439
|
+
const getTogetheraiModel = (model) => {
|
|
440
|
+
if (model === SupportedModel.MISTRAL_7B_INSTRUCT_V0_2 || model === SupportedModel.MIXTRAL_8X7B_INSTRUCT_V0_1 || model === SupportedModel.MIXTRAL_8X22B_INSTRUCT_V0_1) {
|
|
441
|
+
return `mistralai/${model}`;
|
|
442
|
+
}
|
|
443
|
+
if (model === SupportedModel.LLAMA_3_70B_CHAT_HF || model === SupportedModel.LLAMA_3_8B_CHAT_HF || model === SupportedModel.LLAMA_3_1_8B_INSTRUCT_TURBO || model === SupportedModel.LLAMA_3_1_70B_INSTRUCT_TURBO || model === SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO) {
|
|
444
|
+
return `meta-llama/${model}`;
|
|
445
|
+
}
|
|
446
|
+
if (model === SupportedModel.QWEN2_72B_INSTRUCT) {
|
|
447
|
+
return `Qwen/${model}`;
|
|
448
|
+
}
|
|
449
|
+
return model;
|
|
450
|
+
};
|
|
439
451
|
async function callLLM(provider, options, llmKeys, runtimeArgs) {
|
|
440
452
|
const model = getLangChainModel(provider, llmKeys, options.responseModel);
|
|
441
453
|
const langChainMessages = extendProviderSystemPrompt(
|
package/dist/index.mjs
CHANGED
|
@@ -11,7 +11,7 @@ import { ChatTogetherAI } from '@langchain/community/chat_models/togetherai';
|
|
|
11
11
|
|
|
12
12
|
const name = "notdiamond";
|
|
13
13
|
const type = "module";
|
|
14
|
-
const version = "1.0.3";
|
|
14
|
+
const version = "1.0.6";
|
|
15
15
|
const author = "not-diamond";
|
|
16
16
|
const license = "MIT";
|
|
17
17
|
const description = "TS/JS client for the NotDiamond API";
|
|
@@ -406,17 +406,29 @@ function getLangChainModel(provider, llmKeys, responseModel) {
|
|
|
406
406
|
if (responseModel) {
|
|
407
407
|
return new ChatTogetherAI({
|
|
408
408
|
apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
|
|
409
|
-
model: provider.model
|
|
409
|
+
model: getTogetheraiModel(provider.model)
|
|
410
410
|
}).withStructuredOutput(responseModel);
|
|
411
411
|
}
|
|
412
412
|
return new ChatTogetherAI({
|
|
413
413
|
apiKey: process.env.TOGETHERAI_API_KEY || llmKeys.togetherai,
|
|
414
|
-
model: provider.model
|
|
414
|
+
model: getTogetheraiModel(provider.model)
|
|
415
415
|
});
|
|
416
416
|
default:
|
|
417
417
|
throw new Error(`Unsupported provider: ${provider.provider}`);
|
|
418
418
|
}
|
|
419
419
|
}
|
|
420
|
+
const getTogetheraiModel = (model) => {
|
|
421
|
+
if (model === SupportedModel.MISTRAL_7B_INSTRUCT_V0_2 || model === SupportedModel.MIXTRAL_8X7B_INSTRUCT_V0_1 || model === SupportedModel.MIXTRAL_8X22B_INSTRUCT_V0_1) {
|
|
422
|
+
return `mistralai/${model}`;
|
|
423
|
+
}
|
|
424
|
+
if (model === SupportedModel.LLAMA_3_70B_CHAT_HF || model === SupportedModel.LLAMA_3_8B_CHAT_HF || model === SupportedModel.LLAMA_3_1_8B_INSTRUCT_TURBO || model === SupportedModel.LLAMA_3_1_70B_INSTRUCT_TURBO || model === SupportedModel.LLAMA_3_1_405B_INSTRUCT_TURBO) {
|
|
425
|
+
return `meta-llama/${model}`;
|
|
426
|
+
}
|
|
427
|
+
if (model === SupportedModel.QWEN2_72B_INSTRUCT) {
|
|
428
|
+
return `Qwen/${model}`;
|
|
429
|
+
}
|
|
430
|
+
return model;
|
|
431
|
+
};
|
|
420
432
|
async function callLLM(provider, options, llmKeys, runtimeArgs) {
|
|
421
433
|
const model = getLangChainModel(provider, llmKeys, options.responseModel);
|
|
422
434
|
const langChainMessages = extendProviderSystemPrompt(
|