notdiamond 1.0.8 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +3 -9
- package/dist/index.mjs +3 -9
- package/package.json +1 -1
package/dist/index.cjs
CHANGED
|
@@ -30,7 +30,7 @@ const axios__default = /*#__PURE__*/_interopDefaultCompat(axios);
|
|
|
30
30
|
|
|
31
31
|
const name = "notdiamond";
|
|
32
32
|
const type = "module";
|
|
33
|
-
const version = "1.0.8";
|
|
33
|
+
const version = "1.0.9";
|
|
34
34
|
const author = "not-diamond";
|
|
35
35
|
const license = "MIT";
|
|
36
36
|
const description = "TS/JS client for the NotDiamond API";
|
|
@@ -672,20 +672,14 @@ class NotDiamond {
|
|
|
672
672
|
const { providers } = selectedModel;
|
|
673
673
|
const stream = await Promise.resolve(
|
|
674
674
|
callLLMStream(
|
|
675
|
-
providers?.[0] || {
|
|
676
|
-
provider: "openai",
|
|
677
|
-
model: "gpt-3.5-turbo"
|
|
678
|
-
},
|
|
675
|
+
providers?.[0] || options.default,
|
|
679
676
|
options,
|
|
680
677
|
this.llmKeys,
|
|
681
678
|
runtimeArgs
|
|
682
679
|
)
|
|
683
680
|
);
|
|
684
681
|
return {
|
|
685
|
-
provider: providers?.[0] || {
|
|
686
|
-
provider: "openai",
|
|
687
|
-
model: "gpt-3.5-turbo"
|
|
688
|
-
},
|
|
682
|
+
provider: providers?.[0] || options.default,
|
|
689
683
|
stream
|
|
690
684
|
};
|
|
691
685
|
}
|
package/dist/index.mjs
CHANGED
|
@@ -11,7 +11,7 @@ import { ChatTogetherAI } from '@langchain/community/chat_models/togetherai';
|
|
|
11
11
|
|
|
12
12
|
const name = "notdiamond";
|
|
13
13
|
const type = "module";
|
|
14
|
-
const version = "1.0.8";
|
|
14
|
+
const version = "1.0.9";
|
|
15
15
|
const author = "not-diamond";
|
|
16
16
|
const license = "MIT";
|
|
17
17
|
const description = "TS/JS client for the NotDiamond API";
|
|
@@ -653,20 +653,14 @@ class NotDiamond {
|
|
|
653
653
|
const { providers } = selectedModel;
|
|
654
654
|
const stream = await Promise.resolve(
|
|
655
655
|
callLLMStream(
|
|
656
|
-
providers?.[0] || {
|
|
657
|
-
provider: "openai",
|
|
658
|
-
model: "gpt-3.5-turbo"
|
|
659
|
-
},
|
|
656
|
+
providers?.[0] || options.default,
|
|
660
657
|
options,
|
|
661
658
|
this.llmKeys,
|
|
662
659
|
runtimeArgs
|
|
663
660
|
)
|
|
664
661
|
);
|
|
665
662
|
return {
|
|
666
|
-
provider: providers?.[0] || {
|
|
667
|
-
provider: "openai",
|
|
668
|
-
model: "gpt-3.5-turbo"
|
|
669
|
-
},
|
|
663
|
+
provider: providers?.[0] || options.default,
|
|
670
664
|
stream
|
|
671
665
|
};
|
|
672
666
|
}
|