@fugood/llama.node 0.6.1 → 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/darwin/arm64/llama-node.node +0 -0
- package/bin/darwin/x64/llama-node.node +0 -0
- package/bin/linux/arm64/llama-node.node +0 -0
- package/bin/linux/x64/llama-node.node +0 -0
- package/bin/linux-cuda/arm64/llama-node.node +0 -0
- package/bin/linux-cuda/x64/llama-node.node +0 -0
- package/bin/linux-vulkan/arm64/llama-node.node +0 -0
- package/bin/linux-vulkan/x64/llama-node.node +0 -0
- package/bin/win32/x64/llama-node.node +0 -0
- package/bin/win32-vulkan/arm64/llama-node.node +0 -0
- package/bin/win32-vulkan/arm64/node.lib +0 -0
- package/bin/win32-vulkan/x64/llama-node.node +0 -0
- package/bin/win32-vulkan/x64/node.lib +0 -0
- package/lib/binding.ts +1 -1
- package/lib/index.js +2 -2
- package/lib/index.ts +2 -2
- package/package.json +1 -1
- package/src/LlamaContext.cpp +3 -3
Binary files changed (contents not shown): the 13 prebuilt binaries listed above under package/bin/ (llama-node.node for each platform target, plus node.lib for the win32-vulkan builds).
package/lib/binding.ts
CHANGED
@@ -218,7 +218,7 @@ export interface LlamaContext {
    * @param path Path to the vocoder model
    * @returns Promise resolving to true if loading was successful
    */
-  initVocoder(path: string): Promise<boolean>
+  initVocoder(options: { path: string }): Promise<boolean>

   /**
    * Unload the vocoder model
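Note on this change: initVocoder now takes an options object with a path field rather than a bare string path. A minimal migration sketch against the 0.6.2 typings (the ctx variable and model path below are illustrative placeholders, not part of this diff):

// 0.6.1 call style, no longer accepted:
// await ctx.initVocoder('/path/to/vocoder-model.gguf')

// 0.6.2 call style: wrap the path in an options object.
const loaded: boolean = await ctx.initVocoder({ path: '/path/to/vocoder-model.gguf' })
if (!loaded) {
  throw new Error('vocoder model failed to load')
}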
package/lib/index.js
CHANGED
@@ -204,8 +204,8 @@ class LlamaContextWrapper {
   getMultimodalSupport() {
     return this.ctx.getMultimodalSupport();
   }
-  initVocoder(path) {
-    return this.ctx.initVocoder(path);
+  initVocoder(options) {
+    return this.ctx.initVocoder(options);
   }
   releaseVocoder() {
     return this.ctx.releaseVocoder();
package/lib/index.ts
CHANGED
@@ -270,8 +270,8 @@ class LlamaContextWrapper {
     return this.ctx.getMultimodalSupport()
   }

-  initVocoder(path: string): Promise<boolean> {
-    return this.ctx.initVocoder(path)
+  initVocoder(options: { path: string }): Promise<boolean> {
+    return this.ctx.initVocoder(options)
   }

   releaseVocoder(): Promise<void> {
package/package.json
CHANGED
package/src/LlamaContext.cpp
CHANGED
@@ -1246,11 +1246,11 @@ tts_type LlamaContext::getTTSType(Napi::Env env, nlohmann::json speaker) {
 // initVocoder(path: string): boolean
 Napi::Value LlamaContext::InitVocoder(const Napi::CallbackInfo &info) {
   Napi::Env env = info.Env();
-  if (info.Length() < 1 || !info[0].
-    Napi::TypeError::New(env, "
+  if (info.Length() < 1 || !info[0].IsObject()) {
+    Napi::TypeError::New(env, "Object is expected for vocoder path")
         .ThrowAsJavaScriptException();
   }
-  auto vocoder_path = info[0].ToString().Utf8Value();
+  auto vocoder_path = info[0].As<Napi::Object>().Get("path").ToString().Utf8Value();
   if (vocoder_path.empty()) {
     Napi::TypeError::New(env, "vocoder path is required")
         .ThrowAsJavaScriptException();
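On the native side, InitVocoder now requires its first argument to be an object and reads the vocoder path from its path property, so a call written against the 0.6.1 signature raises a TypeError at runtime instead of being coerced via ToString. A small defensive sketch, assuming a context object that exposes the 0.6.2 method shape (names here are illustrative, not part of this diff):

async function loadVocoder(ctx: { initVocoder(opts: { path: string }): Promise<boolean> }, path: string): Promise<boolean> {
  try {
    // 0.6.2 shape: the native layer expects an object with a `path` field.
    return await ctx.initVocoder({ path })
  } catch (err) {
    // Passing a bare string (0.6.1 style) now surfaces here as a TypeError.
    console.error('initVocoder failed:', err)
    return false
  }
}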