@fugood/llama.node 0.6.0 → 0.6.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CMakeLists.txt CHANGED
@@ -47,17 +47,30 @@ message(STATUS "Platform: ${PLATFORM}")
 message(STATUS "Architecture: ${ARCH}")
 message(STATUS "PLATFORM_BINARY_DIR: ${PLATFORM_BINARY_DIR}")
 
-# set strip flags
 if(CMAKE_BUILD_TYPE STREQUAL "Release")
-  if(UNIX OR MINGW)
+  if((UNIX OR MINGW) AND NOT CLANG)
     set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -s")
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s")
-  elseif(MSVC)
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /s")
-    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /s")
   endif()
 endif()
 
+# Improve speed
+if(CMAKE_BUILD_TYPE STREQUAL "Release")
+  if (MSVC)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /O2 /Ob2 /Oi /Ot /Oy /GL")
+  else()
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -funroll-loops -flto")
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -O3 -funroll-loops -flto")
+    set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} -flto")
+  endif()
+endif()
+
+if (CLANG)
+  set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")
+  set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -static-libgcc -static-libstdc++")
+endif()
+
 # flags: -fPIC
 set(CMAKE_POSITION_INDEPENDENT_CODE ON)
 
@@ -108,6 +121,24 @@ file(
   "src/tts_utils.h"
 )
 
+if (CLANG AND CMAKE_SYSTEM_NAME STREQUAL "Windows")
+  file(GLOB WIN_DYNAMIC_LOAD_SRC "src/win_dynamic_load.c")
+
+  add_library(win_dynamic_load ${WIN_DYNAMIC_LOAD_SRC})
+  if (NOT MSVC)
+    set_target_properties(win_dynamic_load PROPERTIES COMPILE_FLAGS "-Wno-implicit-function-declaration")
+  endif()
+
+  unset(CMAKE_JS_SRC)
+  unset(CMAKE_JS_LIB)
+  unset(CMAKE_JS_NODELIB_DEF)
+  unset(CMAKE_JS_NODELIB_TARGET)
+  unset(CMAKE_JS_NODELIB_TARGET_NAME)
+  string(REGEX REPLACE "/DELAYLOAD:NODE.EXE" "" CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS}")
+
+  set(CMAKE_JS_LIB win_dynamic_load)
+endif()
+
 add_library(${PROJECT_NAME} SHARED ${SOURCE_FILES} ${CMAKE_JS_SRC})
 set_target_properties(${PROJECT_NAME} PROPERTIES PREFIX "" SUFFIX ".node")
 target_link_libraries(${PROJECT_NAME} ${CMAKE_JS_LIB} llama ggml common mtmd ${CMAKE_THREAD_LIBS_INIT})
(9 binary files changed; contents not shown in this diff)
package/lib/binding.ts CHANGED
@@ -218,7 +218,7 @@ export interface LlamaContext {
    * @param path Path to the vocoder model
    * @returns Promise resolving to true if loading was successful
    */
-  initVocoder(path: string): Promise<boolean>
+  initVocoder(options: { path: string }): Promise<boolean>
 
   /**
    * Unload the vocoder model
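Taken together, this binding.ts change and the lib/index.js and lib/index.ts changes below move initVocoder from a bare string argument to an options object. A minimal migration sketch, with the caveat that the loadModel entry point and its option names are assumptions for illustration; only the initVocoder signatures come from this diff:

// Hypothetical migration sketch for the initVocoder signature change.
import { loadModel } from '@fugood/llama.node' // assumed entry point

const ctx = await loadModel({ model: './model.gguf' }) // illustrative options

// 0.6.0: initVocoder(path: string)
// await ctx.initVocoder('./vocoder.gguf') // now raises a TypeError (see the
//                                         // native InitVocoder change below)

// 0.6.2: initVocoder(options: { path: string })
const ok: boolean = await ctx.initVocoder({ path: './vocoder.gguf' })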
package/lib/index.js CHANGED
@@ -204,8 +204,8 @@ class LlamaContextWrapper {
   getMultimodalSupport() {
     return this.ctx.getMultimodalSupport();
   }
-  initVocoder(path) {
-    return this.ctx.initVocoder(path);
+  initVocoder(options) {
+    return this.ctx.initVocoder(options);
   }
   releaseVocoder() {
     return this.ctx.releaseVocoder();
package/lib/index.ts CHANGED
@@ -270,8 +270,8 @@ class LlamaContextWrapper {
     return this.ctx.getMultimodalSupport()
   }
 
-  initVocoder(path: string): Promise<boolean> {
-    return this.ctx.initVocoder(path)
+  initVocoder(options: { path: string }): Promise<boolean> {
+    return this.ctx.initVocoder(options)
   }
 
   releaseVocoder(): Promise<void> {
package/package.json CHANGED
@@ -1,14 +1,16 @@
 {
   "name": "@fugood/llama.node",
   "access": "public",
-  "version": "0.6.0",
+  "version": "0.6.2",
   "description": "An another Node binding of llama.cpp",
   "main": "lib/index.js",
   "scripts": {
+    "postinstall": "patch-package",
     "pretest": "node scripts/download-test-models.js",
     "test": "jest",
     "build": "tsc",
     "prepack": "yarn build",
+    "prebuild-native": "node scripts/generate_win_dynamic_load.js 6",
     "build-native": "cmake-js compile",
     "clean": "rimraf build",
     "prepare": "husky",
@@ -59,6 +61,7 @@
     "cmake-js": "^7.3.0",
     "husky": "^9.0.11",
     "jest": "^29.7.0",
+    "patch-package": "^8.0.0",
     "release-it": "^17.7.0",
     "rimraf": "^6.0.1",
     "typescript": "^5.4.5",
@@ -0,0 +1,26 @@
+diff --git a/node_modules/node-api-headers/include/js_native_api.h b/node_modules/node-api-headers/include/js_native_api.h
+index e06bdc8..c35398a 100644
+--- a/node_modules/node-api-headers/include/js_native_api.h
++++ b/node_modules/node-api-headers/include/js_native_api.h
+@@ -22,7 +22,7 @@
+ // If you need __declspec(dllimport), either include <node_api.h> instead, or
+ // define NAPI_EXTERN as __declspec(dllimport) on the compiler's command line.
+ #ifndef NAPI_EXTERN
+-#ifdef _WIN32
++#if defined(_WIN32) && _MSC_VER
+ #define NAPI_EXTERN __declspec(dllexport)
+ #elif defined(__wasm32__)
+ #define NAPI_EXTERN \
+diff --git a/node_modules/node-api-headers/include/node_api.h b/node_modules/node-api-headers/include/node_api.h
+index 0345468..359c496 100644
+--- a/node_modules/node-api-headers/include/node_api.h
++++ b/node_modules/node-api-headers/include/node_api.h
+@@ -2,7 +2,7 @@
+ #define SRC_NODE_API_H_
+ 
+ #ifdef BUILDING_NODE_EXTENSION
+-#ifdef _WIN32
++#if defined(_WIN32) && _MSC_VER
+ // Building native addon against node
+ #define NAPI_EXTERN __declspec(dllimport)
+ #elif defined(__wasm32__)
package/src/LlamaContext.cpp CHANGED
@@ -444,9 +444,9 @@ Napi::Value LlamaContext::GetModelInfo(const Napi::CallbackInfo &info) {
   details.Set("size", llama_model_size(model));
 
   Napi::Object chatTemplates = Napi::Object::New(info.Env());
-  chatTemplates.Set("llamaChat", validateModelChatTemplate(model, false, ""));
+  chatTemplates.Set("llamaChat", validateModelChatTemplate(model, false, nullptr));
   Napi::Object minja = Napi::Object::New(info.Env());
-  minja.Set("default", validateModelChatTemplate(model, true, ""));
+  minja.Set("default", validateModelChatTemplate(model, true, nullptr));
   Napi::Object defaultCaps = Napi::Object::New(info.Env());
   defaultCaps.Set(
       "tools",
@@ -498,7 +498,7 @@ Napi::Value LlamaContext::GetModelInfo(const Napi::CallbackInfo &info) {
 
   // Deprecated: use chatTemplates.llamaChat instead
   details.Set("isChatTemplateSupported",
-              validateModelChatTemplate(_sess->model(), false, ""));
+              validateModelChatTemplate(_sess->model(), false, nullptr));
   return details;
 }
 
@@ -601,6 +601,8 @@ Napi::Value LlamaContext::GetFormattedChat(const Napi::CallbackInfo &info) {
       _sess, _templates, messages, chat_template, json_schema_str, tools_str,
       parallel_tool_calls, tool_choice);
 
+  console_log(env, std::string("format: ") + std::to_string(chatParams.format));
+
   Napi::Object result = Napi::Object::New(env);
   result.Set("prompt", chatParams.prompt);
   // chat_format: int
@@ -1244,11 +1246,11 @@ tts_type LlamaContext::getTTSType(Napi::Env env, nlohmann::json speaker) {
 // initVocoder(path: string): boolean
 Napi::Value LlamaContext::InitVocoder(const Napi::CallbackInfo &info) {
   Napi::Env env = info.Env();
-  if (info.Length() < 1 || !info[0].IsString()) {
-    Napi::TypeError::New(env, "String expected for vocoder path")
+  if (info.Length() < 1 || !info[0].IsObject()) {
+    Napi::TypeError::New(env, "Object is expected for vocoder path")
         .ThrowAsJavaScriptException();
   }
-  auto vocoder_path = info[0].ToString().Utf8Value();
+  auto vocoder_path = info[0].As<Napi::Object>().Get("path").ToString().Utf8Value();
   if (vocoder_path.empty()) {
     Napi::TypeError::New(env, "vocoder path is required")
         .ThrowAsJavaScriptException();
package/src/tts_utils.cpp CHANGED
@@ -188,6 +188,10 @@ std::string process_text(const std::string &text,
   return processed_text;
 }
 
+#ifdef _WIN32
+#define M_PI 3.14159265358979323846
+#endif
+
 void fill_hann_window(int length, bool periodic, float *output) {
   int offset = -1;
   if (periodic) {
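For context on the M_PI guard added above: M_PI is not part of standard C, and MSVC's <math.h> only defines it when _USE_MATH_DEFINES is set, so the source defines it directly for Windows builds before fill_hann_window uses it. A TypeScript sketch of the standard Hann window formula this kind of routine computes, as an illustration of the math rather than the package's C implementation:

// Illustrative only: w[n] = 0.5 * (1 - cos(2 * pi * n / D)), where
// D = length for a periodic window and D = length - 1 for a symmetric one.
// Math.PI stands in for the M_PI constant the C code needs on Windows.
function hannWindow(length: number, periodic: boolean): Float32Array {
  const out = new Float32Array(length)
  const denom = periodic ? length : length - 1
  for (let n = 0; n < length; n++) {
    out[n] = 0.5 * (1 - Math.cos((2 * Math.PI * n) / denom))
  }
  return out
}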