node-llama-cpp 3.0.0-beta.19 → 3.0.0-beta.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. package/README.md +1 -1
  2. package/dist/bindings/AddonTypes.d.ts +1 -0
  3. package/dist/bindings/Llama.js +14 -7
  4. package/dist/bindings/Llama.js.map +1 -1
  5. package/dist/bindings/getLlama.d.ts +20 -2
  6. package/dist/bindings/getLlama.js +23 -14
  7. package/dist/bindings/getLlama.js.map +1 -1
  8. package/dist/bindings/utils/detectAvailableComputeLayers.js +4 -0
  9. package/dist/bindings/utils/detectAvailableComputeLayers.js.map +1 -1
  10. package/dist/bindings/utils/detectGlibc.js +12 -2
  11. package/dist/bindings/utils/detectGlibc.js.map +1 -1
  12. package/dist/cli/cli.js +11 -9
  13. package/dist/cli/cli.js.map +1 -1
  14. package/dist/cli/commands/BuildCommand.js +4 -2
  15. package/dist/cli/commands/BuildCommand.js.map +1 -1
  16. package/dist/cli/commands/ChatCommand.js +3 -2
  17. package/dist/cli/commands/ChatCommand.js.map +1 -1
  18. package/dist/cli/commands/ClearCommand.js +3 -2
  19. package/dist/cli/commands/ClearCommand.js.map +1 -1
  20. package/dist/cli/commands/CompleteCommand.js +3 -1
  21. package/dist/cli/commands/CompleteCommand.js.map +1 -1
  22. package/dist/cli/commands/DownloadCommand.js +3 -2
  23. package/dist/cli/commands/DownloadCommand.js.map +1 -1
  24. package/dist/cli/commands/InfillCommand.js +3 -1
  25. package/dist/cli/commands/InfillCommand.js.map +1 -1
  26. package/dist/cli/commands/InitCommand.d.ts +11 -0
  27. package/dist/cli/commands/InitCommand.js +195 -0
  28. package/dist/cli/commands/InitCommand.js.map +1 -0
  29. package/dist/cli/commands/PullCommand.js +18 -10
  30. package/dist/cli/commands/PullCommand.js.map +1 -1
  31. package/dist/cli/commands/inspect/InspectCommand.js +3 -1
  32. package/dist/cli/commands/inspect/InspectCommand.js.map +1 -1
  33. package/dist/cli/commands/inspect/commands/InspectGgufCommand.js +5 -2
  34. package/dist/cli/commands/inspect/commands/InspectGgufCommand.js.map +1 -1
  35. package/dist/cli/commands/inspect/commands/InspectGpuCommand.js +3 -1
  36. package/dist/cli/commands/inspect/commands/InspectGpuCommand.js.map +1 -1
  37. package/dist/cli/commands/inspect/commands/InspectMeasureCommand.js +3 -1
  38. package/dist/cli/commands/inspect/commands/InspectMeasureCommand.js.map +1 -1
  39. package/dist/cli/projectTemplates.d.ts +7 -0
  40. package/dist/cli/projectTemplates.js +10 -0
  41. package/dist/cli/projectTemplates.js.map +1 -0
  42. package/dist/cli/startCreateCli.d.ts +2 -0
  43. package/dist/cli/startCreateCli.js +26 -0
  44. package/dist/cli/startCreateCli.js.map +1 -0
  45. package/dist/cli/utils/consolePromptQuestion.d.ts +2 -1
  46. package/dist/cli/utils/consolePromptQuestion.js +4 -2
  47. package/dist/cli/utils/consolePromptQuestion.js.map +1 -1
  48. package/dist/cli/utils/interactivelyAskForModel.d.ts +7 -0
  49. package/dist/cli/utils/interactivelyAskForModel.js +451 -0
  50. package/dist/cli/utils/interactivelyAskForModel.js.map +1 -0
  51. package/dist/cli/utils/projectTemplates.d.ts +19 -0
  52. package/dist/cli/utils/projectTemplates.js +47 -0
  53. package/dist/cli/utils/projectTemplates.js.map +1 -0
  54. package/dist/cli/utils/resolveCommandGgufPath.js +7 -435
  55. package/dist/cli/utils/resolveCommandGgufPath.js.map +1 -1
  56. package/dist/cli/utils/splitAnsiToLines.d.ts +1 -1
  57. package/dist/cli/utils/splitAnsiToLines.js +20 -5
  58. package/dist/cli/utils/splitAnsiToLines.js.map +1 -1
  59. package/dist/cli/utils/withCliCommandDescriptionDocsUrl.d.ts +2 -0
  60. package/dist/cli/utils/withCliCommandDescriptionDocsUrl.js +23 -0
  61. package/dist/cli/utils/withCliCommandDescriptionDocsUrl.js.map +1 -0
  62. package/dist/commands.d.ts +1 -0
  63. package/dist/commands.js +3 -0
  64. package/dist/commands.js.map +1 -1
  65. package/dist/config.d.ts +21 -1
  66. package/dist/config.js +27 -4
  67. package/dist/config.js.map +1 -1
  68. package/dist/evaluator/LlamaContext/LlamaContext.js +0 -1
  69. package/dist/evaluator/LlamaContext/LlamaContext.js.map +1 -1
  70. package/dist/evaluator/LlamaModel.d.ts +32 -1
  71. package/dist/evaluator/LlamaModel.js +54 -1
  72. package/dist/evaluator/LlamaModel.js.map +1 -1
  73. package/dist/utils/createModelDownloader.d.ts +3 -0
  74. package/dist/utils/createModelDownloader.js.map +1 -1
  75. package/llama/addon.cpp +92 -0
  76. package/llama/binariesGithubRelease.json +1 -1
  77. package/llama/gitRelease.bundle +0 -0
  78. package/llama/llama.cpp.info.json +1 -1
  79. package/llamaBins/linux-arm64/_nlcBuildMetadata.json +1 -1
  80. package/llamaBins/linux-arm64/llama-addon.node +0 -0
  81. package/llamaBins/linux-armv7l/_nlcBuildMetadata.json +1 -1
  82. package/llamaBins/linux-armv7l/llama-addon.node +0 -0
  83. package/llamaBins/linux-x64/_nlcBuildMetadata.json +1 -1
  84. package/llamaBins/linux-x64/llama-addon.node +0 -0
  85. package/llamaBins/linux-x64-cuda/_nlcBuildMetadata.json +1 -1
  86. package/llamaBins/linux-x64-cuda/llama-addon.node +0 -0
  87. package/llamaBins/linux-x64-vulkan/_nlcBuildMetadata.json +1 -1
  88. package/llamaBins/linux-x64-vulkan/llama-addon.node +0 -0
  89. package/llamaBins/mac-arm64-metal/_nlcBuildMetadata.json +1 -1
  90. package/llamaBins/mac-arm64-metal/default.metallib +0 -0
  91. package/llamaBins/mac-arm64-metal/llama-addon.node +0 -0
  92. package/llamaBins/mac-x64/_nlcBuildMetadata.json +1 -1
  93. package/llamaBins/mac-x64/llama-addon.node +0 -0
  94. package/llamaBins/win-arm64/_nlcBuildMetadata.json +1 -1
  95. package/llamaBins/win-arm64/llama-addon.exp +0 -0
  96. package/llamaBins/win-arm64/llama-addon.lib +0 -0
  97. package/llamaBins/win-arm64/llama-addon.node +0 -0
  98. package/llamaBins/win-x64/_nlcBuildMetadata.json +1 -1
  99. package/llamaBins/win-x64/llama-addon.exp +0 -0
  100. package/llamaBins/win-x64/llama-addon.lib +0 -0
  101. package/llamaBins/win-x64/llama-addon.node +0 -0
  102. package/llamaBins/win-x64-cuda/_nlcBuildMetadata.json +1 -1
  103. package/llamaBins/win-x64-cuda/llama-addon.exp +0 -0
  104. package/llamaBins/win-x64-cuda/llama-addon.lib +0 -0
  105. package/llamaBins/win-x64-cuda/llama-addon.node +0 -0
  106. package/llamaBins/win-x64-vulkan/_nlcBuildMetadata.json +1 -1
  107. package/llamaBins/win-x64-vulkan/llama-addon.exp +0 -0
  108. package/llamaBins/win-x64-vulkan/llama-addon.lib +0 -0
  109. package/llamaBins/win-x64-vulkan/llama-addon.node +0 -0
  110. package/package.json +17 -13
  111. package/templates/package.json +10 -0
package/llama/addon.cpp CHANGED
@@ -218,6 +218,10 @@ Napi::Value getGpuType(const Napi::CallbackInfo& info) {
218
218
  }
219
219
 
220
220
  static Napi::Value getNapiToken(const Napi::CallbackInfo& info, llama_model* model, llama_token token) {
221
+ if (token < 0) {
222
+ return Napi::Number::From(info.Env(), -1);
223
+ }
224
+
221
225
  auto tokenType = llama_token_get_type(model, token);
222
226
 
223
227
  if (tokenType == LLAMA_TOKEN_TYPE_UNDEFINED || tokenType == LLAMA_TOKEN_TYPE_UNKNOWN) {
@@ -228,6 +232,10 @@ static Napi::Value getNapiToken(const Napi::CallbackInfo& info, llama_model* mod
228
232
  }
229
233
 
230
234
  static Napi::Value getNapiControlToken(const Napi::CallbackInfo& info, llama_model* model, llama_token token) {
235
+ if (token < 0) {
236
+ return Napi::Number::From(info.Env(), -1);
237
+ }
238
+
231
239
  auto tokenType = llama_token_get_type(model, token);
232
240
 
233
241
  if (tokenType != LLAMA_TOKEN_TYPE_CONTROL && tokenType != LLAMA_TOKEN_TYPE_USER_DEFINED) {
@@ -351,6 +359,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
351
359
  }
352
360
 
353
361
  Napi::Value Init(const Napi::CallbackInfo& info);
362
+ Napi::Value LoadLora(const Napi::CallbackInfo& info);
354
363
  Napi::Value AbortActiveModelLoad(const Napi::CallbackInfo& info) {
355
364
  abortModelLoad = true;
356
365
  return info.Env().Undefined();
@@ -585,6 +594,7 @@ class AddonModel : public Napi::ObjectWrap<AddonModel> {
585
594
  "AddonModel",
586
595
  {
587
596
  InstanceMethod("init", &AddonModel::Init),
597
+ InstanceMethod("loadLora", &AddonModel::LoadLora),
588
598
  InstanceMethod("abortActiveModelLoad", &AddonModel::AbortActiveModelLoad),
589
599
  InstanceMethod("tokenize", &AddonModel::Tokenize),
590
600
  InstanceMethod("detokenize", &AddonModel::Detokenize),
@@ -737,6 +747,76 @@ class AddonModelUnloadModelWorker : public Napi::AsyncWorker {
737
747
  deferred.Reject(err.Value());
738
748
  }
739
749
  };
750
+ class AddonModelLoadLoraWorker : public Napi::AsyncWorker {
751
+ public:
752
+ AddonModel* model;
753
+ std::string loraFilePath;
754
+ float loraScale;
755
+ int32_t loraThreads;
756
+ std::string baseModelPath;
757
+
758
+ AddonModelLoadLoraWorker(
759
+ const Napi::Env& env,
760
+ AddonModel* model,
761
+ std::string loraFilePath,
762
+ float loraScale,
763
+ int32_t loraThreads,
764
+ std::string baseModelPath
765
+ )
766
+ : Napi::AsyncWorker(env, "AddonModelLoadLoraWorker"),
767
+ model(model),
768
+ loraFilePath(loraFilePath),
769
+ loraScale(loraScale),
770
+ loraThreads(loraThreads),
771
+ baseModelPath(baseModelPath),
772
+ deferred(Napi::Promise::Deferred::New(env)) {
773
+ model->Ref();
774
+ }
775
+ ~AddonModelLoadLoraWorker() {
776
+ model->Unref();
777
+ }
778
+
779
+ Napi::Promise GetPromise() {
780
+ return deferred.Promise();
781
+ }
782
+
783
+ protected:
784
+ Napi::Promise::Deferred deferred;
785
+
786
+ void Execute() {
787
+ try {
788
+ const auto res = llama_model_apply_lora_from_file(
789
+ model->model,
790
+ loraFilePath.c_str(),
791
+ loraScale,
792
+ baseModelPath.empty() ? NULL : baseModelPath.c_str(),
793
+ loraThreads
794
+ );
795
+
796
+ if (res != 0) {
797
+ SetError(
798
+ std::string(
799
+ std::string("Failed to apply LoRA \"") + loraFilePath + std::string("\"") + (
800
+ baseModelPath.empty()
801
+ ? std::string("")
802
+ : (std::string(" with base model \"") + baseModelPath + std::string("\""))
803
+ )
804
+ )
805
+ );
806
+ }
807
+ } catch (const std::exception& e) {
808
+ SetError(e.what());
809
+ } catch(...) {
810
+ SetError("Unknown error when calling \"llama_model_apply_lora_from_file\"");
811
+ }
812
+ }
813
+ void OnOK() {
814
+ deferred.Resolve(Env().Undefined());
815
+ }
816
+ void OnError(const Napi::Error& err) {
817
+ deferred.Reject(err.Value());
818
+ }
819
+ };
740
820
 
741
821
  Napi::Value AddonModel::Init(const Napi::CallbackInfo& info) {
742
822
  if (disposed) {
@@ -748,6 +828,18 @@ Napi::Value AddonModel::Init(const Napi::CallbackInfo& info) {
748
828
  worker->Queue();
749
829
  return worker->GetPromise();
750
830
  }
831
+ Napi::Value AddonModel::LoadLora(const Napi::CallbackInfo& info) {
832
+ std::string loraFilePath = info[0].As<Napi::String>().Utf8Value();
833
+ float scale = info[1].As<Napi::Number>().FloatValue();
834
+ int32_t threads = info[2].As<Napi::Number>().Int32Value();
835
+ std::string baseModelPath = (info.Length() > 3 && info[3].IsString()) ? info[3].As<Napi::String>().Utf8Value() : std::string("");
836
+
837
+ int32_t resolvedThreads = threads == 0 ? std::thread::hardware_concurrency() : threads;
838
+
839
+ AddonModelLoadLoraWorker* worker = new AddonModelLoadLoraWorker(this->Env(), this, loraFilePath, scale, threads, baseModelPath);
840
+ worker->Queue();
841
+ return worker->GetPromise();
842
+ }
751
843
  Napi::Value AddonModel::Dispose(const Napi::CallbackInfo& info) {
752
844
  if (disposed) {
753
845
  return info.Env().Undefined();
@@ -1,3 +1,3 @@
1
1
  {
2
- "release": "b2861"
2
+ "release": "b2928"
3
3
  }
Binary file
@@ -1,4 +1,4 @@
1
1
  {
2
- "tag": "b2861",
2
+ "tag": "b2928",
3
3
  "llamaCppGithubRepo": "ggerganov/llama.cpp"
4
4
  }
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"arm64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"arm64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"armv7l","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"armv7l","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":"cuda","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":"cuda","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":"vulkan","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"linux","platformInfo":{"name":"Ubuntu","version":"22.04"},"arch":"x64","gpu":"vulkan","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","platformInfo":{"name":"macOS","version":"21.6.0"},"arch":"arm64","gpu":"metal","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","platformInfo":{"name":"macOS","version":"21.6.0"},"arch":"arm64","gpu":"metal","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","platformInfo":{"name":"macOS","version":"21.6.0"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"mac","platformInfo":{"name":"macOS","version":"21.6.0"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
Binary file
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"arm64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"arm64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
Binary file
Binary file
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":false,"llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
Binary file
Binary file
Binary file
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":"cuda","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":"cuda","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
@@ -1 +1 @@
1
- {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":"vulkan","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2861"}}}
1
+ {"buildOptions":{"customCmakeOptions":{},"progressLogs":true,"platform":"win","platformInfo":{"name":"Windows","version":"10.0.20348"},"arch":"x64","gpu":"vulkan","llamaCpp":{"repo":"ggerganov/llama.cpp","release":"b2928"}}}
package/package.json CHANGED
@@ -1,15 +1,16 @@
1
1
  {
2
2
  "name": "node-llama-cpp",
3
- "version": "3.0.0-beta.19",
3
+ "version": "3.0.0-beta.20",
4
4
  "description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Force a JSON schema on the model output on the generation level",
5
- "main": "dist/index.js",
5
+ "main": "./dist/index.js",
6
6
  "type": "module",
7
7
  "types": "./dist/index.d.ts",
8
8
  "bin": {
9
- "node-llama-cpp": "./dist/cli/cli.js"
9
+ "node-llama-cpp": "dist/cli/cli.js"
10
10
  },
11
11
  "files": [
12
12
  "dist/",
13
+ "templates/packed/",
13
14
  "llama/",
14
15
  "llamaBins/",
15
16
  "package.json",
@@ -41,8 +42,10 @@
41
42
  },
42
43
  "scripts": {
43
44
  "prepare": "[ \"$CI\" = true ] || [ -d '.husky/_' ] || husky install",
45
+ "postinstall": "node ./dist/cli/cli.js postinstall",
44
46
  "prebuild": "rimraf ./dist ./tsconfig.tsbuildinfo",
45
- "build": "tsc --build tsconfig.json --force",
47
+ "build": "tsc --build tsconfig.json --force && npm run build:packTemplates",
48
+ "build:packTemplates": "vite-node scripts/packTemplates.ts",
46
49
  "addPostinstallScript": "npm pkg set scripts.postinstall=\"node ./dist/cli/cli.js postinstall\"",
47
50
  "prewatch": "rimraf ./dist ./tsconfig.tsbuildinfo",
48
51
  "watch": "tsc --build tsconfig.json --watch --force",
@@ -55,18 +58,17 @@
55
58
  "test:modelDependent:interactive": "vitest watch ./test/modelDependent",
56
59
  "test:typescript": "tsc --build tsconfig.json --dry --force",
57
60
  "lint": "npm run lint:eslint",
58
- "lint:eslint": "eslint --ext .js --ext .ts .",
61
+ "lint:eslint": "eslint --ext .js --ext .ts --report-unused-disable-directives .",
59
62
  "format": "npm run lint:eslint -- --fix",
60
63
  "dev:setup:downloadAllTestModels": "vite-node test/utils/scripts/downloadAllTestModels.ts",
61
64
  "dev:setup": "npm run build && node ./dist/cli/cli.js download --noUsageExample && npm run docs:generateTypedoc && npm run dev:setup:downloadAllTestModels",
62
65
  "dev:build": "npm run build && node ./dist/cli/cli.js build --noUsageExample",
63
- "clean": "rm -rf ./node_modules ./dist ./tsconfig.tsbuildinfo ./test/.models ./docs/api ./docs/api-overrides",
66
+ "clean": "rm -rf ./node_modules ./dist ./tsconfig.tsbuildinfo ./test/.models ./docs/api ./docs/api-overrides ./templates/packed",
64
67
  "docs:generateTypedoc": "typedoc && rimraf ./docs/api/index.md ./docs/api/globals.md ./docs/api/functions/LlamaText.md && npm run docs:generateTypedoc:overrides",
65
68
  "docs:generateTypedoc:overrides": "typedoc --entryPoints ./src/apiDocsOverrides.ts --out ./docs/api-overrides && copyfiles --flat \"./docs/api-overrides/classes/LlamaText.md\" ./docs/api/classes && rimraf ./docs/api-overrides",
66
69
  "docs:dev": "npm run docs:generateTypedoc && vitepress dev",
67
70
  "docs:build": "npm run docs:generateTypedoc && vitepress build",
68
- "docs:preview": "npm run docs:generateTypedoc && vitepress preview",
69
- "postinstall": "node ./dist/cli/cli.js postinstall"
71
+ "docs:preview": "npm run docs:generateTypedoc && vitepress preview"
70
72
  },
71
73
  "repository": {
72
74
  "type": "git",
@@ -113,8 +115,8 @@
113
115
  },
114
116
  "homepage": "https://withcatai.github.io/node-llama-cpp/",
115
117
  "devDependencies": {
116
- "@commitlint/cli": "^17.7.1",
117
- "@commitlint/config-conventional": "^17.7.0",
118
+ "@commitlint/cli": "^19.3.0",
119
+ "@commitlint/config-conventional": "^19.2.2",
118
120
  "@semantic-release/exec": "^6.0.3",
119
121
  "@shikijs/vitepress-twoslash": "^1.3.0",
120
122
  "@types/async-retry": "^1.4.8",
@@ -124,7 +126,7 @@
124
126
  "@types/node": "^20.11.29",
125
127
  "@types/proper-lockfile": "^4.1.4",
126
128
  "@types/semver": "^7.5.8",
127
- "@types/uuid": "^9.0.2",
129
+ "@types/validate-npm-package-name": "^4.0.2",
128
130
  "@types/which": "^3.0.0",
129
131
  "@types/yargs": "^17.0.24",
130
132
  "@typescript-eslint/eslint-plugin": "^6.3.0",
@@ -138,7 +140,7 @@
138
140
  "eslint-plugin-n": "^16.3.1",
139
141
  "husky": "^8.0.3",
140
142
  "rimraf": "^5.0.1",
141
- "semantic-release": "^22.0.8",
143
+ "semantic-release": "^23.1.1",
142
144
  "tslib": "^2.6.1",
143
145
  "typedoc": "^0.25.13",
144
146
  "typedoc-plugin-markdown": "^4.0.0-next.55",
@@ -162,10 +164,12 @@
162
164
  "env-var": "^7.3.1",
163
165
  "filenamify": "^6.0.0",
164
166
  "fs-extra": "^11.2.0",
167
+ "ignore": "^5.3.1",
165
168
  "ipull": "^3.1.1",
166
169
  "is-unicode-supported": "^2.0.0",
167
170
  "lifecycle-utils": "^1.4.1",
168
171
  "log-symbols": "^5.1.0",
172
+ "nanoid": "^5.0.7",
169
173
  "node-addon-api": "^7.0.0",
170
174
  "octokit": "^3.1.0",
171
175
  "ora": "^7.0.1",
@@ -176,7 +180,7 @@
176
180
  "slice-ansi": "^7.1.0",
177
181
  "stdout-update": "^4.0.1",
178
182
  "strip-ansi": "^7.1.0",
179
- "uuid": "^9.0.0",
183
+ "validate-npm-package-name": "^5.0.1",
180
184
  "which": "^4.0.0",
181
185
  "yargs": "^17.7.2"
182
186
  },
@@ -0,0 +1,10 @@
1
+ {
2
+ "name": "eslint-dependencies-for-ci",
3
+ "private": true,
4
+ "version": "0.0.0",
5
+ "type": "module",
6
+ "devDependencies": {
7
+ "eslint-plugin-react-hooks": "^4.6.0",
8
+ "eslint-plugin-react-refresh": "^0.4.5"
9
+ }
10
+ }