react-native-nitro-mlx 0.1.1 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/ios/Sources/HybridLLM.swift +115 -5
  2. package/lib/module/index.js +1 -1
  3. package/lib/module/index.js.map +1 -1
  4. package/lib/module/llm.js +23 -3
  5. package/lib/module/llm.js.map +1 -1
  6. package/lib/module/models.js +227 -0
  7. package/lib/module/models.js.map +1 -1
  8. package/lib/typescript/src/index.d.ts +3 -3
  9. package/lib/typescript/src/index.d.ts.map +1 -1
  10. package/lib/typescript/src/llm.d.ts +21 -3
  11. package/lib/typescript/src/llm.d.ts.map +1 -1
  12. package/lib/typescript/src/models.d.ts +27 -0
  13. package/lib/typescript/src/models.d.ts.map +1 -1
  14. package/lib/typescript/src/specs/LLM.nitro.d.ts +29 -2
  15. package/lib/typescript/src/specs/LLM.nitro.d.ts.map +1 -1
  16. package/nitrogen/generated/ios/MLXReactNative-Swift-Cxx-Bridge.hpp +87 -0
  17. package/nitrogen/generated/ios/MLXReactNative-Swift-Cxx-Umbrella.hpp +7 -0
  18. package/nitrogen/generated/ios/c++/HybridLLMSpecSwift.hpp +30 -2
  19. package/nitrogen/generated/ios/swift/HybridLLMSpec.swift +4 -1
  20. package/nitrogen/generated/ios/swift/HybridLLMSpec_cxx.swift +42 -7
  21. package/nitrogen/generated/ios/swift/LLMLoadOptions.swift +138 -0
  22. package/nitrogen/generated/ios/swift/LLMMessage.swift +47 -0
  23. package/nitrogen/generated/shared/c++/HybridLLMSpec.cpp +3 -0
  24. package/nitrogen/generated/shared/c++/HybridLLMSpec.hpp +12 -1
  25. package/nitrogen/generated/shared/c++/LLMLoadOptions.hpp +87 -0
  26. package/nitrogen/generated/shared/c++/LLMMessage.hpp +79 -0
  27. package/package.json +1 -9
  28. package/src/index.ts +10 -3
  29. package/src/llm.ts +32 -4
  30. package/src/models.ts +267 -0
  31. package/src/specs/LLM.nitro.ts +34 -2
@@ -12,6 +12,20 @@ export interface GenerationStats {
  /** Total generation time in milliseconds */
  totalTime: number;
  }
+ export interface LLMMessage {
+ role: string;
+ content: string;
+ }
+ /** Options for loading a model.
+ */
+ export interface LLMLoadOptions {
+ /** Callback invoked with loading progress (0-1) */
+ onProgress?: (progress: number) => void;
+ /** Additional context to provide to the model */
+ additionalContext?: LLMMessage[];
+ /** Whether to automatically manage message history */
+ manageHistory?: boolean;
+ }
  /**
  * Low-level LLM interface for text generation using MLX.
  * @internal Use the `LLM` export from `react-native-nitro-mlx` instead.
@@ -22,9 +36,9 @@ export interface LLM extends HybridObject<{
  /**
  * Load a model into memory. Downloads from HuggingFace if not already cached.
  * @param modelId - HuggingFace model ID (e.g., 'mlx-community/Qwen3-0.6B-4bit')
- * @param onProgress - Callback invoked with loading progress (0-1)
+ * @param options - Optional load options (progress callback, additional context, history management)
  */
- load(modelId: string, onProgress: (progress: number) => void): Promise<void>;
+ load(modelId: string, options?: LLMLoadOptions): Promise<void>;
  /**
  * Generate a complete response for a prompt.
  * @param prompt - The input text to generate a response for
@@ -42,11 +56,24 @@ export interface LLM extends HybridObject<{
  * Stop the current generation.
  */
  stop(): void;
+ /**
+ * Unload the current model and release memory.
+ */
+ unload(): void;
  /**
  * Get statistics from the last generation.
  * @returns Statistics including token count, speed, and timing
  */
  getLastGenerationStats(): GenerationStats;
+ /**
+ * Get the message history if management is enabled.
+ * @returns Array of messages in the history
+ */
+ getHistory(): LLMMessage[];
+ /**
+ * Clear the message history.
+ */
+ clearHistory(): void;
  /** Whether a model is currently loaded */
  readonly isLoaded: boolean;
  /** Whether text is currently being generated */
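
For orientation, this is what the new 0.2.x loading and history API looks like from the JavaScript side. A minimal sketch, assuming the `LLM` export from `react-native-nitro-mlx` exposes the spec methods shown in this diff; the model ID comes from the doc comment above, while the prompts and log messages are illustrative:

import { LLM } from 'react-native-nitro-mlx';

// Load with the new options object instead of a bare onProgress callback.
await LLM.load('mlx-community/Qwen3-0.6B-4bit', {
  onProgress: (progress) => console.log(`loading ${Math.round(progress * 100)}%`),
  additionalContext: [{ role: 'system', content: 'Answer briefly.' }],
  manageHistory: true,
});

const reply = await LLM.generate('Hello!');
console.log(reply, LLM.getLastGenerationStats());

// With manageHistory enabled, prior turns can be read back and cleared.
console.log(LLM.getHistory());
LLM.clearHistory();

// New in 0.2.x: release the model's memory explicitly.
LLM.unload();
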
@@ -1 +1 @@
- {"version":3,"file":"LLM.nitro.d.ts","sourceRoot":"","sources":["../../../../src/specs/LLM.nitro.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAA;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,uCAAuC;IACvC,UAAU,EAAE,MAAM,CAAA;IAClB,4CAA4C;IAC5C,eAAe,EAAE,MAAM,CAAA;IACvB,+DAA+D;IAC/D,gBAAgB,EAAE,MAAM,CAAA;IACxB,4CAA4C;IAC5C,SAAS,EAAE,MAAM,CAAA;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,GAAI,SAAQ,YAAY,CAAC;IAAE,GAAG,EAAE,OAAO,CAAA;CAAE,CAAC;IACzD;;;;OAIG;IACH,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAE5E;;;;OAIG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IAEzC;;;;;OAKG;IACH,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IAEzE;;OAEG;IACH,IAAI,IAAI,IAAI,CAAA;IAEZ;;;OAGG;IACH,sBAAsB,IAAI,eAAe,CAAA;IAEzC,0CAA0C;IAC1C,QAAQ,CAAC,QAAQ,EAAE,OAAO,CAAA;IAC1B,gDAAgD;IAChD,QAAQ,CAAC,YAAY,EAAE,OAAO,CAAA;IAC9B,2CAA2C;IAC3C,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAA;IAExB,2BAA2B;IAC3B,KAAK,EAAE,OAAO,CAAA;IACd,gDAAgD;IAChD,YAAY,EAAE,MAAM,CAAA;CACrB"}
+ {"version":3,"file":"LLM.nitro.d.ts","sourceRoot":"","sources":["../../../../src/specs/LLM.nitro.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,4BAA4B,CAAA;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,uCAAuC;IACvC,UAAU,EAAE,MAAM,CAAA;IAClB,4CAA4C;IAC5C,eAAe,EAAE,MAAM,CAAA;IACvB,+DAA+D;IAC/D,gBAAgB,EAAE,MAAM,CAAA;IACxB,4CAA4C;IAC5C,SAAS,EAAE,MAAM,CAAA;CAClB;AAED,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAA;IACZ,OAAO,EAAE,MAAM,CAAA;CAChB;AAED;GACG;AACH,MAAM,WAAW,cAAc;IAC7B,mDAAmD;IACnD,UAAU,CAAC,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAA;IACvC,iDAAiD;IACjD,iBAAiB,CAAC,EAAE,UAAU,EAAE,CAAA;IAChC,sDAAsD;IACtD,aAAa,CAAC,EAAE,OAAO,CAAA;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,GAAI,SAAQ,YAAY,CAAC;IAAE,GAAG,EAAE,OAAO,CAAA;CAAE,CAAC;IACzD;;;;OAIG;IACH,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAE9D;;;;OAIG;IACH,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IAEzC;;;;;OAKG;IACH,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;IAEzE;;OAEG;IACH,IAAI,IAAI,IAAI,CAAA;IAEZ;;OAEG;IACH,MAAM,IAAI,IAAI,CAAA;IAEd;;;OAGG;IACH,sBAAsB,IAAI,eAAe,CAAA;IAEzC;;;OAGG;IACH,UAAU,IAAI,UAAU,EAAE,CAAA;IAE1B;;OAEG;IACH,YAAY,IAAI,IAAI,CAAA;IAEpB,0CAA0C;IAC1C,QAAQ,CAAC,QAAQ,EAAE,OAAO,CAAA;IAC1B,gDAAgD;IAChD,QAAQ,CAAC,YAAY,EAAE,OAAO,CAAA;IAC9B,2CAA2C;IAC3C,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAA;IAExB,2BAA2B;IAC3B,KAAK,EAAE,OAAO,CAAA;IACd,gDAAgD;IAChD,YAAY,EAAE,MAAM,CAAA;CACrB"}
@@ -14,6 +14,10 @@ namespace margelo::nitro::mlxreactnative { struct GenerationStats; }
  namespace margelo::nitro::mlxreactnative { class HybridLLMSpec; }
  // Forward declaration of `HybridModelManagerSpec` to properly resolve imports.
  namespace margelo::nitro::mlxreactnative { class HybridModelManagerSpec; }
+ // Forward declaration of `LLMLoadOptions` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMLoadOptions; }
+ // Forward declaration of `LLMMessage` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMMessage; }

  // Forward declarations of Swift defined types
  // Forward declaration of `HybridLLMSpec_cxx` to properly resolve imports.
@@ -25,12 +29,15 @@ namespace MLXReactNative { class HybridModelManagerSpec_cxx; }
  #include "GenerationStats.hpp"
  #include "HybridLLMSpec.hpp"
  #include "HybridModelManagerSpec.hpp"
+ #include "LLMLoadOptions.hpp"
+ #include "LLMMessage.hpp"
  #include <NitroModules/Promise.hpp>
  #include <NitroModules/PromiseHolder.hpp>
  #include <NitroModules/Result.hpp>
  #include <exception>
  #include <functional>
  #include <memory>
+ #include <optional>
  #include <string>
  #include <vector>

@@ -118,6 +125,77 @@ namespace margelo::nitro::mlxreactnative::bridge::swift {
  return Func_void_double_Wrapper(std::move(value));
  }

+ // pragma MARK: std::optional<std::function<void(double /* progress */)>>
+ /**
+ * Specialized version of `std::optional<std::function<void(double / * progress * /)>>`.
+ */
+ using std__optional_std__function_void_double____progress______ = std::optional<std::function<void(double /* progress */)>>;
+ inline std::optional<std::function<void(double /* progress */)>> create_std__optional_std__function_void_double____progress______(const std::function<void(double /* progress */)>& value) noexcept {
+ return std::optional<std::function<void(double /* progress */)>>(value);
+ }
+ inline bool has_value_std__optional_std__function_void_double____progress______(const std::optional<std::function<void(double /* progress */)>>& optional) noexcept {
+ return optional.has_value();
+ }
+ inline std::function<void(double /* progress */)> get_std__optional_std__function_void_double____progress______(const std::optional<std::function<void(double /* progress */)>>& optional) noexcept {
+ return *optional;
+ }
+
+ // pragma MARK: std::vector<LLMMessage>
+ /**
+ * Specialized version of `std::vector<LLMMessage>`.
+ */
+ using std__vector_LLMMessage_ = std::vector<LLMMessage>;
+ inline std::vector<LLMMessage> create_std__vector_LLMMessage_(size_t size) noexcept {
+ std::vector<LLMMessage> vector;
+ vector.reserve(size);
+ return vector;
+ }
+
+ // pragma MARK: std::optional<std::vector<LLMMessage>>
+ /**
+ * Specialized version of `std::optional<std::vector<LLMMessage>>`.
+ */
+ using std__optional_std__vector_LLMMessage__ = std::optional<std::vector<LLMMessage>>;
+ inline std::optional<std::vector<LLMMessage>> create_std__optional_std__vector_LLMMessage__(const std::vector<LLMMessage>& value) noexcept {
+ return std::optional<std::vector<LLMMessage>>(value);
+ }
+ inline bool has_value_std__optional_std__vector_LLMMessage__(const std::optional<std::vector<LLMMessage>>& optional) noexcept {
+ return optional.has_value();
+ }
+ inline std::vector<LLMMessage> get_std__optional_std__vector_LLMMessage__(const std::optional<std::vector<LLMMessage>>& optional) noexcept {
+ return *optional;
+ }
+
+ // pragma MARK: std::optional<bool>
+ /**
+ * Specialized version of `std::optional<bool>`.
+ */
+ using std__optional_bool_ = std::optional<bool>;
+ inline std::optional<bool> create_std__optional_bool_(const bool& value) noexcept {
+ return std::optional<bool>(value);
+ }
+ inline bool has_value_std__optional_bool_(const std::optional<bool>& optional) noexcept {
+ return optional.has_value();
+ }
+ inline bool get_std__optional_bool_(const std::optional<bool>& optional) noexcept {
+ return *optional;
+ }
+
+ // pragma MARK: std::optional<LLMLoadOptions>
+ /**
+ * Specialized version of `std::optional<LLMLoadOptions>`.
+ */
+ using std__optional_LLMLoadOptions_ = std::optional<LLMLoadOptions>;
+ inline std::optional<LLMLoadOptions> create_std__optional_LLMLoadOptions_(const LLMLoadOptions& value) noexcept {
+ return std::optional<LLMLoadOptions>(value);
+ }
+ inline bool has_value_std__optional_LLMLoadOptions_(const std::optional<LLMLoadOptions>& optional) noexcept {
+ return optional.has_value();
+ }
+ inline LLMLoadOptions get_std__optional_LLMLoadOptions_(const std::optional<LLMLoadOptions>& optional) noexcept {
+ return *optional;
+ }
+
  // pragma MARK: std::shared_ptr<Promise<std::string>>
  /**
  * Specialized version of `std::shared_ptr<Promise<std::string>>`.
@@ -200,6 +278,15 @@ namespace margelo::nitro::mlxreactnative::bridge::swift {
  return Result<GenerationStats>::withError(error);
  }

+ // pragma MARK: Result<std::vector<LLMMessage>>
+ using Result_std__vector_LLMMessage__ = Result<std::vector<LLMMessage>>;
+ inline Result_std__vector_LLMMessage__ create_Result_std__vector_LLMMessage__(const std::vector<LLMMessage>& value) noexcept {
+ return Result<std::vector<LLMMessage>>::withValue(value);
+ }
+ inline Result_std__vector_LLMMessage__ create_Result_std__vector_LLMMessage__(const std::exception_ptr& error) noexcept {
+ return Result<std::vector<LLMMessage>>::withError(error);
+ }
+
  // pragma MARK: std::shared_ptr<Promise<bool>>
  /**
  * Specialized version of `std::shared_ptr<Promise<bool>>`.
@@ -14,16 +14,23 @@ namespace margelo::nitro::mlxreactnative { struct GenerationStats; }
  namespace margelo::nitro::mlxreactnative { class HybridLLMSpec; }
  // Forward declaration of `HybridModelManagerSpec` to properly resolve imports.
  namespace margelo::nitro::mlxreactnative { class HybridModelManagerSpec; }
+ // Forward declaration of `LLMLoadOptions` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMLoadOptions; }
+ // Forward declaration of `LLMMessage` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMMessage; }

  // Include C++ defined types
  #include "GenerationStats.hpp"
  #include "HybridLLMSpec.hpp"
  #include "HybridModelManagerSpec.hpp"
+ #include "LLMLoadOptions.hpp"
+ #include "LLMMessage.hpp"
  #include <NitroModules/Promise.hpp>
  #include <NitroModules/Result.hpp>
  #include <exception>
  #include <functional>
  #include <memory>
+ #include <optional>
  #include <string>
  #include <vector>

@@ -12,12 +12,20 @@
  // Forward declaration of `HybridLLMSpec_cxx` to properly resolve imports.
  namespace MLXReactNative { class HybridLLMSpec_cxx; }

+ // Forward declaration of `LLMLoadOptions` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMLoadOptions; }
+ // Forward declaration of `LLMMessage` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMMessage; }
  // Forward declaration of `GenerationStats` to properly resolve imports.
  namespace margelo::nitro::mlxreactnative { struct GenerationStats; }

  #include <string>
  #include <NitroModules/Promise.hpp>
+ #include "LLMLoadOptions.hpp"
+ #include <optional>
  #include <functional>
+ #include "LLMMessage.hpp"
+ #include <vector>
  #include "GenerationStats.hpp"

  #include "MLXReactNative-Swift-Cxx-Umbrella.hpp"
@@ -86,8 +94,8 @@ namespace margelo::nitro::mlxreactnative {

  public:
  // Methods
- inline std::shared_ptr<Promise<void>> load(const std::string& modelId, const std::function<void(double /* progress */)>& onProgress) override {
- auto __result = _swiftPart.load(modelId, onProgress);
+ inline std::shared_ptr<Promise<void>> load(const std::string& modelId, const std::optional<LLMLoadOptions>& options) override {
+ auto __result = _swiftPart.load(modelId, options);
  if (__result.hasError()) [[unlikely]] {
  std::rethrow_exception(__result.error());
  }
@@ -116,6 +124,12 @@ namespace margelo::nitro::mlxreactnative {
  std::rethrow_exception(__result.error());
  }
  }
+ inline void unload() override {
+ auto __result = _swiftPart.unload();
+ if (__result.hasError()) [[unlikely]] {
+ std::rethrow_exception(__result.error());
+ }
+ }
  inline GenerationStats getLastGenerationStats() override {
  auto __result = _swiftPart.getLastGenerationStats();
  if (__result.hasError()) [[unlikely]] {
@@ -124,6 +138,20 @@ namespace margelo::nitro::mlxreactnative {
  auto __value = std::move(__result.value());
  return __value;
  }
+ inline std::vector<LLMMessage> getHistory() override {
+ auto __result = _swiftPart.getHistory();
+ if (__result.hasError()) [[unlikely]] {
+ std::rethrow_exception(__result.error());
+ }
+ auto __value = std::move(__result.value());
+ return __value;
+ }
+ inline void clearHistory() override {
+ auto __result = _swiftPart.clearHistory();
+ if (__result.hasError()) [[unlikely]] {
+ std::rethrow_exception(__result.error());
+ }
+ }

  private:
  MLXReactNative::HybridLLMSpec_cxx _swiftPart;
@@ -18,11 +18,14 @@ public protocol HybridLLMSpec_protocol: HybridObject {
  var systemPrompt: String { get set }

  // Methods
- func load(modelId: String, onProgress: @escaping (_ progress: Double) -> Void) throws -> Promise<Void>
+ func load(modelId: String, options: LLMLoadOptions?) throws -> Promise<Void>
  func generate(prompt: String) throws -> Promise<String>
  func stream(prompt: String, onToken: @escaping (_ token: String) -> Void) throws -> Promise<String>
  func stop() throws -> Void
+ func unload() throws -> Void
  func getLastGenerationStats() throws -> GenerationStats
+ func getHistory() throws -> [LLMMessage]
+ func clearHistory() throws -> Void
  }

  public extension HybridLLMSpec_protocol {
@@ -159,14 +159,9 @@ open class HybridLLMSpec_cxx {

  // Methods
  @inline(__always)
- public final func load(modelId: std.string, onProgress: bridge.Func_void_double) -> bridge.Result_std__shared_ptr_Promise_void___ {
+ public final func load(modelId: std.string, options: bridge.std__optional_LLMLoadOptions_) -> bridge.Result_std__shared_ptr_Promise_void___ {
  do {
- let __result = try self.__implementation.load(modelId: String(modelId), onProgress: { () -> (Double) -> Void in
- let __wrappedFunction = bridge.wrap_Func_void_double(onProgress)
- return { (__progress: Double) -> Void in
- __wrappedFunction.call(__progress)
- }
- }())
+ let __result = try self.__implementation.load(modelId: String(modelId), options: options.value)
  let __resultCpp = { () -> bridge.std__shared_ptr_Promise_void__ in
  let __promise = bridge.create_std__shared_ptr_Promise_void__()
  let __promiseHolder = bridge.wrap_std__shared_ptr_Promise_void__(__promise)
@@ -236,6 +231,17 @@ open class HybridLLMSpec_cxx {
  }
  }

+ @inline(__always)
+ public final func unload() -> bridge.Result_void_ {
+ do {
+ try self.__implementation.unload()
+ return bridge.create_Result_void_()
+ } catch (let __error) {
+ let __exceptionPtr = __error.toCpp()
+ return bridge.create_Result_void_(__exceptionPtr)
+ }
+ }
+
  @inline(__always)
  public final func getLastGenerationStats() -> bridge.Result_GenerationStats_ {
  do {
@@ -247,4 +253,33 @@ open class HybridLLMSpec_cxx {
  return bridge.create_Result_GenerationStats_(__exceptionPtr)
  }
  }
+
+ @inline(__always)
+ public final func getHistory() -> bridge.Result_std__vector_LLMMessage__ {
+ do {
+ let __result = try self.__implementation.getHistory()
+ let __resultCpp = { () -> bridge.std__vector_LLMMessage_ in
+ var __vector = bridge.create_std__vector_LLMMessage_(__result.count)
+ for __item in __result {
+ __vector.push_back(__item)
+ }
+ return __vector
+ }()
+ return bridge.create_Result_std__vector_LLMMessage__(__resultCpp)
+ } catch (let __error) {
+ let __exceptionPtr = __error.toCpp()
+ return bridge.create_Result_std__vector_LLMMessage__(__exceptionPtr)
+ }
+ }
+
+ @inline(__always)
+ public final func clearHistory() -> bridge.Result_void_ {
+ do {
+ try self.__implementation.clearHistory()
+ return bridge.create_Result_void_()
+ } catch (let __error) {
+ let __exceptionPtr = __error.toCpp()
+ return bridge.create_Result_void_(__exceptionPtr)
+ }
+ }
  }
@@ -0,0 +1,138 @@
+ ///
+ /// LLMLoadOptions.swift
+ /// This file was generated by nitrogen. DO NOT MODIFY THIS FILE.
+ /// https://github.com/mrousavy/nitro
+ /// Copyright © 2025 Marc Rousavy @ Margelo
+ ///
+
+ import Foundation
+ import NitroModules
+
+ /**
+ * Represents an instance of `LLMLoadOptions`, backed by a C++ struct.
+ */
+ public typealias LLMLoadOptions = margelo.nitro.mlxreactnative.LLMLoadOptions
+
+ public extension LLMLoadOptions {
+ private typealias bridge = margelo.nitro.mlxreactnative.bridge.swift
+
+ /**
+ * Create a new instance of `LLMLoadOptions`.
+ */
+ init(onProgress: ((_ progress: Double) -> Void)?, additionalContext: [LLMMessage]?, manageHistory: Bool?) {
+ self.init({ () -> bridge.std__optional_std__function_void_double____progress______ in
+ if let __unwrappedValue = onProgress {
+ return bridge.create_std__optional_std__function_void_double____progress______({ () -> bridge.Func_void_double in
+ let __closureWrapper = Func_void_double(__unwrappedValue)
+ return bridge.create_Func_void_double(__closureWrapper.toUnsafe())
+ }())
+ } else {
+ return .init()
+ }
+ }(), { () -> bridge.std__optional_std__vector_LLMMessage__ in
+ if let __unwrappedValue = additionalContext {
+ return bridge.create_std__optional_std__vector_LLMMessage__({ () -> bridge.std__vector_LLMMessage_ in
+ var __vector = bridge.create_std__vector_LLMMessage_(__unwrappedValue.count)
+ for __item in __unwrappedValue {
+ __vector.push_back(__item)
+ }
+ return __vector
+ }())
+ } else {
+ return .init()
+ }
+ }(), { () -> bridge.std__optional_bool_ in
+ if let __unwrappedValue = manageHistory {
+ return bridge.create_std__optional_bool_(__unwrappedValue)
+ } else {
+ return .init()
+ }
+ }())
+ }
+
+ var onProgress: ((_ progress: Double) -> Void)? {
+ @inline(__always)
+ get {
+ return { () -> ((_ progress: Double) -> Void)? in
+ if bridge.has_value_std__optional_std__function_void_double____progress______(self.__onProgress) {
+ let __unwrapped = bridge.get_std__optional_std__function_void_double____progress______(self.__onProgress)
+ return { () -> (Double) -> Void in
+ let __wrappedFunction = bridge.wrap_Func_void_double(__unwrapped)
+ return { (__progress: Double) -> Void in
+ __wrappedFunction.call(__progress)
+ }
+ }()
+ } else {
+ return nil
+ }
+ }()
+ }
+ @inline(__always)
+ set {
+ self.__onProgress = { () -> bridge.std__optional_std__function_void_double____progress______ in
+ if let __unwrappedValue = newValue {
+ return bridge.create_std__optional_std__function_void_double____progress______({ () -> bridge.Func_void_double in
+ let __closureWrapper = Func_void_double(__unwrappedValue)
+ return bridge.create_Func_void_double(__closureWrapper.toUnsafe())
+ }())
+ } else {
+ return .init()
+ }
+ }()
+ }
+ }
+
+ var additionalContext: [LLMMessage]? {
+ @inline(__always)
+ get {
+ return { () -> [LLMMessage]? in
+ if bridge.has_value_std__optional_std__vector_LLMMessage__(self.__additionalContext) {
+ let __unwrapped = bridge.get_std__optional_std__vector_LLMMessage__(self.__additionalContext)
+ return __unwrapped.map({ __item in __item })
+ } else {
+ return nil
+ }
+ }()
+ }
+ @inline(__always)
+ set {
+ self.__additionalContext = { () -> bridge.std__optional_std__vector_LLMMessage__ in
+ if let __unwrappedValue = newValue {
+ return bridge.create_std__optional_std__vector_LLMMessage__({ () -> bridge.std__vector_LLMMessage_ in
+ var __vector = bridge.create_std__vector_LLMMessage_(__unwrappedValue.count)
+ for __item in __unwrappedValue {
+ __vector.push_back(__item)
+ }
+ return __vector
+ }())
+ } else {
+ return .init()
+ }
+ }()
+ }
+ }
+
+ var manageHistory: Bool? {
+ @inline(__always)
+ get {
+ return { () -> Bool? in
+ if bridge.has_value_std__optional_bool_(self.__manageHistory) {
+ let __unwrapped = bridge.get_std__optional_bool_(self.__manageHistory)
+ return __unwrapped
+ } else {
+ return nil
+ }
+ }()
+ }
+ @inline(__always)
+ set {
+ self.__manageHistory = { () -> bridge.std__optional_bool_ in
+ if let __unwrappedValue = newValue {
+ return bridge.create_std__optional_bool_(__unwrappedValue)
+ } else {
+ return .init()
+ }
+ }()
+ }
+ }
+ }
@@ -0,0 +1,47 @@
+ ///
+ /// LLMMessage.swift
+ /// This file was generated by nitrogen. DO NOT MODIFY THIS FILE.
+ /// https://github.com/mrousavy/nitro
+ /// Copyright © 2025 Marc Rousavy @ Margelo
+ ///
+
+ import Foundation
+ import NitroModules
+
+ /**
+ * Represents an instance of `LLMMessage`, backed by a C++ struct.
+ */
+ public typealias LLMMessage = margelo.nitro.mlxreactnative.LLMMessage
+
+ public extension LLMMessage {
+ private typealias bridge = margelo.nitro.mlxreactnative.bridge.swift
+
+ /**
+ * Create a new instance of `LLMMessage`.
+ */
+ init(role: String, content: String) {
+ self.init(std.string(role), std.string(content))
+ }
+
+ var role: String {
+ @inline(__always)
+ get {
+ return String(self.__role)
+ }
+ @inline(__always)
+ set {
+ self.__role = std.string(newValue)
+ }
+ }
+
+ var content: String {
+ @inline(__always)
+ get {
+ return String(self.__content)
+ }
+ @inline(__always)
+ set {
+ self.__content = std.string(newValue)
+ }
+ }
+ }
@@ -25,7 +25,10 @@ namespace margelo::nitro::mlxreactnative {
  prototype.registerHybridMethod("generate", &HybridLLMSpec::generate);
  prototype.registerHybridMethod("stream", &HybridLLMSpec::stream);
  prototype.registerHybridMethod("stop", &HybridLLMSpec::stop);
+ prototype.registerHybridMethod("unload", &HybridLLMSpec::unload);
  prototype.registerHybridMethod("getLastGenerationStats", &HybridLLMSpec::getLastGenerationStats);
+ prototype.registerHybridMethod("getHistory", &HybridLLMSpec::getHistory);
+ prototype.registerHybridMethod("clearHistory", &HybridLLMSpec::clearHistory);
  });
  }

@@ -13,13 +13,21 @@
  #error NitroModules cannot be found! Are you sure you installed NitroModules properly?
  #endif

+ // Forward declaration of `LLMLoadOptions` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMLoadOptions; }
  // Forward declaration of `GenerationStats` to properly resolve imports.
  namespace margelo::nitro::mlxreactnative { struct GenerationStats; }
+ // Forward declaration of `LLMMessage` to properly resolve imports.
+ namespace margelo::nitro::mlxreactnative { struct LLMMessage; }

  #include <string>
  #include <NitroModules/Promise.hpp>
+ #include "LLMLoadOptions.hpp"
+ #include <optional>
  #include <functional>
  #include "GenerationStats.hpp"
+ #include "LLMMessage.hpp"
+ #include <vector>

  namespace margelo::nitro::mlxreactnative {

@@ -58,11 +66,14 @@ namespace margelo::nitro::mlxreactnative {

  public:
  // Methods
- virtual std::shared_ptr<Promise<void>> load(const std::string& modelId, const std::function<void(double /* progress */)>& onProgress) = 0;
+ virtual std::shared_ptr<Promise<void>> load(const std::string& modelId, const std::optional<LLMLoadOptions>& options) = 0;
  virtual std::shared_ptr<Promise<std::string>> generate(const std::string& prompt) = 0;
  virtual std::shared_ptr<Promise<std::string>> stream(const std::string& prompt, const std::function<void(const std::string& /* token */)>& onToken) = 0;
  virtual void stop() = 0;
+ virtual void unload() = 0;
  virtual GenerationStats getLastGenerationStats() = 0;
+ virtual std::vector<LLMMessage> getHistory() = 0;
+ virtual void clearHistory() = 0;

  protected:
  // Hybrid Setup