react-native-nitro-mlx 0.2.1 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/MLXReactNative.podspec +1 -1
  2. package/ios/Sources/HybridLLM.swift +296 -49
  3. package/lib/module/index.js +1 -0
  4. package/lib/module/index.js.map +1 -1
  5. package/lib/module/llm.js +33 -3
  6. package/lib/module/llm.js.map +1 -1
  7. package/lib/module/tool-utils.js +56 -0
  8. package/lib/module/tool-utils.js.map +1 -0
  9. package/lib/typescript/src/index.d.ts +3 -2
  10. package/lib/typescript/src/index.d.ts.map +1 -1
  11. package/lib/typescript/src/llm.d.ts +14 -2
  12. package/lib/typescript/src/llm.d.ts.map +1 -1
  13. package/lib/typescript/src/specs/LLM.nitro.d.ts +30 -3
  14. package/lib/typescript/src/specs/LLM.nitro.d.ts.map +1 -1
  15. package/lib/typescript/src/tool-utils.d.ts +13 -0
  16. package/lib/typescript/src/tool-utils.d.ts.map +1 -0
  17. package/nitrogen/generated/ios/MLXReactNative+autolinking.rb +1 -1
  18. package/nitrogen/generated/ios/MLXReactNative-Swift-Cxx-Bridge.cpp +34 -1
  19. package/nitrogen/generated/ios/MLXReactNative-Swift-Cxx-Bridge.hpp +173 -1
  20. package/nitrogen/generated/ios/MLXReactNative-Swift-Cxx-Umbrella.hpp +8 -1
  21. package/nitrogen/generated/ios/MLXReactNativeAutolinking.mm +1 -1
  22. package/nitrogen/generated/ios/MLXReactNativeAutolinking.swift +1 -1
  23. package/nitrogen/generated/ios/c++/HybridLLMSpecSwift.cpp +1 -1
  24. package/nitrogen/generated/ios/c++/HybridLLMSpecSwift.hpp +10 -3
  25. package/nitrogen/generated/ios/c++/HybridModelManagerSpecSwift.cpp +1 -1
  26. package/nitrogen/generated/ios/c++/HybridModelManagerSpecSwift.hpp +1 -1
  27. package/nitrogen/generated/ios/swift/Func_std__shared_ptr_Promise_std__shared_ptr_Promise_std__shared_ptr_AnyMap______std__shared_ptr_AnyMap_.swift +62 -0
  28. package/nitrogen/generated/ios/swift/Func_void.swift +1 -1
  29. package/nitrogen/generated/ios/swift/Func_void_bool.swift +1 -1
  30. package/nitrogen/generated/ios/swift/Func_void_double.swift +1 -1
  31. package/nitrogen/generated/ios/swift/Func_void_std__exception_ptr.swift +1 -1
  32. package/nitrogen/generated/ios/swift/Func_void_std__shared_ptr_AnyMap_.swift +47 -0
  33. package/nitrogen/generated/ios/swift/Func_void_std__shared_ptr_Promise_std__shared_ptr_AnyMap___.swift +67 -0
  34. package/nitrogen/generated/ios/swift/Func_void_std__string.swift +1 -1
  35. package/nitrogen/generated/ios/swift/Func_void_std__string_std__string.swift +47 -0
  36. package/nitrogen/generated/ios/swift/Func_void_std__vector_std__string_.swift +1 -1
  37. package/nitrogen/generated/ios/swift/GenerationStats.swift +1 -1
  38. package/nitrogen/generated/ios/swift/HybridLLMSpec.swift +2 -2
  39. package/nitrogen/generated/ios/swift/HybridLLMSpec_cxx.swift +14 -2
  40. package/nitrogen/generated/ios/swift/HybridModelManagerSpec.swift +1 -1
  41. package/nitrogen/generated/ios/swift/HybridModelManagerSpec_cxx.swift +1 -1
  42. package/nitrogen/generated/ios/swift/LLMLoadOptions.swift +44 -2
  43. package/nitrogen/generated/ios/swift/LLMMessage.swift +1 -1
  44. package/nitrogen/generated/ios/swift/ToolDefinition.swift +113 -0
  45. package/nitrogen/generated/ios/swift/ToolParameter.swift +69 -0
  46. package/nitrogen/generated/shared/c++/GenerationStats.hpp +1 -1
  47. package/nitrogen/generated/shared/c++/HybridLLMSpec.cpp +1 -1
  48. package/nitrogen/generated/shared/c++/HybridLLMSpec.hpp +2 -2
  49. package/nitrogen/generated/shared/c++/HybridModelManagerSpec.cpp +1 -1
  50. package/nitrogen/generated/shared/c++/HybridModelManagerSpec.hpp +1 -1
  51. package/nitrogen/generated/shared/c++/LLMLoadOptions.hpp +10 -3
  52. package/nitrogen/generated/shared/c++/LLMMessage.hpp +1 -1
  53. package/nitrogen/generated/shared/c++/ToolDefinition.hpp +93 -0
  54. package/nitrogen/generated/shared/c++/ToolParameter.hpp +87 -0
  55. package/package.json +8 -7
  56. package/src/index.ts +10 -3
  57. package/src/llm.ts +42 -3
  58. package/src/specs/LLM.nitro.ts +37 -3
  59. package/src/tool-utils.ts +74 -0
package/MLXReactNative.podspec CHANGED
@@ -24,7 +24,7 @@ Pod::Spec.new do |s|
 
   spm_dependency(s,
     url: "https://github.com/ml-explore/mlx-swift-lm.git",
-    requirement: {kind: "upToNextMinorVersion", minimumVersion: "2.29.2"},
+    requirement: {kind: "upToNextMinorVersion", minimumVersion: "2.29.3"},
     products: ["MLXLLM", "MLXLMCommon"]
   )
 
package/ios/Sources/HybridLLM.swift CHANGED
@@ -3,6 +3,7 @@ import NitroModules
 internal import MLX
 internal import MLXLLM
 internal import MLXLMCommon
+internal import Tokenizers
 
 class HybridLLM: HybridLLMSpec {
   private var session: ChatSession?
@@ -17,6 +18,10 @@ class HybridLLM: HybridLLMSpec {
   private var modelFactory: ModelFactory = LLMModelFactory.shared
   private var manageHistory: Bool = false
   private var messageHistory: [LLMMessage] = []
+  private var loadTask: Task<Void, Error>?
+
+  private var tools: [ToolDefinition] = []
+  private var toolSchemas: [ToolSpec] = []
 
   var isLoaded: Bool { session != nil }
   var isGenerating: Bool { currentTask != nil }
@@ -60,51 +65,95 @@ class HybridLLM: HybridLLMSpec {
       allocatedMB, cacheMB, peakMB)
   }
 
+  private func buildToolSchema(from tool: ToolDefinition) -> ToolSpec {
+    var properties: [String: [String: Any]] = [:]
+    var required: [String] = []
+
+    for param in tool.parameters {
+      properties[param.name] = [
+        "type": param.type,
+        "description": param.description
+      ]
+      if param.required {
+        required.append(param.name)
+      }
+    }
+
+    return [
+      "type": "function",
+      "function": [
+        "name": tool.name,
+        "description": tool.description,
+        "parameters": [
+          "type": "object",
+          "properties": properties,
+          "required": required
+        ]
+      ]
+    ] as ToolSpec
+  }
+
   func load(modelId: String, options: LLMLoadOptions?) throws -> Promise<Void> {
-    return Promise.async { [self] in
-      MLX.GPU.set(cacheLimit: 2000000)
+    self.loadTask?.cancel()
 
-      self.currentTask?.cancel()
-      self.currentTask = nil
-      self.session = nil
-      self.container = nil
-      MLX.GPU.clearCache()
+    return Promise.async { [self] in
+      let task = Task { @MainActor in
+        MLX.GPU.set(cacheLimit: 2000000)
 
-      let memoryAfterCleanup = self.getMemoryUsage()
-      let gpuAfterCleanup = self.getGPUMemoryUsage()
-      log("After cleanup - Host: \(memoryAfterCleanup), GPU: \(gpuAfterCleanup)")
+        self.currentTask?.cancel()
+        self.currentTask = nil
+        self.session = nil
+        self.container = nil
+        self.tools = []
+        self.toolSchemas = []
+        MLX.GPU.clearCache()
+
+        let memoryAfterCleanup = self.getMemoryUsage()
+        let gpuAfterCleanup = self.getGPUMemoryUsage()
+        log("After cleanup - Host: \(memoryAfterCleanup), GPU: \(gpuAfterCleanup)")
+
+        let modelDir = await ModelDownloader.shared.getModelDirectory(modelId: modelId)
+        log("Loading from directory: \(modelDir.path)")
+
+        let config = ModelConfiguration(directory: modelDir)
+        let loadedContainer = try await self.modelFactory.loadContainer(
+          configuration: config
+        ) { progress in
+          options?.onProgress?(progress.fractionCompleted)
+        }
 
-      let modelDir = await ModelDownloader.shared.getModelDirectory(modelId: modelId)
-      log("Loading from directory: \(modelDir.path)")
+        try Task.checkCancellation()
 
-      let config = ModelConfiguration(directory: modelDir)
-      let loadedContainer = try await modelFactory.loadContainer(
-        configuration: config
-      ) { progress in
-        options?.onProgress?(progress.fractionCompleted)
-      }
+        let memoryAfterContainer = self.getMemoryUsage()
+        let gpuAfterContainer = self.getGPUMemoryUsage()
+        log("Model loaded - Host: \(memoryAfterContainer), GPU: \(gpuAfterContainer)")
 
-      let memoryAfterContainer = self.getMemoryUsage()
-      let gpuAfterContainer = self.getGPUMemoryUsage()
-      log("Model loaded - Host: \(memoryAfterContainer), GPU: \(gpuAfterContainer)")
+        if let jsTools = options?.tools {
+          self.tools = jsTools
+          self.toolSchemas = jsTools.map { self.buildToolSchema(from: $0) }
+          log("Loaded \(self.tools.count) tools: \(self.tools.map { $0.name })")
+        }
 
-      // Convert [LLMMessage]? to [String: Any]?
-      let additionalContextDict: [String: Any]? = if let messages = options?.additionalContext {
-        ["messages": messages.map { ["role": $0.role, "content": $0.content] }]
-      } else {
-        nil
-      }
+        let additionalContextDict: [String: Any]? = if let messages = options?.additionalContext {
+          ["messages": messages.map { ["role": $0.role, "content": $0.content] }]
+        } else {
+          nil
+        }
 
-      self.container = loadedContainer
-      self.session = ChatSession(loadedContainer, instructions: self.systemPrompt, additionalContext: additionalContextDict)
-      self.modelId = modelId
+        self.container = loadedContainer
+        self.session = ChatSession(loadedContainer, instructions: self.systemPrompt, additionalContext: additionalContextDict)
+        self.modelId = modelId
 
-      self.manageHistory = options?.manageHistory ?? false
-      self.messageHistory = options?.additionalContext ?? []
+        self.manageHistory = options?.manageHistory ?? false
+        self.messageHistory = options?.additionalContext ?? []
 
-      if self.manageHistory {
-        log("History management enabled with \(self.messageHistory.count) initial messages")
+        if self.manageHistory {
+          log("History management enabled with \(self.messageHistory.count) initial messages")
+        }
       }
+
+      self.loadTask = task
+      try await task.value
     }
   }
 
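Note: buildToolSchema above turns each JS-provided ToolDefinition into an OpenAI-style function schema that is passed to the model as a ToolSpec. A rough TypeScript illustration of the resulting shape follows; the weather tool and its fields are hypothetical, not part of this package:

    // Approximate shape of the ToolSpec built from one ToolDefinition
    const exampleToolSpec = {
      type: "function",
      function: {
        name: "get_weather",                                    // tool.name
        description: "Look up the current weather for a city",  // tool.description
        parameters: {
          type: "object",
          properties: {
            // one entry per ToolParameter: { type, description }
            city: { type: "string", description: "City to look up" },
          },
          required: ["city"],                                   // parameters with required === true
        },
      },
    };
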
@@ -143,8 +192,14 @@ class HybridLLM: HybridLLMSpec {
     }
   }
 
-  func stream(prompt: String, onToken: @escaping (String) -> Void) throws -> Promise<String> {
-    guard let session = session else {
+  private let maxToolCallDepth = 10
+
+  func stream(
+    prompt: String,
+    onToken: @escaping (String) -> Void,
+    onToolCall: ((String, String) -> Void)?
+  ) throws -> Promise<String> {
+    guard let container = container as? ModelContainer else {
       throw LLMError.notLoaded
     }
 
@@ -154,22 +209,24 @@
     }
 
     let task = Task<String, Error> {
-      var result = ""
-      var tokenCount = 0
       let startTime = Date()
       var firstTokenTime: Date?
+      var tokenCount = 0
 
-      log("Streaming response for: \(prompt.prefix(50))...")
-      for try await chunk in session.streamResponse(to: prompt) {
-        if Task.isCancelled { break }
-
-        if firstTokenTime == nil {
-          firstTokenTime = Date()
-        }
-        tokenCount += 1
-        result += chunk
-        onToken(chunk)
-      }
+      let result = try await self.performGeneration(
+        container: container,
+        prompt: prompt,
+        toolResults: nil,
+        depth: 0,
+        onToken: { token in
+          if firstTokenTime == nil {
+            firstTokenTime = Date()
+          }
+          tokenCount += 1
+          onToken(token)
+        },
+        onToolCall: onToolCall ?? { _, _ in }
+      )
 
       let endTime = Date()
       let totalTime = endTime.timeIntervalSince(startTime) * 1000
@@ -205,12 +262,200 @@
     }
   }
 
+  private func performGeneration(
+    container: ModelContainer,
+    prompt: String,
+    toolResults: [String]?,
+    depth: Int,
+    onToken: @escaping (String) -> Void,
+    onToolCall: @escaping (String, String) -> Void
+  ) async throws -> String {
+    if depth >= maxToolCallDepth {
+      log("Max tool call depth reached (\(maxToolCallDepth))")
+      return ""
+    }
+
+    var output = ""
+    var pendingToolCalls: [(tool: ToolDefinition, args: [String: Any], argsJson: String)] = []
+
+    var chat: [Chat.Message] = []
+
+    if !self.systemPrompt.isEmpty {
+      chat.append(.system(self.systemPrompt))
+    }
+
+    for msg in self.messageHistory {
+      switch msg.role {
+      case "user": chat.append(.user(msg.content))
+      case "assistant": chat.append(.assistant(msg.content))
+      case "system": chat.append(.system(msg.content))
+      case "tool": chat.append(.tool(msg.content))
+      default: break
+      }
+    }
+
+    if depth == 0 {
+      chat.append(.user(prompt))
+    }
+
+    if let toolResults = toolResults {
+      for result in toolResults {
+        chat.append(.tool(result))
+      }
+    }
+
+    let userInput = UserInput(
+      chat: chat,
+      tools: !self.toolSchemas.isEmpty ? self.toolSchemas : nil
+    )
+
+    let lmInput = try await container.prepare(input: userInput)
+
+    let stream = try await container.perform { context in
+      let parameters = GenerateParameters(maxTokens: 2048, temperature: 0.7)
+      return try MLXLMCommon.generate(
+        input: lmInput,
+        parameters: parameters,
+        context: context
+      )
+    }
+
+    for await generation in stream {
+      if Task.isCancelled { break }
+
+      switch generation {
+      case .chunk(let text):
+        output += text
+        onToken(text)
+
+      case .toolCall(let toolCall):
+        log("Tool call detected: \(toolCall.function.name)")
+
+        guard let tool = self.tools.first(where: { $0.name == toolCall.function.name }) else {
+          log("Unknown tool: \(toolCall.function.name)")
+          continue
+        }
+
+        let argsDict = self.convertToolCallArguments(toolCall.function.arguments)
+        let argsJson = self.dictionaryToJson(argsDict)
+
+        pendingToolCalls.append((tool: tool, args: argsDict, argsJson: argsJson))
+        onToolCall(toolCall.function.name, argsJson)
+
+      case .info(let info):
+        log("Generation info: \(info.generationTokenCount) tokens, \(String(format: "%.1f", info.tokensPerSecond)) tokens/s")
+      }
+    }
+
+    if !pendingToolCalls.isEmpty {
+      log("Executing \(pendingToolCalls.count) tool call(s)")
+
+      var allToolResults: [String] = []
+
+      for (tool, argsDict, _) in pendingToolCalls {
+        do {
+          let argsAnyMap = self.dictionaryToAnyMap(argsDict)
+          let outerPromise = tool.handler(argsAnyMap)
+          let innerPromise = try await outerPromise.await()
+          let resultAnyMap = try await innerPromise.await()
+          let resultDict = self.anyMapToDictionary(resultAnyMap)
+          let resultJson = self.dictionaryToJson(resultDict)
+
+          log("Tool result for \(tool.name): \(resultJson.prefix(100))...")
+          allToolResults.append(resultJson)
+        } catch {
+          log("Tool execution error for \(tool.name): \(error)")
+          allToolResults.append("{\"error\": \"Tool execution failed\"}")
+        }
+      }
+
+      if !output.isEmpty {
+        self.messageHistory.append(LLMMessage(role: "assistant", content: output))
+      }
+
+      if depth == 0 {
+        self.messageHistory.append(LLMMessage(role: "user", content: prompt))
+      }
+
+      for result in allToolResults {
+        self.messageHistory.append(LLMMessage(role: "tool", content: result))
+      }
+
+      onToken("\u{200B}")
+
+      let continuation = try await self.performGeneration(
+        container: container,
+        prompt: prompt,
+        toolResults: allToolResults,
+        depth: depth + 1,
+        onToken: onToken,
+        onToolCall: onToolCall
+      )
+
+      return output + continuation
+    }
+
+    return output
+  }
+
+  private func convertToolCallArguments(_ arguments: [String: JSONValue]) -> [String: Any] {
+    var result: [String: Any] = [:]
+    for (key, value) in arguments {
+      result[key] = value.anyValue
+    }
+    return result
+  }
+
+  private func dictionaryToJson(_ dict: [String: Any]) -> String {
+    guard let data = try? JSONSerialization.data(withJSONObject: dict),
+          let json = String(data: data, encoding: .utf8) else {
+      return "{}"
+    }
+    return json
+  }
+
+  private func dictionaryToAnyMap(_ dict: [String: Any]) -> AnyMap {
+    let anyMap = AnyMap()
+    for (key, value) in dict {
+      switch value {
+      case let stringValue as String:
+        anyMap.setString(key: key, value: stringValue)
+      case let doubleValue as Double:
+        anyMap.setDouble(key: key, value: doubleValue)
+      case let intValue as Int:
+        anyMap.setDouble(key: key, value: Double(intValue))
+      case let boolValue as Bool:
+        anyMap.setBoolean(key: key, value: boolValue)
+      default:
+        anyMap.setString(key: key, value: String(describing: value))
+      }
+    }
+    return anyMap
+  }
+
+  private func anyMapToDictionary(_ anyMap: AnyMap) -> [String: Any] {
+    var dict: [String: Any] = [:]
+    for key in anyMap.getAllKeys() {
+      if anyMap.isString(key: key) {
+        dict[key] = anyMap.getString(key: key)
+      } else if anyMap.isDouble(key: key) {
+        dict[key] = anyMap.getDouble(key: key)
+      } else if anyMap.isBool(key: key) {
+        dict[key] = anyMap.getBoolean(key: key)
+      }
+    }
+    return dict
+  }
+
   func stop() throws {
     currentTask?.cancel()
     currentTask = nil
   }
 
   func unload() throws {
+    loadTask?.cancel()
+    loadTask = nil
+
     let memoryBefore = getMemoryUsage()
     let gpuBefore = getGPUMemoryUsage()
     log("Before unload - Host: \(memoryBefore), GPU: \(gpuBefore)")
@@ -219,6 +464,8 @@ class HybridLLM: HybridLLMSpec {
     currentTask = nil
     session = nil
     container = nil
+    tools = []
+    toolSchemas = []
     messageHistory = []
     manageHistory = false
     modelId = ""
package/lib/module/index.js CHANGED
@@ -3,4 +3,5 @@
 export { LLM } from "./llm.js";
 export { ModelManager } from "./modelManager.js";
 export { MLXModel, MLXModels, ModelFamily, ModelProvider } from "./models.js";
+export { createTool } from "./tool-utils.js";
 //# sourceMappingURL=index.js.map
package/lib/module/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["LLM","ModelManager","MLXModel","MLXModels","ModelFamily","ModelProvider"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;AAAA,SAASA,GAAG,QAAsB,UAAO;AACzC,SAASC,YAAY,QAAQ,mBAAgB;AAC7C,SACEC,QAAQ,EACRC,SAAS,EACTC,WAAW,EAEXC,aAAa,QAER,aAAU","ignoreList":[]}
+ {"version":3,"names":["LLM","ModelManager","MLXModel","MLXModels","ModelFamily","ModelProvider","createTool"],"sourceRoot":"../../src","sources":["index.ts"],"mappings":";;AAAA,SAASA,GAAG,QAA8D,UAAO;AACjF,SAASC,YAAY,QAAQ,mBAAgB;AAC7C,SACEC,QAAQ,EACRC,SAAS,EACTC,WAAW,EAEXC,aAAa,QAER,aAAU;AAUjB,SAASC,UAAU,QAAqC,iBAAc","ignoreList":[]}
package/lib/module/llm.js CHANGED
@@ -50,13 +50,43 @@ export const LLM = {
     return getInstance().generate(prompt);
   },
   /**
-   * Stream a response token by token.
+   * Stream a response token by token with optional tool calling support.
+   * Tools must be provided when loading the model via `load()` options.
+   * Tools are automatically executed when the model calls them.
    * @param prompt - The input text to generate a response for
    * @param onToken - Callback invoked for each generated token
+   * @param onToolCall - Optional callback invoked when a tool is called.
+   * Receives the current tool call and an accumulated array of all tool calls so far.
    * @returns The complete generated text
    */
-  stream(prompt, onToken) {
-    return getInstance().stream(prompt, onToken);
+  stream(prompt, onToken, onToolCall) {
+    const accumulatedToolCalls = [];
+    return getInstance().stream(prompt, onToken, (name, argsJson) => {
+      if (onToolCall) {
+        try {
+          const args = JSON.parse(argsJson);
+          const toolCall = {
+            name,
+            arguments: args
+          };
+          accumulatedToolCalls.push(toolCall);
+          onToolCall({
+            toolCall,
+            allToolCalls: [...accumulatedToolCalls]
+          });
+        } catch {
+          const toolCall = {
+            name,
+            arguments: {}
+          };
+          accumulatedToolCalls.push(toolCall);
+          onToolCall({
+            toolCall,
+            allToolCalls: [...accumulatedToolCalls]
+          });
+        }
+      }
+    });
   },
   /**
    * Stop the current generation. Safe to call even if not generating.
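Note: the wrapper above adapts the native (name, argsJson) callback into the typed ToolCallUpdate object. An illustrative call from application code (the prompt and tool are hypothetical, and tools must already have been registered through the load() options):

    let reply = "";
    const finalText = await LLM.stream(
      "What's the weather in Lisbon?",
      (token) => { reply += token; },            // onToken
      ({ toolCall, allToolCalls }) => {          // onToolCall receives a ToolCallUpdate
        console.log(`model called ${toolCall.name}`, toolCall.arguments);
        console.log(`${allToolCalls.length} tool call(s) so far`);
      }
    );
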
package/lib/module/llm.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"names":["NitroModules","instance","getInstance","createHybridObject","LLM","load","modelId","options","generate","prompt","stream","onToken","stop","unload","getLastGenerationStats","getHistory","clearHistory","isLoaded","isGenerating","debug","value","systemPrompt"],"sourceRoot":"../../src","sources":["llm.ts"],"mappings":";;AAAA,SAASA,YAAY,QAAQ,4BAA4B;AAGzD,IAAIC,QAAwB,GAAG,IAAI;AAOnC,SAASC,WAAWA,CAAA,EAAY;EAC9B,IAAI,CAACD,QAAQ,EAAE;IACbA,QAAQ,GAAGD,YAAY,CAACG,kBAAkB,CAAU,KAAK,CAAC;EAC5D;EACA,OAAOF,QAAQ;AACjB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,MAAMG,GAAG,GAAG;EACjB;AACF;AACA;AACA;AACA;EACEC,IAAIA,CAACC,OAAe,EAAEC,OAAuB,EAAiB;IAC5D,OAAOL,WAAW,CAAC,CAAC,CAACG,IAAI,CAACC,OAAO,EAAEC,OAAO,CAAC;EAC7C,CAAC;EAED;AACF;AACA;AACA;AACA;AACA;EACEC,QAAQA,CAACC,MAAc,EAAmB;IACxC,OAAOP,WAAW,CAAC,CAAC,CAACM,QAAQ,CAACC,MAAM,CAAC;EACvC,CAAC;EAED;AACF;AACA;AACA;AACA;AACA;EACEC,MAAMA,CAACD,MAAc,EAAEE,OAAgC,EAAmB;IACxE,OAAOT,WAAW,CAAC,CAAC,CAACQ,MAAM,CAACD,MAAM,EAAEE,OAAO,CAAC;EAC9C,CAAC;EAED;AACF;AACA;EACEC,IAAIA,CAAA,EAAS;IACXV,WAAW,CAAC,CAAC,CAACU,IAAI,CAAC,CAAC;EACtB,CAAC;EAED;AACF;AACA;AACA;EACEC,MAAMA,CAAA,EAAS;IACbX,WAAW,CAAC,CAAC,CAACW,MAAM,CAAC,CAAC;EACxB,CAAC;EAED;AACF;AACA;AACA;EACEC,sBAAsBA,CAAA,EAAoB;IACxC,OAAOZ,WAAW,CAAC,CAAC,CAACY,sBAAsB,CAAC,CAAC;EAC/C,CAAC;EAED;AACF;AACA;AACA;EACEC,UAAUA,CAAA,EAAc;IACtB,OAAOb,WAAW,CAAC,CAAC,CAACa,UAAU,CAAC,CAAC;EACnC,CAAC;EAED;AACF;AACA;EACEC,YAAYA,CAAA,EAAS;IACnBd,WAAW,CAAC,CAAC,CAACc,YAAY,CAAC,CAAC;EAC9B,CAAC;EAED;EACA,IAAIC,QAAQA,CAAA,EAAY;IACtB,OAAOf,WAAW,CAAC,CAAC,CAACe,QAAQ;EAC/B,CAAC;EAED;EACA,IAAIC,YAAYA,CAAA,EAAY;IAC1B,OAAOhB,WAAW,CAAC,CAAC,CAACgB,YAAY;EACnC,CAAC;EAED;EACA,IAAIZ,OAAOA,CAAA,EAAW;IACpB,OAAOJ,WAAW,CAAC,CAAC,CAACI,OAAO;EAC9B,CAAC;EAED;EACA,IAAIa,KAAKA,CAAA,EAAY;IACnB,OAAOjB,WAAW,CAAC,CAAC,CAACiB,KAAK;EAC5B,CAAC;EAED,IAAIA,KAAKA,CAACC,KAAc,EAAE;IACxBlB,WAAW,CAAC,CAAC,CAACiB,KAAK,GAAGC,KAAK;EAC7B,CAAC;EAED;AACF;AACA;AACA;AACA;EACE,IAAIC,YAAYA,CAAA,EAAW;IACzB,OAAOnB,WAAW,CAAC,CAAC,CAACmB,YAAY;EACnC,CAAC;EAED,IAAIA,YAAYA,CAACD,KAAa,EAAE;IAC9BlB,WAAW,CAAC,CAAC,CAACmB,YAAY,GAAGD,KAAK;EACpC;AACF,CAAC","ignoreList":[]}
+ {"version":3,"names":["NitroModules","instance","getInstance","createHybridObject","LLM","load","modelId","options","generate","prompt","stream","onToken","onToolCall","accumulatedToolCalls","name","argsJson","args","JSON","parse","toolCall","arguments","push","allToolCalls","stop","unload","getLastGenerationStats","getHistory","clearHistory","isLoaded","isGenerating","debug","value","systemPrompt"],"sourceRoot":"../../src","sources":["llm.ts"],"mappings":";;AAAA,SAASA,YAAY,QAAQ,4BAA4B;AAGzD,IAAIC,QAAwB,GAAG,IAAI;AAiBnC,SAASC,WAAWA,CAAA,EAAY;EAC9B,IAAI,CAACD,QAAQ,EAAE;IACbA,QAAQ,GAAGD,YAAY,CAACG,kBAAkB,CAAU,KAAK,CAAC;EAC5D;EACA,OAAOF,QAAQ;AACjB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,MAAMG,GAAG,GAAG;EACjB;AACF;AACA;AACA;AACA;EACEC,IAAIA,CAACC,OAAe,EAAEC,OAAuB,EAAiB;IAC5D,OAAOL,WAAW,CAAC,CAAC,CAACG,IAAI,CAACC,OAAO,EAAEC,OAAO,CAAC;EAC7C,CAAC;EAED;AACF;AACA;AACA;AACA;AACA;EACEC,QAAQA,CAACC,MAAc,EAAmB;IACxC,OAAOP,WAAW,CAAC,CAAC,CAACM,QAAQ,CAACC,MAAM,CAAC;EACvC,CAAC;EAED;AACF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;EACEC,MAAMA,CACJD,MAAc,EACdE,OAAgC,EAChCC,UAA6C,EAC5B;IACjB,MAAMC,oBAAoC,GAAG,EAAE;IAE/C,OAAOX,WAAW,CAAC,CAAC,CAACQ,MAAM,CAACD,MAAM,EAAEE,OAAO,EAAE,CAACG,IAAY,EAAEC,QAAgB,KAAK;MAC/E,IAAIH,UAAU,EAAE;QACd,IAAI;UACF,MAAMI,IAAI,GAAGC,IAAI,CAACC,KAAK,CAACH,QAAQ,CAA4B;UAC5D,MAAMI,QAAQ,GAAG;YAAEL,IAAI;YAAEM,SAAS,EAAEJ;UAAK,CAAC;UAC1CH,oBAAoB,CAACQ,IAAI,CAACF,QAAQ,CAAC;UACnCP,UAAU,CAAC;YACTO,QAAQ;YACRG,YAAY,EAAE,CAAC,GAAGT,oBAAoB;UACxC,CAAC,CAAC;QACJ,CAAC,CAAC,MAAM;UACN,MAAMM,QAAQ,GAAG;YAAEL,IAAI;YAAEM,SAAS,EAAE,CAAC;UAAE,CAAC;UACxCP,oBAAoB,CAACQ,IAAI,CAACF,QAAQ,CAAC;UACnCP,UAAU,CAAC;YACTO,QAAQ;YACRG,YAAY,EAAE,CAAC,GAAGT,oBAAoB;UACxC,CAAC,CAAC;QACJ;MACF;IACF,CAAC,CAAC;EACJ,CAAC;EAED;AACF;AACA;EACEU,IAAIA,CAAA,EAAS;IACXrB,WAAW,CAAC,CAAC,CAACqB,IAAI,CAAC,CAAC;EACtB,CAAC;EAED;AACF;AACA;AACA;EACEC,MAAMA,CAAA,EAAS;IACbtB,WAAW,CAAC,CAAC,CAACsB,MAAM,CAAC,CAAC;EACxB,CAAC;EAED;AACF;AACA;AACA;EACEC,sBAAsBA,CAAA,EAAoB;IACxC,OAAOvB,WAAW,CAAC,CAAC,CAACuB,sBAAsB,CAAC,CAAC;EAC/C,CAAC;EAED;AACF;AACA;AACA;EACEC,UAAUA,CAAA,EAAc;IACtB,OAAOxB,WAAW,CAAC,CAAC,CAACwB,UAAU,CAAC,CAAC;EACnC,CAAC;EAED;AACF;AACA;EACEC,YAAYA,CAAA,EAAS;IACnBzB,WAAW,CAAC,CAAC,CAACyB,YAAY,CAAC,CAAC;EAC9B,CAAC;EAED;EACA,IAAIC,QAAQA,CAAA,EAAY;IACtB,OAAO1B,WAAW,CAAC,CAAC,CAAC0B,QAAQ;EAC/B,CAAC;EAED;EACA,IAAIC,YAAYA,CAAA,EAAY;IAC1B,OAAO3B,WAAW,CAAC,CAAC,CAAC2B,YAAY;EACnC,CAAC;EAED;EACA,IAAIvB,OAAOA,CAAA,EAAW;IACpB,OAAOJ,WAAW,CAAC,CAAC,CAACI,OAAO;EAC9B,CAAC;EAED;EACA,IAAIwB,KAAKA,CAAA,EAAY;IACnB,OAAO5B,WAAW,CAAC,CAAC,CAAC4B,KAAK;EAC5B,CAAC;EAED,IAAIA,KAAKA,CAACC,KAAc,EAAE;IACxB7B,WAAW,CAAC,CAAC,CAAC4B,KAAK,GAAGC,KAAK;EAC7B,CAAC;EAED;AACF;AACA;AACA;AACA;EACE,IAAIC,YAAYA,CAAA,EAAW;IACzB,OAAO9B,WAAW,CAAC,CAAC,CAAC8B,YAAY;EACnC,CAAC;EAED,IAAIA,YAAYA,CAACD,KAAa,EAAE;IAC9B7B,WAAW,CAAC,CAAC,CAAC8B,YAAY,GAAGD,KAAK;EACpC;AACF,CAAC","ignoreList":[]}
package/lib/module/tool-utils.js ADDED
@@ -0,0 +1,56 @@
+"use strict";
+
+function getZodTypeString(zodType) {
+  const typeName = zodType._zod.def.type;
+  switch (typeName) {
+    case 'string':
+      return 'string';
+    case 'number':
+    case 'int':
+      return 'number';
+    case 'boolean':
+      return 'boolean';
+    case 'array':
+      return 'array';
+    case 'object':
+      return 'object';
+    case 'optional':
+      return getZodTypeString(zodType._zod.def.innerType);
+    case 'default':
+      return getZodTypeString(zodType._zod.def.innerType);
+    default:
+      return 'string';
+  }
+}
+function isZodOptional(zodType) {
+  const typeName = zodType._zod.def.type;
+  return typeName === 'optional' || typeName === 'default';
+}
+function zodSchemaToParameters(schema) {
+  const shape = schema._zod.def.shape;
+  const parameters = [];
+  for (const [key, zodType] of Object.entries(shape)) {
+    const zType = zodType;
+    parameters.push({
+      name: key,
+      type: getZodTypeString(zType),
+      description: zType.description ?? '',
+      required: !isZodOptional(zType)
+    });
+  }
+  return parameters;
+}
+export function createTool(definition) {
+  return {
+    name: definition.name,
+    description: definition.description,
+    parameters: zodSchemaToParameters(definition.arguments),
+    handler: async args => {
+      const argsObj = args;
+      const parsedArgs = definition.arguments.parse(argsObj);
+      const result = await definition.handler(parsedArgs);
+      return result;
+    }
+  };
+}
+//# sourceMappingURL=tool-utils.js.map
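Note: createTool derives the flat ToolParameter list from a zod object schema (it reads zod v4's `_zod.def` internals) and wraps the handler so arguments are validated with schema.parse before execution. A hedged usage sketch; the weather tool, model id, and field names are illustrative only, not part of this package:

    import { z } from "zod";
    import { LLM, createTool } from "react-native-nitro-mlx";

    const getWeather = createTool({
      name: "get_weather",
      description: "Look up the current weather for a city",
      arguments: z.object({
        city: z.string().describe("City name"),
        unit: z.string().optional().describe("celsius or fahrenheit"),
      }),
      // args arrive here already validated by the zod schema above
      handler: async ({ city }) => ({ city, temperatureC: 21.5, conditions: "sunny" }),
    });

    // Registered at load time; the model can then call it during stream()
    await LLM.load("some-model-id", { tools: [getWeather] });
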
package/lib/module/tool-utils.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"names":["getZodTypeString","zodType","typeName","_zod","def","type","innerType","isZodOptional","zodSchemaToParameters","schema","shape","parameters","key","Object","entries","zType","push","name","description","required","createTool","definition","arguments","handler","args","argsObj","parsedArgs","parse","result"],"sourceRoot":"../../src","sources":["tool-utils.ts"],"mappings":";;AAcA,SAASA,gBAAgBA,CAACC,OAAkB,EAAqB;EAC/D,MAAMC,QAAQ,GAAGD,OAAO,CAACE,IAAI,CAACC,GAAG,CAACC,IAAI;EACtC,QAAQH,QAAQ;IACd,KAAK,QAAQ;MACX,OAAO,QAAQ;IACjB,KAAK,QAAQ;IACb,KAAK,KAAK;MACR,OAAO,QAAQ;IACjB,KAAK,SAAS;MACZ,OAAO,SAAS;IAClB,KAAK,OAAO;MACV,OAAO,OAAO;IAChB,KAAK,QAAQ;MACX,OAAO,QAAQ;IACjB,KAAK,UAAU;MACb,OAAOF,gBAAgB,CAAEC,OAAO,CAA8BE,IAAI,CAACC,GAAG,CAACE,SAAS,CAAC;IACnF,KAAK,SAAS;MACZ,OAAON,gBAAgB,CAAEC,OAAO,CAA6BE,IAAI,CAACC,GAAG,CAACE,SAAS,CAAC;IAClF;MACE,OAAO,QAAQ;EACnB;AACF;AAEA,SAASC,aAAaA,CAACN,OAAkB,EAAW;EAClD,MAAMC,QAAQ,GAAGD,OAAO,CAACE,IAAI,CAACC,GAAG,CAACC,IAAI;EACtC,OAAOH,QAAQ,KAAK,UAAU,IAAIA,QAAQ,KAAK,SAAS;AAC1D;AAEA,SAASM,qBAAqBA,CAACC,MAAuB,EAAmB;EACvE,MAAMC,KAAK,GAAGD,MAAM,CAACN,IAAI,CAACC,GAAG,CAACM,KAAK;EACnC,MAAMC,UAA2B,GAAG,EAAE;EAEtC,KAAK,MAAM,CAACC,GAAG,EAAEX,OAAO,CAAC,IAAIY,MAAM,CAACC,OAAO,CAACJ,KAAK,CAAC,EAAE;IAClD,MAAMK,KAAK,GAAGd,OAAoB;IAClCU,UAAU,CAACK,IAAI,CAAC;MACdC,IAAI,EAAEL,GAAG;MACTP,IAAI,EAAEL,gBAAgB,CAACe,KAAK,CAAC;MAC7BG,WAAW,EAAEH,KAAK,CAACG,WAAW,IAAI,EAAE;MACpCC,QAAQ,EAAE,CAACZ,aAAa,CAACQ,KAAK;IAChC,CAAC,CAAC;EACJ;EAEA,OAAOJ,UAAU;AACnB;AAEA,OAAO,SAASS,UAAUA,CACxBC,UAAqC,EACrB;EAChB,OAAO;IACLJ,IAAI,EAAEI,UAAU,CAACJ,IAAI;IACrBC,WAAW,EAAEG,UAAU,CAACH,WAAW;IACnCP,UAAU,EAAEH,qBAAqB,CAACa,UAAU,CAACC,SAAS,CAAC;IACvDC,OAAO,EAAE,MAAOC,IAAY,IAAK;MAC/B,MAAMC,OAAO,GAAGD,IAA0C;MAC1D,MAAME,UAAU,GAAGL,UAAU,CAACC,SAAS,CAACK,KAAK,CAACF,OAAO,CAAC;MACtD,MAAMG,MAAM,GAAG,MAAMP,UAAU,CAACE,OAAO,CAACG,UAAU,CAAC;MACnD,OAAOE,MAAM;IACf;EACF,CAAC;AACH","ignoreList":[]}
package/lib/typescript/src/index.d.ts CHANGED
@@ -1,6 +1,7 @@
-export { LLM, type Message } from './llm';
+export { LLM, type Message, type ToolCallInfo, type ToolCallUpdate } from './llm';
 export { ModelManager } from './modelManager';
 export { MLXModel, MLXModels, ModelFamily, type ModelInfo, ModelProvider, type ModelQuantization, } from './models';
-export type { GenerationStats, LLM as LLMSpec, LLMLoadOptions } from './specs/LLM.nitro';
+export type { GenerationStats, LLM as LLMSpec, LLMLoadOptions, ToolDefinition, ToolParameter, ToolParameterType, } from './specs/LLM.nitro';
 export type { ModelManager as ModelManagerSpec } from './specs/ModelManager.nitro';
+export { createTool, type TypeSafeToolDefinition } from './tool-utils';
 //# sourceMappingURL=index.d.ts.map
package/lib/typescript/src/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,EAAE,MAAM,OAAO,CAAA;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AAC7C,OAAO,EACL,QAAQ,EACR,SAAS,EACT,WAAW,EACX,KAAK,SAAS,EACd,aAAa,EACb,KAAK,iBAAiB,GACvB,MAAM,UAAU,CAAA;AAEjB,YAAY,EAAE,eAAe,EAAE,GAAG,IAAI,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAA;AACxF,YAAY,EAAE,YAAY,IAAI,gBAAgB,EAAE,MAAM,4BAA4B,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,KAAK,OAAO,EAAE,KAAK,YAAY,EAAE,KAAK,cAAc,EAAE,MAAM,OAAO,CAAA;AACjF,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AAC7C,OAAO,EACL,QAAQ,EACR,SAAS,EACT,WAAW,EACX,KAAK,SAAS,EACd,aAAa,EACb,KAAK,iBAAiB,GACvB,MAAM,UAAU,CAAA;AACjB,YAAY,EACV,eAAe,EACf,GAAG,IAAI,OAAO,EACd,cAAc,EACd,cAAc,EACd,aAAa,EACb,iBAAiB,GAClB,MAAM,mBAAmB,CAAA;AAC1B,YAAY,EAAE,YAAY,IAAI,gBAAgB,EAAE,MAAM,4BAA4B,CAAA;AAClF,OAAO,EAAE,UAAU,EAAE,KAAK,sBAAsB,EAAE,MAAM,cAAc,CAAA"}
package/lib/typescript/src/llm.d.ts CHANGED
@@ -3,6 +3,14 @@ export type Message = {
     role: 'user' | 'assistant' | 'system';
     content: string;
 };
+export type ToolCallInfo = {
+    name: string;
+    arguments: Record<string, unknown>;
+};
+export type ToolCallUpdate = {
+    toolCall: ToolCallInfo;
+    allToolCalls: ToolCallInfo[];
+};
 /**
  * LLM text generation using MLX on Apple Silicon.
  *
@@ -40,12 +48,16 @@ export declare const LLM: {
     */
    generate(prompt: string): Promise<string>;
    /**
-    * Stream a response token by token.
+    * Stream a response token by token with optional tool calling support.
+    * Tools must be provided when loading the model via `load()` options.
+    * Tools are automatically executed when the model calls them.
     * @param prompt - The input text to generate a response for
     * @param onToken - Callback invoked for each generated token
+    * @param onToolCall - Optional callback invoked when a tool is called.
+    * Receives the current tool call and an accumulated array of all tool calls so far.
     * @returns The complete generated text
     */
-   stream(prompt: string, onToken: (token: string) => void): Promise<string>;
+   stream(prompt: string, onToken: (token: string) => void, onToolCall?: (update: ToolCallUpdate) => void): Promise<string>;
    /**
     * Stop the current generation. Safe to call even if not generating.
     */
package/lib/typescript/src/llm.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../../src/llm.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,eAAe,EAAE,cAAc,EAAkB,MAAM,mBAAmB,CAAA;AAIxF,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAA;IACrC,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AASD;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,eAAO,MAAM,GAAG;IACd;;;;OAIG;kBACW,MAAM,WAAW,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;IAI7D;;;;;OAKG;qBACc,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAIzC;;;;;OAKG;mBACY,MAAM,WAAW,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC;IAIzE;;OAEG;YACK,IAAI;IAIZ;;;OAGG;cACO,IAAI;IAId;;;OAGG;8BACuB,eAAe;IAIzC;;;OAGG;kBACW,OAAO,EAAE;IAIvB;;OAEG;oBACa,IAAI;IAIpB,mEAAmE;uBACnD,OAAO;IAIvB,gDAAgD;2BAC5B,OAAO;IAI3B,oEAAoE;sBACrD,MAAM;IAIrB,sCAAsC;WACzB,OAAO;IAQpB;;;;OAIG;kBACiB,MAAM;CAO3B,CAAA"}
+ {"version":3,"file":"llm.d.ts","sourceRoot":"","sources":["../../../src/llm.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,eAAe,EAAE,cAAc,EAAkB,MAAM,mBAAmB,CAAA;AAIxF,MAAM,MAAM,OAAO,GAAG;IACpB,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAA;IACrC,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,MAAM,MAAM,YAAY,GAAG;IACzB,IAAI,EAAE,MAAM,CAAA;IACZ,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;CACnC,CAAA;AAED,MAAM,MAAM,cAAc,GAAG;IAC3B,QAAQ,EAAE,YAAY,CAAA;IACtB,YAAY,EAAE,YAAY,EAAE,CAAA;CAC7B,CAAA;AASD;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,eAAO,MAAM,GAAG;IACd;;;;OAIG;kBACW,MAAM,WAAW,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;IAI7D;;;;;OAKG;qBACc,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAIzC;;;;;;;;;OASG;mBAEO,MAAM,WACL,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,eACnB,CAAC,MAAM,EAAE,cAAc,KAAK,IAAI,GAC5C,OAAO,CAAC,MAAM,CAAC;IAyBlB;;OAEG;YACK,IAAI;IAIZ;;;OAGG;cACO,IAAI;IAId;;;OAGG;8BACuB,eAAe;IAIzC;;;OAGG;kBACW,OAAO,EAAE;IAIvB;;OAEG;oBACa,IAAI;IAIpB,mEAAmE;uBACnD,OAAO;IAIvB,gDAAgD;2BAC5B,OAAO;IAI3B,oEAAoE;sBACrD,MAAM;IAIrB,sCAAsC;WACzB,OAAO;IAQpB;;;;OAIG;kBACiB,MAAM;CAO3B,CAAA"}