@runanywhere/llamacpp 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/RunAnywhereLlama.podspec +131 -0
  2. package/android/CMakeLists.txt +105 -0
  3. package/android/build.gradle +288 -0
  4. package/android/src/main/AndroidManifest.xml +3 -0
  5. package/android/src/main/cpp/cpp-adapter.cpp +14 -0
  6. package/android/src/main/java/com/margelo/nitro/runanywhere/llama/RunAnywhereLlamaPackage.kt +35 -0
  7. package/android/src/main/jniLibs/arm64-v8a/librac_backend_llamacpp.so +0 -0
  8. package/android/src/main/jniLibs/arm64-v8a/librac_backend_llamacpp_jni.so +0 -0
  9. package/android/src/main/jniLibs/arm64-v8a/librunanywhere_llamacpp.so +0 -0
  10. package/cpp/HybridRunAnywhereLlama.cpp +346 -0
  11. package/cpp/HybridRunAnywhereLlama.hpp +107 -0
  12. package/cpp/bridges/LLMBridge.cpp +209 -0
  13. package/cpp/bridges/LLMBridge.hpp +109 -0
  14. package/cpp/bridges/StructuredOutputBridge.cpp +151 -0
  15. package/cpp/bridges/StructuredOutputBridge.hpp +66 -0
  16. package/cpp/rac_llm_llamacpp.h +34 -0
  17. package/ios/.testlocal +0 -0
  18. package/ios/Frameworks/RABackendLLAMACPP.xcframework/Info.plist +44 -0
  19. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/RABackendLLAMACPP.framework/Headers/RABackendLLAMACPP.h +2 -0
  20. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/RABackendLLAMACPP.framework/Info.plist +11 -0
  21. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/RABackendLLAMACPP.framework/Modules/module.modulemap +5 -0
  22. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/RABackendLLAMACPP.framework/RABackendLLAMACPP +0 -0
  23. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/RABackendLLAMACPP.framework/Headers/RABackendLLAMACPP.h +2 -0
  24. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/RABackendLLAMACPP.framework/Info.plist +11 -0
  25. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/RABackendLLAMACPP.framework/Modules/module.modulemap +5 -0
  26. package/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/RABackendLLAMACPP.framework/RABackendLLAMACPP +0 -0
  27. package/ios/LlamaCPPBackend.podspec +127 -0
  28. package/nitro.json +16 -0
  29. package/nitrogen/generated/.gitattributes +1 -0
  30. package/nitrogen/generated/android/kotlin/com/margelo/nitro/runanywhere/llama/runanywherellamaOnLoad.kt +35 -0
  31. package/nitrogen/generated/android/runanywherellama+autolinking.cmake +81 -0
  32. package/nitrogen/generated/android/runanywherellama+autolinking.gradle +27 -0
  33. package/nitrogen/generated/android/runanywherellamaOnLoad.cpp +44 -0
  34. package/nitrogen/generated/android/runanywherellamaOnLoad.hpp +25 -0
  35. package/nitrogen/generated/ios/RunAnywhereLlama+autolinking.rb +60 -0
  36. package/nitrogen/generated/ios/RunAnywhereLlama-Swift-Cxx-Bridge.cpp +17 -0
  37. package/nitrogen/generated/ios/RunAnywhereLlama-Swift-Cxx-Bridge.hpp +27 -0
  38. package/nitrogen/generated/ios/RunAnywhereLlama-Swift-Cxx-Umbrella.hpp +38 -0
  39. package/nitrogen/generated/ios/RunAnywhereLlamaAutolinking.mm +35 -0
  40. package/nitrogen/generated/ios/RunAnywhereLlamaAutolinking.swift +12 -0
  41. package/nitrogen/generated/shared/c++/HybridRunAnywhereLlamaSpec.cpp +33 -0
  42. package/nitrogen/generated/shared/c++/HybridRunAnywhereLlamaSpec.hpp +77 -0
  43. package/package.json +60 -0
  44. package/react-native.config.js +14 -0
  45. package/src/LlamaCPP.ts +206 -0
  46. package/src/LlamaCppProvider.ts +120 -0
  47. package/src/index.ts +59 -0
  48. package/src/native/NativeRunAnywhereLlama.ts +58 -0
  49. package/src/native/index.ts +11 -0
  50. package/src/specs/RunAnywhereLlama.nitro.ts +160 -0
@@ -0,0 +1,109 @@
1
+ /**
2
+ * @file LLMBridge.hpp
3
+ * @brief LLM capability bridge for React Native
4
+ *
5
+ * Matches Swift's CppBridge+LLM.swift pattern, providing:
6
+ * - Model lifecycle (load/unload)
7
+ * - Text generation (sync and streaming)
8
+ * - Cancellation support
9
+ *
10
+ * Aligned with rac_llm_component.h and rac_llm_types.h API.
11
+ * RACommons is REQUIRED - no stub implementations.
12
+ */
13
+
14
+ #pragma once
15
+
16
+ #include <functional>
17
+ #include <memory>
18
+ #include <string>
19
+
20
+ // RACommons LLM headers - REQUIRED (flat include paths)
21
+ #include "rac_llm_component.h"
22
+ #include "rac_llm_types.h"
23
+
24
+ namespace runanywhere {
25
+ namespace bridges {
26
+
27
+ /**
28
+ * @brief LLM streaming callbacks
29
+ */
30
+ struct LLMStreamCallbacks {
31
+ std::function<bool(const std::string&)> onToken;
32
+ std::function<void(const std::string&, int, double)> onComplete;
33
+ std::function<void(int, const std::string&)> onError;
34
+ };
35
+
36
+ /**
37
+ * @brief LLM generation options
38
+ */
39
+ struct LLMOptions {
40
+ int maxTokens = 512;
41
+ double temperature = 0.7;
42
+ double topP = 0.9;
43
+ int topK = 40;
44
+ std::string systemPrompt;
45
+ std::string stopSequence;
46
+ };
47
+
48
+ /**
49
+ * @brief LLM generation result
50
+ */
51
+ struct LLMResult {
52
+ std::string text;
53
+ int tokenCount = 0;
54
+ double durationMs = 0.0;
55
+ bool cancelled = false;
56
+ };
57
+
58
+ /**
59
+ * @brief LLM capability bridge singleton
60
+ *
61
+ * Matches CppBridge+LLM.swift API.
62
+ * NOTE: RACommons is REQUIRED. All methods will throw std::runtime_error if
63
+ * the underlying C API calls fail.
64
+ */
65
+ class LLMBridge {
66
+ public:
67
+ static LLMBridge& shared();
68
+
69
+ // Lifecycle
70
+ bool isLoaded() const;
71
+ std::string currentModelId() const;
72
+ /**
73
+ * Load an LLM model
74
+ * @param modelPath Path to the model file (.gguf)
75
+ * @param modelId Model identifier for telemetry (e.g., "smollm2-360m-q8_0")
76
+ * @param modelName Human-readable model name (e.g., "SmolLM2 360M Q8_0")
77
+ * @return RAC_SUCCESS or error code
78
+ */
79
+ rac_result_t loadModel(const std::string& modelPath,
80
+ const std::string& modelId = "",
81
+ const std::string& modelName = "");
82
+ rac_result_t unload();
83
+ void cleanup();
84
+ void cancel();
85
+ void destroy();
86
+
87
+ // Generation
88
+ LLMResult generate(const std::string& prompt, const LLMOptions& options);
89
+ void generateStream(const std::string& prompt, const LLMOptions& options,
90
+ const LLMStreamCallbacks& callbacks);
91
+
92
+ // State
93
+ rac_lifecycle_state_t getState() const;
94
+
95
+ private:
96
+ LLMBridge();
97
+ ~LLMBridge();
98
+
99
+ // Disable copy/move
100
+ LLMBridge(const LLMBridge&) = delete;
101
+ LLMBridge& operator=(const LLMBridge&) = delete;
102
+
103
+ rac_handle_t handle_ = nullptr;
104
+ std::string loadedModelId_;
105
+ bool cancellationRequested_ = false;
106
+ };
107
+
108
+ } // namespace bridges
109
+ } // namespace runanywhere
@@ -0,0 +1,151 @@
1
+ /**
2
+ * @file StructuredOutputBridge.cpp
3
+ * @brief Structured Output bridge implementation
4
+ *
5
+ * Uses RACommons structured output API for prompt preparation and JSON extraction.
6
+ * Uses LLMBridge for actual text generation.
7
+ * RACommons is REQUIRED - no stub implementations.
8
+ */
9
+
10
+ #include "StructuredOutputBridge.hpp"
11
+ #include "LLMBridge.hpp"
12
+ #include <stdexcept>
13
+ #include <cstdlib> // For free()
14
+
15
+ // Unified logging via rac_logger.h
16
+ #include "rac_logger.h"
17
+
18
+ // Log category for this module
19
+ #define LOG_CATEGORY "LLM.StructuredOutput"
20
+
21
+ namespace runanywhere {
22
+ namespace bridges {
23
+
24
+ StructuredOutputBridge& StructuredOutputBridge::shared() {
25
+ static StructuredOutputBridge instance;
26
+ return instance;
27
+ }
28
+
29
+ StructuredOutputResult StructuredOutputBridge::generate(
30
+ const std::string& prompt,
31
+ const std::string& schema,
32
+ const std::string& optionsJson
33
+ ) {
34
+ StructuredOutputResult result;
35
+
36
+ if (!LLMBridge::shared().isLoaded()) {
37
+ throw std::runtime_error("StructuredOutputBridge: LLM model not loaded. Call loadModel() first.");
38
+ }
39
+
40
+ // Prepare the prompt using RACommons structured output API
41
+ rac_structured_output_config_t config = RAC_STRUCTURED_OUTPUT_DEFAULT;
42
+ config.json_schema = schema.c_str();
43
+ config.include_schema_in_prompt = RAC_TRUE;
44
+
45
+ char* preparedPrompt = nullptr;
46
+ rac_result_t prepResult = rac_structured_output_prepare_prompt(
47
+ prompt.c_str(),
48
+ &config,
49
+ &preparedPrompt
50
+ );
51
+
52
+ std::string structuredPrompt;
53
+ if (prepResult == RAC_SUCCESS && preparedPrompt) {
54
+ structuredPrompt = preparedPrompt;
55
+ free(preparedPrompt);
56
+ } else {
57
+ // Fallback: Build prompt manually
58
+ RAC_LOG_DEBUG(LOG_CATEGORY, "Fallback to manual prompt preparation");
59
+ structuredPrompt =
60
+ "You must respond with valid JSON matching this schema:\n" +
61
+ schema + "\n\n" +
62
+ "User request: " + prompt + "\n\n" +
63
+ "Respond with valid JSON only, no other text:";
64
+ }
65
+
66
+ // Generate using LLMBridge
67
+ LLMOptions opts;
68
+ opts.maxTokens = 1024;
69
+ opts.temperature = 0.1; // Lower temperature for structured output
70
+ // TODO: Parse optionsJson if provided
71
+
72
+ LLMResult llmResult;
73
+ try {
74
+ llmResult = LLMBridge::shared().generate(structuredPrompt, opts);
75
+ } catch (const std::runtime_error& e) {
76
+ throw std::runtime_error("StructuredOutputBridge: LLM generation failed: " + std::string(e.what()));
77
+ }
78
+
79
+ if (llmResult.text.empty()) {
80
+ throw std::runtime_error("StructuredOutputBridge: LLM generation returned empty text.");
81
+ }
82
+
83
+ // Extract JSON using RACommons API
84
+ char* extractedJson = nullptr;
85
+ size_t jsonLength = 0;
86
+ rac_result_t extractResult = rac_structured_output_extract_json(
87
+ llmResult.text.c_str(),
88
+ &extractedJson,
89
+ &jsonLength
90
+ );
91
+
92
+ if (extractResult == RAC_SUCCESS && extractedJson && jsonLength > 0) {
93
+ result.json = std::string(extractedJson, jsonLength);
94
+ result.success = true;
95
+ free(extractedJson);
96
+ RAC_LOG_INFO(LOG_CATEGORY, "Successfully extracted JSON (%zu bytes)", jsonLength);
97
+ } else {
98
+ // Fallback: Try manual extraction
99
+ RAC_LOG_DEBUG(LOG_CATEGORY, "Fallback to manual JSON extraction");
100
+
101
+ std::string text = llmResult.text;
102
+ size_t start = 0, end = 0;
103
+
104
+ // Try using RACommons to find JSON boundaries
105
+ if (rac_structured_output_find_complete_json(text.c_str(), &start, &end) == RAC_TRUE) {
106
+ result.json = text.substr(start, end - start);
107
+ result.success = true;
108
+ } else {
109
+ // Manual fallback
110
+ start = text.find('{');
111
+ end = text.rfind('}');
112
+
113
+ if (start != std::string::npos && end != std::string::npos && end > start) {
114
+ result.json = text.substr(start, end - start + 1);
115
+ result.success = true;
116
+ } else {
117
+ // Try array
118
+ start = text.find('[');
119
+ end = text.rfind(']');
120
+ if (start != std::string::npos && end != std::string::npos && end > start) {
121
+ result.json = text.substr(start, end - start + 1);
122
+ result.success = true;
123
+ } else {
124
+ throw std::runtime_error("StructuredOutputBridge: Could not extract valid JSON from response: " + text);
125
+ }
126
+ }
127
+ }
128
+ }
129
+
130
+ // Validate the extracted JSON (optional but good for debugging)
131
+ if (result.success) {
132
+ rac_structured_output_validation_t validation = {};
133
+ rac_result_t valResult = rac_structured_output_validate(
134
+ result.json.c_str(),
135
+ &config,
136
+ &validation
137
+ );
138
+
139
+ if (valResult != RAC_SUCCESS || validation.is_valid != RAC_TRUE) {
140
+ RAC_LOG_WARNING(LOG_CATEGORY, "Extracted JSON failed validation");
141
+ // Don't throw - the JSON was extracted, just log warning
142
+ }
143
+
144
+ rac_structured_output_validation_free(&validation);
145
+ }
146
+
147
+ return result;
148
+ }
149
+
150
+ } // namespace bridges
151
+ } // namespace runanywhere
@@ -0,0 +1,66 @@
1
+ /**
2
+ * @file StructuredOutputBridge.hpp
3
+ * @brief Structured Output bridge for React Native
4
+ *
5
+ * Matches Swift's RunAnywhere+StructuredOutput.swift pattern, providing:
6
+ * - JSON schema-guided generation
7
+ * - Structured output extraction
8
+ *
9
+ * Aligned with rac_llm_structured_output.h API.
10
+ * RACommons is REQUIRED - no stub implementations.
11
+ */
12
+
13
+ #pragma once
14
+
15
+ #include <string>
16
+
17
+ // RACommons structured output header - REQUIRED (flat include paths)
18
+ #include "rac_llm_structured_output.h"
19
+ #include "rac_llm_types.h"
20
+
21
+ namespace runanywhere {
22
+ namespace bridges {
23
+
24
+ /**
25
+ * @brief Structured output result
26
+ */
27
+ struct StructuredOutputResult {
28
+ std::string json;
29
+ bool success = false;
30
+ std::string error;
31
+ };
32
+
33
+ /**
34
+ * @brief Structured Output bridge singleton
35
+ *
36
+ * Generates LLM output following a JSON schema.
37
+ * NOTE: RACommons is REQUIRED. All methods will throw std::runtime_error if
38
+ * the underlying C API calls fail.
39
+ */
40
+ class StructuredOutputBridge {
41
+ public:
42
+ static StructuredOutputBridge& shared();
43
+
44
+ /**
45
+ * Generate structured output following a JSON schema
46
+ * @param prompt User prompt
47
+ * @param schema JSON schema string
48
+ * @param optionsJson Generation options
49
+ * @return Structured output result
50
+ */
51
+ StructuredOutputResult generate(
52
+ const std::string& prompt,
53
+ const std::string& schema,
54
+ const std::string& optionsJson = ""
55
+ );
56
+
57
+ private:
58
+ StructuredOutputBridge() = default;
59
+ ~StructuredOutputBridge() = default;
60
+
61
+ StructuredOutputBridge(const StructuredOutputBridge&) = delete;
62
+ StructuredOutputBridge& operator=(const StructuredOutputBridge&) = delete;
63
+ };
64
+
65
+ } // namespace bridges
66
+ } // namespace runanywhere
@@ -0,0 +1,34 @@
1
+ /**
2
+ * @file rac_llm_llamacpp.h
3
+ * @brief Backend registration API for LlamaCPP
4
+ *
5
+ * Forward declarations for LlamaCPP backend registration functions.
6
+ * These symbols are exported by RABackendLLAMACPP.xcframework.
7
+ */
8
+
9
+ #ifndef RAC_LLM_LLAMACPP_H
10
+ #define RAC_LLM_LLAMACPP_H
11
+
12
+ #include "rac_types.h"
13
+
14
+ #ifdef __cplusplus
15
+ extern "C" {
16
+ #endif
17
+
18
+ /**
19
+ * Register the LlamaCPP backend with the RACommons service registry.
20
+ * @return RAC_SUCCESS on success, RAC_ERROR_MODULE_ALREADY_REGISTERED if already registered
21
+ */
22
+ rac_result_t rac_backend_llamacpp_register(void);
23
+
24
+ /**
25
+ * Unregister the LlamaCPP backend from the RACommons service registry.
26
+ * @return RAC_SUCCESS on success
27
+ */
28
+ rac_result_t rac_backend_llamacpp_unregister(void);
29
+
30
+ #ifdef __cplusplus
31
+ }
32
+ #endif
33
+
34
+ #endif /* RAC_LLM_LLAMACPP_H */
package/ios/.testlocal ADDED
File without changes
@@ -0,0 +1,44 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3
+ <plist version="1.0">
4
+ <dict>
5
+ <key>AvailableLibraries</key>
6
+ <array>
7
+ <dict>
8
+ <key>BinaryPath</key>
9
+ <string>RABackendLLAMACPP.framework/RABackendLLAMACPP</string>
10
+ <key>LibraryIdentifier</key>
11
+ <string>ios-arm64</string>
12
+ <key>LibraryPath</key>
13
+ <string>RABackendLLAMACPP.framework</string>
14
+ <key>SupportedArchitectures</key>
15
+ <array>
16
+ <string>arm64</string>
17
+ </array>
18
+ <key>SupportedPlatform</key>
19
+ <string>ios</string>
20
+ </dict>
21
+ <dict>
22
+ <key>BinaryPath</key>
23
+ <string>RABackendLLAMACPP.framework/RABackendLLAMACPP</string>
24
+ <key>LibraryIdentifier</key>
25
+ <string>ios-arm64_x86_64-simulator</string>
26
+ <key>LibraryPath</key>
27
+ <string>RABackendLLAMACPP.framework</string>
28
+ <key>SupportedArchitectures</key>
29
+ <array>
30
+ <string>arm64</string>
31
+ <string>x86_64</string>
32
+ </array>
33
+ <key>SupportedPlatform</key>
34
+ <string>ios</string>
35
+ <key>SupportedPlatformVariant</key>
36
+ <string>simulator</string>
37
+ </dict>
38
+ </array>
39
+ <key>CFBundlePackageType</key>
40
+ <string>XFWK</string>
41
+ <key>XCFrameworkFormatVersion</key>
42
+ <string>1.0</string>
43
+ </dict>
44
+ </plist>
@@ -0,0 +1,11 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3
+ <plist version="1.0">
4
+ <dict>
5
+ <key>CFBundleExecutable</key><string>RABackendLLAMACPP</string>
6
+ <key>CFBundleIdentifier</key><string>ai.runanywhere.RABackendLLAMACPP</string>
7
+ <key>CFBundlePackageType</key><string>FMWK</string>
8
+ <key>CFBundleShortVersionString</key><string>0.1.5</string>
9
+ <key>MinimumOSVersion</key><string>13.0</string>
10
+ </dict>
11
+ </plist>
@@ -0,0 +1,5 @@
1
+ framework module RABackendLLAMACPP {
2
+ umbrella header "RABackendLLAMACPP.h"
3
+ export *
4
+ module * { export * }
5
+ }
@@ -0,0 +1,11 @@
1
+ <?xml version="1.0" encoding="UTF-8"?>
2
+ <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3
+ <plist version="1.0">
4
+ <dict>
5
+ <key>CFBundleExecutable</key><string>RABackendLLAMACPP</string>
6
+ <key>CFBundleIdentifier</key><string>ai.runanywhere.RABackendLLAMACPP</string>
7
+ <key>CFBundlePackageType</key><string>FMWK</string>
8
+ <key>CFBundleShortVersionString</key><string>0.1.5</string>
9
+ <key>MinimumOSVersion</key><string>13.0</string>
10
+ </dict>
11
+ </plist>
@@ -0,0 +1,5 @@
1
+ framework module RABackendLLAMACPP {
2
+ umbrella header "RABackendLLAMACPP.h"
3
+ export *
4
+ module * { export * }
5
+ }
@@ -0,0 +1,127 @@
require "json"

package = JSON.parse(File.read(File.join(__dir__, "..", "package.json")))

# =============================================================================
# Version Constants (MUST match Swift Package.swift)
# NOTE(review): package version is 0.16.0 while CORE_VERSION is 0.1.4 and the
# bundled Info.plist says 0.1.5 - confirm these are intentionally independent.
# =============================================================================
CORE_VERSION = "0.1.4"

# =============================================================================
# Binary Source - RABackendLlamaCPP release artifacts from runanywhere-sdks
# =============================================================================
GITHUB_ORG = "RunanywhereAI"
CORE_REPO = "runanywhere-sdks"

# =============================================================================
# testLocal Toggle
# Set RA_TEST_LOCAL=1 or create .testlocal file to use local binaries
# =============================================================================
TEST_LOCAL = ENV['RA_TEST_LOCAL'] == '1' || File.exist?(File.join(__dir__, '.testlocal'))

Pod::Spec.new do |s|
  s.name = "LlamaCPPBackend"
  s.module_name = "RunAnywhereLlama"
  s.version = package["version"]
  s.summary = package["description"]
  s.homepage = "https://runanywhere.com"
  s.license = package["license"]
  s.authors = "RunAnywhere AI"

  s.platforms = { :ios => "15.1" }
  s.source = { :git => "https://github.com/RunanywhereAI/sdks.git", :tag => "#{s.version}" }

  # ===========================================================================
  # Llama Backend - RABackendLlamaCPP
  # Downloaded from the #{CORE_REPO} GitHub release (core-v#{CORE_VERSION})
  # unless TEST_LOCAL is set, in which case a pre-placed local copy is used.
  # ===========================================================================
  if TEST_LOCAL
    puts "[LlamaCPPBackend] Using LOCAL RABackendLlamaCPP from Frameworks/"
    s.vendored_frameworks = "Frameworks/RABackendLLAMACPP.xcframework"
  else
    s.prepare_command = <<-CMD
      set -e

      FRAMEWORK_DIR="Frameworks"
      VERSION="#{CORE_VERSION}"
      VERSION_FILE="$FRAMEWORK_DIR/.llamacpp_version"

      # Check if already downloaded with correct version
      if [ -f "$VERSION_FILE" ] && [ -d "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework" ]; then
        CURRENT_VERSION=$(cat "$VERSION_FILE")
        if [ "$CURRENT_VERSION" = "$VERSION" ]; then
          echo "✅ RABackendLLAMACPP.xcframework version $VERSION already downloaded"
          exit 0
        fi
      fi

      echo "📦 Downloading RABackendLlamaCPP.xcframework version $VERSION..."

      mkdir -p "$FRAMEWORK_DIR"
      rm -rf "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework"

      # Download from the GitHub release assets
      DOWNLOAD_URL="https://github.com/#{GITHUB_ORG}/#{CORE_REPO}/releases/download/core-v$VERSION/RABackendLlamaCPP-ios-v$VERSION.zip"
      ZIP_FILE="/tmp/RABackendLlamaCPP.zip"

      echo "  URL: $DOWNLOAD_URL"

      curl -L -f -o "$ZIP_FILE" "$DOWNLOAD_URL" || {
        echo "❌ Failed to download RABackendLlamaCPP from $DOWNLOAD_URL"
        exit 1
      }

      echo "📂 Extracting RABackendLLAMACPP.xcframework..."
      unzip -q -o "$ZIP_FILE" -d "$FRAMEWORK_DIR/"
      rm -f "$ZIP_FILE"

      # Stamp the version file ONLY after verifying extraction succeeded,
      # otherwise a failed run would poison the cache check above.
      if [ -d "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework" ]; then
        echo "$VERSION" > "$VERSION_FILE"
        echo "✅ RABackendLLAMACPP.xcframework installed successfully"
      else
        echo "❌ RABackendLLAMACPP.xcframework extraction failed"
        exit 1
      fi
    CMD

    s.vendored_frameworks = "Frameworks/RABackendLLAMACPP.xcframework"
  end

  # Source files - Llama C++ implementation
  s.source_files = [
    "../cpp/HybridRunAnywhereLlama.cpp",
    "../cpp/HybridRunAnywhereLlama.hpp",
    "../cpp/bridges/**/*.{cpp,hpp}",
  ]

  # Build settings
  s.pod_target_xcconfig = {
    "CLANG_CXX_LANGUAGE_STANDARD" => "c++17",
    "HEADER_SEARCH_PATHS" => [
      "$(PODS_TARGET_SRCROOT)/../cpp",
      "$(PODS_TARGET_SRCROOT)/../cpp/bridges",
      "$(PODS_TARGET_SRCROOT)/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/Headers",
      "$(PODS_TARGET_SRCROOT)/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/Headers",
      "$(PODS_ROOT)/Headers/Public",
    ].join(" "),
    "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) HAS_LLAMACPP=1",
    "DEFINES_MODULE" => "YES",
    "SWIFT_OBJC_INTEROP_MODE" => "objcxx",
  }

  # Required system libraries
  s.libraries = "c++"
  s.frameworks = "Accelerate", "Foundation", "CoreML"

  # Dependencies
  s.dependency 'RunAnywhereCore'
  s.dependency 'React-jsi'
  s.dependency 'React-callinvoker'

  # Load Nitrogen-generated autolinking
  load '../nitrogen/generated/ios/RunAnywhereLlama+autolinking.rb'
  add_nitrogen_files(s)

  install_modules_dependencies(s)
end
package/nitro.json ADDED
@@ -0,0 +1,16 @@
1
+ {
2
+ "cxxNamespace": ["runanywhere", "llama"],
3
+ "ios": {
4
+ "iosModuleName": "RunAnywhereLlama"
5
+ },
6
+ "android": {
7
+ "androidNamespace": ["runanywhere", "llama"],
8
+ "androidCxxLibName": "runanywherellama"
9
+ },
10
+ "autolinking": {
11
+ "RunAnywhereLlama": {
12
+ "cpp": "HybridRunAnywhereLlama"
13
+ }
14
+ },
15
+ "ignorePaths": ["node_modules", "lib", "example"]
16
+ }
@@ -0,0 +1 @@
1
+ ** linguist-generated=true
@@ -0,0 +1,35 @@
///
/// runanywherellamaOnLoad.kt
/// This file was generated by nitrogen. DO NOT MODIFY THIS FILE.
/// https://github.com/mrousavy/nitro
/// Copyright © 2026 Marc Rousavy @ Margelo
///

package com.margelo.nitro.runanywhere.llama

import android.util.Log

internal class runanywherellamaOnLoad {
    companion object {
        private const val TAG = "runanywherellamaOnLoad"
        private var didLoad = false

        /**
         * Initializes the native part of "runanywherellama".
         * This method is idempotent and can be called more than once.
         */
        @JvmStatic
        fun initializeNative() {
            // Already loaded in this process - nothing to do.
            if (didLoad) return
            try {
                Log.i(TAG, "Loading runanywherellama C++ library...")
                System.loadLibrary("runanywherellama")
                Log.i(TAG, "Successfully loaded runanywherellama C++ library!")
                didLoad = true
            } catch (e: Error) {
                // loadLibrary failures surface as java.lang.Error
                // (e.g. UnsatisfiedLinkError); log context, then rethrow.
                Log.e(
                    TAG,
                    "Failed to load runanywherellama C++ library! Is it properly installed and linked? " +
                        "Is the name correct? (see `CMakeLists.txt`, at `add_library(...)`)",
                    e,
                )
                throw e
            }
        }
    }
}