@runanywhere/llamacpp 0.16.7 → 0.16.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,23 +2,6 @@ require "json"
2
2
 
3
3
  package = JSON.parse(File.read(File.join(__dir__, "package.json")))
4
4
 
5
- # =============================================================================
6
- # Version Constants (MUST match Swift Package.swift)
7
- # =============================================================================
8
- LLAMACPP_VERSION = "0.1.4"
9
-
10
- # =============================================================================
11
- # Binary Source - RABackendLlamaCPP from runanywhere-sdks
12
- # =============================================================================
13
- LLAMACPP_GITHUB_ORG = "RunanywhereAI"
14
- LLAMACPP_REPO = "runanywhere-sdks"
15
-
16
- # =============================================================================
17
- # testLocal Toggle
18
- # Set RA_TEST_LOCAL=1 or create .testlocal file to use local binaries
19
- # =============================================================================
20
- LLAMACPP_TEST_LOCAL = ENV['RA_TEST_LOCAL'] == '1' || File.exist?(File.join(__dir__, 'ios', '.testlocal'))
21
-
22
5
  Pod::Spec.new do |s|
23
6
  s.name = "RunAnywhereLlama"
24
7
  s.version = package["version"]
@@ -31,70 +14,19 @@ Pod::Spec.new do |s|
31
14
  s.source = { :git => "https://github.com/RunanywhereAI/sdks.git", :tag => "#{s.version}" }
32
15
 
33
16
  # =============================================================================
34
- # Llama Backend - RABackendLlamaCPP
35
- # Downloads from runanywhere-sdks (NOT the sdks repo)
17
+ # LlamaCPP Backend - xcframework is bundled in npm package
18
+ # No downloads needed - framework is included in ios/Frameworks/
36
19
  # =============================================================================
37
- if LLAMACPP_TEST_LOCAL
38
- puts "[RunAnywhereLlama] Using LOCAL RABackendLlamaCPP from ios/Frameworks/"
39
- s.vendored_frameworks = "ios/Frameworks/RABackendLLAMACPP.xcframework"
40
- else
41
- s.prepare_command = <<-CMD
42
- set -e
43
-
44
- FRAMEWORK_DIR="ios/Frameworks"
45
- VERSION="#{LLAMACPP_VERSION}"
46
- VERSION_FILE="$FRAMEWORK_DIR/.llamacpp_version"
47
-
48
- # Check if already downloaded with correct version
49
- if [ -f "$VERSION_FILE" ] && [ -d "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework" ]; then
50
- CURRENT_VERSION=$(cat "$VERSION_FILE")
51
- if [ "$CURRENT_VERSION" = "$VERSION" ]; then
52
- echo "✅ RABackendLLAMACPP.xcframework version $VERSION already downloaded"
53
- exit 0
54
- fi
55
- fi
56
-
57
- echo "📦 Downloading RABackendLlamaCPP.xcframework version $VERSION..."
58
-
59
- mkdir -p "$FRAMEWORK_DIR"
60
- rm -rf "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework"
61
-
62
- # Download from runanywhere-sdks
63
- DOWNLOAD_URL="https://github.com/#{LLAMACPP_GITHUB_ORG}/#{LLAMACPP_REPO}/releases/download/core-v$VERSION/RABackendLlamaCPP-ios-v$VERSION.zip"
64
- ZIP_FILE="/tmp/RABackendLlamaCPP.zip"
65
-
66
- echo " URL: $DOWNLOAD_URL"
67
-
68
- curl -L -f -o "$ZIP_FILE" "$DOWNLOAD_URL" || {
69
- echo "❌ Failed to download RABackendLlamaCPP from $DOWNLOAD_URL"
70
- exit 1
71
- }
72
-
73
- echo "📂 Extracting RABackendLLAMACPP.xcframework..."
74
- unzip -q -o "$ZIP_FILE" -d "$FRAMEWORK_DIR/"
75
- rm -f "$ZIP_FILE"
76
-
77
- echo "$VERSION" > "$VERSION_FILE"
78
-
79
- if [ -d "$FRAMEWORK_DIR/RABackendLLAMACPP.xcframework" ]; then
80
- echo "✅ RABackendLLAMACPP.xcframework installed successfully"
81
- else
82
- echo "❌ RABackendLLAMACPP.xcframework extraction failed"
83
- exit 1
84
- fi
85
- CMD
86
-
87
- s.vendored_frameworks = "ios/Frameworks/RABackendLLAMACPP.xcframework"
88
- end
20
+ puts "[RunAnywhereLlama] Using bundled RABackendLLAMACPP.xcframework from npm package"
21
+ s.vendored_frameworks = "ios/Frameworks/RABackendLLAMACPP.xcframework"
89
22
 
90
- # Source files - Llama C++ implementation
23
+ # Source files
91
24
  s.source_files = [
92
25
  "cpp/HybridRunAnywhereLlama.cpp",
93
26
  "cpp/HybridRunAnywhereLlama.hpp",
94
27
  "cpp/bridges/**/*.{cpp,hpp}",
95
28
  ]
96
29
 
97
- # Build settings
98
30
  s.pod_target_xcconfig = {
99
31
  "CLANG_CXX_LANGUAGE_STANDARD" => "c++17",
100
32
  "HEADER_SEARCH_PATHS" => [
@@ -102,11 +34,8 @@ Pod::Spec.new do |s|
102
34
  "$(PODS_TARGET_SRCROOT)/cpp/bridges",
103
35
  "$(PODS_TARGET_SRCROOT)/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64/RABackendLLAMACPP.framework/Headers",
104
36
  "$(PODS_TARGET_SRCROOT)/ios/Frameworks/RABackendLLAMACPP.xcframework/ios-arm64_x86_64-simulator/RABackendLLAMACPP.framework/Headers",
105
- # RACommons headers from sibling core package (for rac_logger.h)
106
37
  "$(PODS_TARGET_SRCROOT)/../core/ios/Frameworks/RACommons.xcframework/ios-arm64/RACommons.framework/Headers",
107
38
  "$(PODS_TARGET_SRCROOT)/../core/ios/Frameworks/RACommons.xcframework/ios-arm64_x86_64-simulator/RACommons.framework/Headers",
108
- "$(PODS_TARGET_SRCROOT)/../core/ios/Binaries/RACommons.xcframework/ios-arm64/RACommons.framework/Headers",
109
- "$(PODS_TARGET_SRCROOT)/../core/ios/Binaries/RACommons.xcframework/ios-arm64_x86_64-simulator/RACommons.framework/Headers",
110
39
  "$(PODS_ROOT)/Headers/Public",
111
40
  ].join(" "),
112
41
  "GCC_PREPROCESSOR_DEFINITIONS" => "$(inherited) HAS_LLAMACPP=1",
@@ -114,16 +43,13 @@ Pod::Spec.new do |s|
114
43
  "SWIFT_OBJC_INTEROP_MODE" => "objcxx",
115
44
  }
116
45
 
117
- # Required system libraries
118
46
  s.libraries = "c++"
119
47
  s.frameworks = "Accelerate", "Foundation", "CoreML"
120
48
 
121
- # Dependencies
122
49
  s.dependency 'RunAnywhereCore'
123
50
  s.dependency 'React-jsi'
124
51
  s.dependency 'React-callinvoker'
125
52
 
126
- # Load Nitrogen-generated autolinking
127
53
  load 'nitrogen/generated/ios/RunAnywhereLlama+autolinking.rb'
128
54
  add_nitrogen_files(s)
129
55
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@runanywhere/llamacpp",
3
- "version": "0.16.7",
3
+ "version": "0.16.9",
4
4
  "description": "LlamaCpp backend for RunAnywhere React Native SDK - GGUF model support for on-device LLM",
5
5
  "main": "src/index.ts",
6
6
  "types": "src/index.ts",
package/ios/.testlocal DELETED
File without changes