red-candle 1.3.1 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Cargo.lock +11 -20
- data/ext/candle/Cargo.toml +1 -1
- data/ext/candle/src/ruby/device.rs +8 -7
- data/ext/candle/src/ruby/dtype.rs +3 -2
- data/ext/candle/src/ruby/embedding_model.rs +31 -14
- data/ext/candle/src/ruby/errors.rs +6 -4
- data/ext/candle/src/ruby/llm.rs +78 -68
- data/ext/candle/src/ruby/ner.rs +106 -95
- data/ext/candle/src/ruby/reranker.rs +51 -38
- data/ext/candle/src/ruby/structured.rs +13 -12
- data/ext/candle/src/ruby/tensor.rs +7 -6
- data/ext/candle/src/ruby/tokenizer.rs +101 -84
- data/lib/candle/version.rb +1 -1
- metadata +31 -6
data/ext/candle/src/ruby/tokenizer.rs
CHANGED

@@ -1,4 +1,4 @@
-use magnus::{
+use magnus::{function, method, prelude::*, Error, Module, RArray, RHash, RModule, Ruby, TryConvert};
 use crate::tokenizer::{TokenizerWrapper as InnerTokenizer, loader::TokenizerLoader};
 use crate::ruby::Result;
 
@@ -9,136 +9,151 @@ pub struct Tokenizer(pub InnerTokenizer);
 impl Tokenizer {
     /// Create a new tokenizer from a file path
     pub fn from_file(path: String) -> Result<Self> {
+        let ruby = Ruby::get().unwrap();
         let tokenizer = TokenizerLoader::from_file(&path)
-            .map_err(|e| Error::new(
+            .map_err(|e| Error::new(ruby.exception_runtime_error(), e.to_string()))?;
         Ok(Self(InnerTokenizer::new(tokenizer)))
     }
 
     /// Create a new tokenizer from HuggingFace model ID
     pub fn from_pretrained(model_id: String) -> Result<Self> {
+        let ruby = Ruby::get().unwrap();
+        let runtime_error = ruby.exception_runtime_error();
         // Use tokio runtime for async operations
         let rt = tokio::runtime::Runtime::new()
-            .map_err(|e| Error::new(
-
+            .map_err(|e| Error::new(runtime_error, format!("Failed to create runtime: {}", e)))?;
+
         let tokenizer = rt.block_on(async {
             TokenizerLoader::from_hf_hub(&model_id, None).await
         })
-            .map_err(|e| Error::new(
-
+            .map_err(|e| Error::new(runtime_error, e.to_string()))?;
+
         Ok(Self(InnerTokenizer::new(tokenizer)))
     }
 
     /// Encode text into token IDs
     pub fn encode(&self, text: String, add_special_tokens: Option<bool>) -> Result<RArray> {
+        let ruby = Ruby::get().unwrap();
         let add_special = add_special_tokens.unwrap_or(true);
         let token_ids = self.0.encode(&text, add_special)
-            .map_err(|e| Error::new(
-
-        Ok(
+            .map_err(|e| Error::new(ruby.exception_runtime_error(), e.to_string()))?;
+
+        Ok(ruby.ary_from_vec(token_ids.into_iter().map(|id| id as i64).collect()))
     }
-
+
     /// Encode text into token strings (words/subwords)
     pub fn encode_to_tokens(&self, text: String, add_special_tokens: Option<bool>) -> Result<RArray> {
+        let ruby = Ruby::get().unwrap();
+        let runtime_error = ruby.exception_runtime_error();
         let add_special = add_special_tokens.unwrap_or(true);
         let token_ids = self.0.encode(&text, add_special)
-            .map_err(|e| Error::new(
-
+            .map_err(|e| Error::new(runtime_error, e.to_string()))?;
+
         let mut tokens = Vec::new();
         for id in token_ids {
             let token = self.0.token_to_piece(id)
-                .map_err(|e| Error::new(
+                .map_err(|e| Error::new(runtime_error, e.to_string()))?;
             tokens.push(token);
         }
-
-        Ok(
+
+        Ok(ruby.ary_from_vec(tokens))
     }
 
     /// Encode multiple texts in batch
     pub fn encode_batch(&self, texts: RArray, add_special_tokens: Option<bool>) -> Result<RArray> {
+        let ruby = Ruby::get().unwrap();
+        let runtime_error = ruby.exception_runtime_error();
         let texts: Vec<String> = texts.to_vec()?;
         let add_special = add_special_tokens.unwrap_or(true);
-
+
         let token_ids_batch = self.0.encode_batch(texts, add_special)
-            .map_err(|e| Error::new(
-
-        let result =
+            .map_err(|e| Error::new(runtime_error, e.to_string()))?;
+
+        let result = ruby.ary_new();
         for token_ids in token_ids_batch {
-            result.push(
+            result.push(ruby.ary_from_vec(token_ids.into_iter().map(|id| id as i64).collect()))?;
         }
-
+
         Ok(result)
     }
-
+
     /// Encode multiple texts in batch, returning token strings
     pub fn encode_batch_to_tokens(&self, texts: RArray, add_special_tokens: Option<bool>) -> Result<RArray> {
+        let ruby = Ruby::get().unwrap();
+        let runtime_error = ruby.exception_runtime_error();
         let texts: Vec<String> = texts.to_vec()?;
         let add_special = add_special_tokens.unwrap_or(true);
-
+
         let token_ids_batch = self.0.encode_batch(texts, add_special)
-            .map_err(|e| Error::new(
-
-        let result =
+            .map_err(|e| Error::new(runtime_error, e.to_string()))?;
+
+        let result = ruby.ary_new();
         for token_ids in token_ids_batch {
             let mut tokens = Vec::new();
             for id in token_ids {
                 let token = self.0.token_to_piece(id)
-                    .map_err(|e| Error::new(
+                    .map_err(|e| Error::new(runtime_error, e.to_string()))?;
                 tokens.push(token);
             }
-            result.push(
+            result.push(ruby.ary_from_vec(tokens))?;
         }
-
+
         Ok(result)
     }
 
     /// Encode text and return both token IDs and token strings
     pub fn encode_with_tokens(&self, text: String, add_special_tokens: Option<bool>) -> Result<RHash> {
+        let ruby = Ruby::get().unwrap();
+        let runtime_error = ruby.exception_runtime_error();
         let add_special = add_special_tokens.unwrap_or(true);
         let token_ids = self.0.encode(&text, add_special)
-            .map_err(|e| Error::new(
-
+            .map_err(|e| Error::new(runtime_error, e.to_string()))?;
+
         let mut tokens = Vec::new();
         for &id in &token_ids {
             let token = self.0.token_to_piece(id)
-                .map_err(|e| Error::new(
+                .map_err(|e| Error::new(runtime_error, e.to_string()))?;
             tokens.push(token);
         }
-
-        let hash =
-        hash.aset(
-        hash.aset(
-
+
+        let hash = ruby.hash_new();
+        hash.aset(ruby.to_symbol("ids"), ruby.ary_from_vec(token_ids.into_iter().map(|id| id as i64).collect()))?;
+        hash.aset(ruby.to_symbol("tokens"), ruby.ary_from_vec(tokens))?;
+
         Ok(hash)
     }
-
+
     /// Decode token IDs back to text
     pub fn decode(&self, token_ids: RArray, skip_special_tokens: Option<bool>) -> Result<String> {
+        let ruby = Ruby::get().unwrap();
         let token_ids: Vec<i64> = token_ids.to_vec()?;
         let token_ids: Vec<u32> = token_ids.into_iter()
             .map(|id| id as u32)
             .collect();
         let skip_special = skip_special_tokens.unwrap_or(true);
-
+
         self.0.decode(&token_ids, skip_special)
-            .map_err(|e| Error::new(
+            .map_err(|e| Error::new(ruby.exception_runtime_error(), e.to_string()))
     }
 
     /// Get the string representation of a single token ID
     pub fn id_to_token(&self, token_id: i64) -> Result<String> {
+        let ruby = Ruby::get().unwrap();
         self.0.token_to_piece(token_id as u32)
-            .map_err(|e| Error::new(
+            .map_err(|e| Error::new(ruby.exception_runtime_error(), e.to_string()))
     }
 
     /// Get the vocabulary as a hash of token string to ID
     pub fn get_vocab(&self, with_added_tokens: Option<bool>) -> Result<RHash> {
+        let ruby = Ruby::get().unwrap();
         let with_added = with_added_tokens.unwrap_or(true);
         let vocab = self.0.inner().get_vocab(with_added);
-
-        let hash =
+
+        let hash = ruby.hash_new();
         for (token, id) in vocab {
             hash.aset(token, id as i64)?;
         }
-
+
         Ok(hash)
     }
 
@@ -151,23 +166,24 @@ impl Tokenizer {
     /// Enable padding - returns a new tokenizer with padding enabled
     pub fn with_padding(&self, kwargs: RHash) -> Result<Self> {
         use tokenizers::{PaddingParams, PaddingStrategy, PaddingDirection};
-
+        let ruby = Ruby::get().unwrap();
+
         let mut params = PaddingParams::default();
-
+
         // Extract parameters from kwargs
-        if let Some(length) = kwargs.get(
+        if let Some(length) = kwargs.get(ruby.to_symbol("length")) {
             if let Ok(len) = usize::try_convert(length) {
                 params.strategy = PaddingStrategy::Fixed(len);
             }
         }
-
-        if let Some(max_length) = kwargs.get(
+
+        if let Some(max_length) = kwargs.get(ruby.to_symbol("max_length")) {
             if let Ok(_) = usize::try_convert(max_length) {
                 params.strategy = PaddingStrategy::BatchLongest;
             }
         }
-
-        if let Some(direction) = kwargs.get(
+
+        if let Some(direction) = kwargs.get(ruby.to_symbol("direction")) {
             if let Ok(dir) = String::try_convert(direction) {
                 params.direction = match dir.as_str() {
                     "right" => PaddingDirection::Right,
@@ -176,19 +192,19 @@ impl Tokenizer {
                 };
             }
         }
-
-        if let Some(pad_id) = kwargs.get(
+
+        if let Some(pad_id) = kwargs.get(ruby.to_symbol("pad_id")) {
             if let Ok(id) = u32::try_convert(pad_id) {
                 params.pad_id = id;
             }
         }
-
-        if let Some(pad_token) = kwargs.get(
+
+        if let Some(pad_token) = kwargs.get(ruby.to_symbol("pad_token")) {
             if let Ok(token) = String::try_convert(pad_token) {
                 params.pad_token = token;
             }
         }
-
+
         let mut new_tokenizer = self.0.clone();
         let _ = new_tokenizer.inner_mut().with_padding(Some(params));
         Ok(Self(new_tokenizer))
@@ -197,14 +213,14 @@ impl Tokenizer {
     /// Enable truncation - returns a new tokenizer with truncation enabled
     pub fn with_truncation(&self, max_length: usize) -> Result<Self> {
         use tokenizers::{TruncationParams, TruncationStrategy, TruncationDirection};
-
+
         let params = TruncationParams {
             max_length,
             strategy: TruncationStrategy::LongestFirst,
             stride: 0,
             direction: TruncationDirection::Right,
         };
-
+
         let mut new_tokenizer = self.0.clone();
         let _ = new_tokenizer.inner_mut().with_truncation(Some(params));
         Ok(Self(new_tokenizer))
@@ -212,8 +228,9 @@ impl Tokenizer {
 
     /// Get special tokens information
     pub fn get_special_tokens(&self) -> Result<RHash> {
-        let
-
+        let ruby = Ruby::get().unwrap();
+        let hash = ruby.hash_new();
+
         // Common special tokens
         let special_tokens = vec![
             ("[CLS]", "cls_token"),
@@ -224,50 +241,49 @@ impl Tokenizer {
             ("<s>", "bos_token"),
             ("</s>", "eos_token"),
         ];
-
+
         let vocab = self.0.inner().get_vocab(true);
-
+
         for (token, name) in special_tokens {
             if let Some(id) = vocab.get(token) {
                 hash.aset(name, *id as i64)?;
             }
         }
-
+
         Ok(hash)
     }
 
     /// Get tokenizer options as a hash
     pub fn options(&self) -> Result<RHash> {
-        let
-
+        let ruby = Ruby::get().unwrap();
+        let hash = ruby.hash_new();
+
         // Get vocab size
         hash.aset("vocab_size", self.vocab_size(Some(true)))?;
         hash.aset("vocab_size_base", self.vocab_size(Some(false)))?;
-
+
         // Get special tokens info
         let special_tokens = self.get_special_tokens()?;
         hash.aset("special_tokens", special_tokens)?;
-
+
         // Get padding/truncation info if available
         let inner_tokenizer = self.0.inner();
-
+
         // Check if padding is enabled
         if let Some(_padding) = inner_tokenizer.get_padding() {
-            let padding_info =
+            let padding_info = ruby.hash_new();
             padding_info.aset("enabled", true)?;
-            // Note: We can't easily extract all padding params from the tokenizers library
-            // but we can indicate it's enabled
             hash.aset("padding", padding_info)?;
         }
-
-        // Check if truncation is enabled
+
+        // Check if truncation is enabled
         if let Some(truncation) = inner_tokenizer.get_truncation() {
-            let truncation_info =
+            let truncation_info = ruby.hash_new();
             truncation_info.aset("enabled", true)?;
             truncation_info.aset("max_length", truncation.max_length)?;
             hash.aset("truncation", truncation_info)?;
         }
-
+
         Ok(hash)
     }
 
@@ -278,13 +294,13 @@ impl Tokenizer {
             .ok()
             .map(|h| h.len())
             .unwrap_or(0);
-
+
         let mut parts = vec![format!("#<Candle::Tokenizer vocab_size={}", vocab_size)];
-
+
         if special_tokens > 0 {
             parts.push(format!("special_tokens={}", special_tokens));
         }
-
+
         // Check for padding/truncation
         let inner_tokenizer = self.0.inner();
         if inner_tokenizer.get_padding().is_some() {
@@ -293,18 +309,19 @@ impl Tokenizer {
         if let Some(truncation) = inner_tokenizer.get_truncation() {
             parts.push(format!("truncation={}", truncation.max_length));
         }
-
+
         parts.join(" ") + ">"
     }
 }
 
 pub fn init(rb_candle: RModule) -> Result<()> {
-    let
-
+    let ruby = Ruby::get().unwrap();
+    let tokenizer_class = rb_candle.define_class("Tokenizer", ruby.class_object())?;
+
     // Class methods
     tokenizer_class.define_singleton_method("from_file", function!(Tokenizer::from_file, 1))?;
     tokenizer_class.define_singleton_method("from_pretrained", function!(Tokenizer::from_pretrained, 1))?;
-
+
     // Instance methods
     tokenizer_class.define_method("encode", method!(Tokenizer::encode, 2))?;
     tokenizer_class.define_method("encode_to_tokens", method!(Tokenizer::encode_to_tokens, 2))?;
@@ -321,6 +338,6 @@ pub fn init(rb_candle: RModule) -> Result<()> {
     tokenizer_class.define_method("options", method!(Tokenizer::options, 0))?;
     tokenizer_class.define_method("inspect", method!(Tokenizer::inspect, 0))?;
     tokenizer_class.define_method("to_s", method!(Tokenizer::inspect, 0))?;
-
+
     Ok(())
-}
+}
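The change repeated throughout tokenizer.rs is mechanical: every binding now obtains an interpreter handle with Ruby::get(), builds exceptions from ruby.exception_runtime_error(), and constructs Ruby arrays, hashes, and symbols through that same handle (ruby.ary_new(), ruby.hash_new(), ruby.to_symbol(...)). Below is a minimal standalone sketch of the two patterns, assuming the magnus API exactly as it appears in the diff above; the function names and the fallible step are hypothetical, not code from red-candle.

use magnus::{Error, RArray, RHash, Ruby, TryConvert};

// Hypothetical binding: fetch the Ruby handle once, cache the exception
// class, and map any Rust error into a Ruby RuntimeError.
fn whitespace_tokens(text: String) -> Result<RArray, Error> {
    let ruby = Ruby::get().unwrap();
    let runtime_error = ruby.exception_runtime_error();

    // Stand-in for a fallible step such as self.0.encode(&text, true).
    let pieces: Result<Vec<String>, String> =
        Ok(text.split_whitespace().map(String::from).collect());

    let pieces = pieces.map_err(|e| Error::new(runtime_error, e))?;
    Ok(ruby.ary_from_vec(pieces))
}

// Hypothetical kwargs reader mirroring the with_padding changes: option
// hashes are probed with symbol keys built from the same handle.
fn read_length(kwargs: RHash) -> Option<usize> {
    let ruby = Ruby::get().unwrap();
    kwargs.get(ruby.to_symbol("length"))
        .and_then(|value| usize::try_convert(value).ok())
}

Binding the exception class to a local works because it is a cheap copyable handle, which is why methods with several fallible steps in the diff fetch runtime_error once and reuse it in each map_err closure.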
data/lib/candle/version.rb
CHANGED
metadata
CHANGED

@@ -1,15 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: red-candle
 version: !ruby/object:Gem::Version
-  version: 1.3.1
+  version: 1.4.0
 platform: ruby
 authors:
 - Christopher Petersen
 - kojix2
-autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rb_sys
@@ -25,6 +24,34 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: logger
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: benchmark
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: minitest
   requirement: !ruby/object:Gem::Requirement
@@ -225,7 +252,6 @@ homepage: https://github.com/scientist-labs/red-candle
 licenses:
 - MIT
 metadata: {}
-post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -241,8 +267,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   version: '3.3'
 requirements:
 - Rust >= 1.85
-rubygems_version:
-signing_key:
+rubygems_version: 4.0.3
 specification_version: 4
 summary: Ruby gem for running state-of-the-art language models locally. Access LLMs,
   embeddings, rerankers, and NER models directly from Ruby using Rust-powered Candle