tokenizers 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
data/Cargo.toml ADDED
@@ -0,0 +1,14 @@
1
+ [package]
2
+ name = "tokenizers-ruby"
3
+ version = "0.1.0"
4
+ authors = ["Andrew Kane <andrew@ankane.org>"]
5
+ edition = "2018"
6
+
7
+ [lib]
8
+ name = "tokenizers"
9
+ crate-type = ["cdylib"]
10
+
11
+ [dependencies]
12
+ lazy_static = "1"
13
+ rutie = "0.8.3"
14
+ tokenizers = "0.11"
data/LICENSE.txt ADDED
@@ -0,0 +1,202 @@
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
178
+
179
+ APPENDIX: How to apply the Apache License to your work.
180
+
181
+ To apply the Apache License to your work, attach the following
182
+ boilerplate notice, with the fields enclosed by brackets "[]"
183
+ replaced with your own identifying information. (Don't include
184
+ the brackets!) The text should be enclosed in the appropriate
185
+ comment syntax for the file format. We also recommend that a
186
+ file or class name and description of purpose be included on the
187
+ same "printed page" as the copyright notice for easier
188
+ identification within third-party archives.
189
+
190
+ Copyright [yyyy] [name of copyright owner]
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.
data/README.md ADDED
@@ -0,0 +1,67 @@
1
+ # Tokenizers
2
+
3
+ :slightly_smiling_face: Fast state-of-the-art [tokenizers](https://github.com/huggingface/tokenizers) for Ruby
4
+
5
+ [![Build Status](https://github.com/ankane/tokenizers-ruby/workflows/build/badge.svg?branch=master)](https://github.com/ankane/tokenizers-ruby/actions)
6
+
7
+ ## Installation
8
+
9
+ Add this line to your application’s Gemfile:
10
+
11
+ ```ruby
12
+ gem "tokenizers"
13
+ ```
14
+
15
+ Note: Rust is currently required for installation.
16
+
17
+ ## Getting Started
18
+
19
+ Load a pretrained tokenizer
20
+
21
+ ```ruby
22
+ tokenizer = Tokenizers.from_pretrained("bert-base-cased")
23
+ ```
24
+
25
+ Encode
26
+
27
+ ```ruby
28
+ encoded = tokenizer.encode("I can feel the magic, can you?")
29
+ encoded.ids
30
+ encoded.tokens
31
+ ```
32
+
33
+ Decode
34
+
35
+ ```ruby
36
+ tokenizer.decode(encoded.ids)
37
+ ```
38
+
39
+ Load a tokenizer from files
40
+
41
+ ```ruby
42
+ tokenizer = Tokenizers::CharBPETokenizer.new("vocab.json", "merges.txt")
43
+ ```
44
+
45
+ ## History
46
+
47
+ View the [changelog](https://github.com/ankane/tokenizers-ruby/blob/master/CHANGELOG.md)
48
+
49
+ ## Contributing
50
+
51
+ Everyone is encouraged to help improve this project. Here are a few ways you can help:
52
+
53
+ - [Report bugs](https://github.com/ankane/tokenizers-ruby/issues)
54
+ - Fix bugs and [submit pull requests](https://github.com/ankane/tokenizers-ruby/pulls)
55
+ - Write, clarify, or fix documentation
56
+ - Suggest or add new features
57
+
58
+ To get started with development:
59
+
60
+ ```sh
61
+ git clone https://github.com/ankane/tokenizers-ruby.git
62
+ cd tokenizers-ruby
63
+ bundle install
64
+ bundle exec ruby ext/tokenizers/extconf.rb && make
65
+ bundle exec rake download:files
66
+ bundle exec rake test
67
+ ```
@@ -0,0 +1,7 @@
1
+ File.write "Makefile", <<~EOS
2
+ install:
3
+ \tcargo build --release
4
+ \tmv target/release/libtokenizers.#{RbConfig::CONFIG["SOEXT"]} lib/tokenizers/ext.#{RbConfig::CONFIG["DLEXT"]}
5
+ clean:
6
+ \tcargo clean
7
+ EOS
@@ -0,0 +1,19 @@
1
+ module Tokenizers
2
+ class CharBPETokenizer
3
+ def initialize(vocab, merges)
4
+ @tokenizer = Tokenizer.new(BPE.new(vocab, merges))
5
+ @tokenizer.add_special_tokens(["<unk>"])
6
+ @tokenizer.normalizer = BertNormalizer.new
7
+ @tokenizer.pre_tokenizer = BertPreTokenizer.new
8
+ @tokenizer.decoder = BPEDecoder.new
9
+ end
10
+
11
+ def encode(text)
12
+ @tokenizer.encode(text)
13
+ end
14
+
15
+ def decode(ids)
16
+ @tokenizer.decode(ids)
17
+ end
18
+ end
19
+ end
@@ -0,0 +1,3 @@
1
+ module Tokenizers
2
+ VERSION = "0.1.0"
3
+ end
data/lib/tokenizers.rb ADDED
@@ -0,0 +1,14 @@
1
+ # extlib
2
+ require "tokenizers/ext"
3
+
4
+ # modules
5
+ require "tokenizers/char_bpe_tokenizer"
6
+ require "tokenizers/version"
7
+
8
+ module Tokenizers
9
+ class Error < StandardError; end
10
+
11
+ def self.from_pretrained(identifier, revision: "main", auth_token: nil)
12
+ _from_pretrained(identifier, revision, auth_token)
13
+ end
14
+ end
data/src/lib.rs ADDED
@@ -0,0 +1,290 @@
1
+ #[macro_use]
2
+ extern crate rutie;
3
+
4
+ use rutie::{AnyException, AnyObject, Array, Integer, Module, Object, RString, VerifiedObject, VM};
5
+ use tokenizers::decoders::bpe::BPEDecoder;
6
+ use tokenizers::models::bpe::BPE;
7
+ use tokenizers::normalizers::BertNormalizer;
8
+ use tokenizers::pre_tokenizers::bert::BertPreTokenizer;
9
+ use tokenizers::tokenizer::Tokenizer;
10
+ use tokenizers::{decoders, AddedToken, Encoding};
11
+
12
+ pub const VERSION: &str = env!("CARGO_PKG_VERSION");
13
+
14
+ wrappable_struct!(Tokenizer, TokenizerWrapper, TOKENIZER_WRAPPER);
15
+ wrappable_struct!(BPE, BPEWrapper, BPE_WRAPPER);
16
+ wrappable_struct!(Encoding, EncodingWrapper, ENCODING_WRAPPER);
17
+ wrappable_struct!(BPEDecoder, BPEDecoderWrapper, BPE_DECODER_WRAPPER);
18
+ wrappable_struct!(BertPreTokenizer, BertPreTokenizerWrapper, BERT_PRE_TOKENIZER_WRAPPER);
19
+ wrappable_struct!(BertNormalizer, BertNormalizerWrapper, BERT_NORMALIZER_WRAPPER);
20
+
21
+ module!(rbTokenizers);
22
+
23
+ class!(rbBPE);
24
+ class!(rbTokenizer);
25
+ class!(rbEncoding);
26
+ class!(rbBPEDecoder);
27
+ class!(rbBertPreTokenizer);
28
+ class!(rbBertNormalizer);
29
+
30
+ fn unwrap_object<T>(res: Result<T, AnyException>) -> T {
31
+ res.map_err(VM::raise_ex).unwrap()
32
+ }
33
+
34
+ fn unwrap_optional<T>(res: Result<AnyObject, AnyException>) -> Option<T>
35
+ where
36
+ T: VerifiedObject,
37
+ {
38
+ let x = unwrap_object(res);
39
+ if x.is_nil() {
40
+ None
41
+ } else {
42
+ Some(unwrap_object(x.try_convert_to::<T>()))
43
+ }
44
+ }
45
+
46
+ fn handle_error<T>(res: Result<T, Box<dyn std::error::Error + Send + Sync>>) -> T {
47
+ match res {
48
+ Ok(x) => x,
49
+ Err(e) => {
50
+ VM::raise(
51
+ Module::from_existing("Tokenizers").get_nested_class("Error"),
52
+ &e.to_string(),
53
+ );
54
+ unreachable!()
55
+ }
56
+ }
57
+ }
58
+
59
+ methods!(
60
+ rbTokenizers,
61
+ _rtself,
62
+
63
+ fn tokenizers_from_pretrained(identifier: RString, revision: RString, auth_token: AnyObject) -> AnyObject {
64
+ let identifier = unwrap_object(identifier);
65
+ let revision = unwrap_object(revision);
66
+ let auth_token: Option<RString> = unwrap_optional(auth_token);
67
+
68
+ let params = tokenizers::FromPretrainedParameters {
69
+ revision: revision.to_string(),
70
+ auth_token: auth_token.map(|x| x.to_string()),
71
+ user_agent: [("bindings", "Ruby"), ("version", VERSION)]
72
+ .iter()
73
+ .map(|(k, v)| (k.to_string(), v.to_string()))
74
+ .collect(),
75
+ };
76
+
77
+ let tokenizer = handle_error(Tokenizer::from_pretrained(identifier.to_string(), Some(params)));
78
+ Module::from_existing("Tokenizers")
79
+ .get_nested_class("Tokenizer")
80
+ .wrap_data(tokenizer, &*TOKENIZER_WRAPPER)
81
+ }
82
+ );
83
+
84
+ methods!(
85
+ rbBPE,
86
+ _rtself,
87
+
88
+ fn bpe_new(vocab: RString, merges: RString) -> AnyObject {
89
+ let vocab = unwrap_object(vocab);
90
+ let merges = unwrap_object(merges);
91
+
92
+ let bpe = handle_error(BPE::from_file(&vocab.to_string(), &merges.to_string())
93
+ .unk_token("<unk>".into())
94
+ .end_of_word_suffix("</w>".into())
95
+ .build());
96
+
97
+ Module::from_existing("Tokenizers")
98
+ .get_nested_class("BPE")
99
+ .wrap_data(bpe, &*BPE_WRAPPER)
100
+ }
101
+ );
102
+
103
+ methods!(
104
+ rbTokenizer,
105
+ _rtself,
106
+
107
+ fn tokenizer_new(model: AnyObject) -> AnyObject {
108
+ let model = unwrap_object(model);
109
+
110
+ // TODO support any model
111
+ let model = model.get_data(&*BPE_WRAPPER).clone();
112
+
113
+ let mut tokenizer = Tokenizer::new(model);
114
+
115
+ Module::from_existing("Tokenizers")
116
+ .get_nested_class("Tokenizer")
117
+ .wrap_data(tokenizer, &*TOKENIZER_WRAPPER)
118
+ }
119
+ );
120
+
121
+ methods!(
122
+ rbTokenizer,
123
+ rtself,
124
+
125
+ fn tokenizer_add_special_tokens(tokens: Array) -> rbTokenizer {
126
+ let tokenizer = rtself.get_data_mut(&*TOKENIZER_WRAPPER);
127
+ let tokens = unwrap_object(tokens);
128
+
129
+ let mut vec = Vec::new();
130
+ for token in tokens.into_iter() {
131
+ vec.push(AddedToken::from(unwrap_object(token.try_convert_to::<RString>()).to_string(), true));
132
+ }
133
+ tokenizer.add_special_tokens(&vec);
134
+ rtself
135
+ }
136
+
137
+ fn tokenizer_encode(text: RString) -> AnyObject {
138
+ let tokenizer = rtself.get_data(&*TOKENIZER_WRAPPER);
139
+ let text = unwrap_object(text);
140
+
141
+ let encoding = handle_error(tokenizer.encode(text.to_string(), false));
142
+ Module::from_existing("Tokenizers")
143
+ .get_nested_class("Encoding")
144
+ .wrap_data(encoding, &*ENCODING_WRAPPER)
145
+ }
146
+
147
+ fn tokenizer_decode(ids: Array) -> RString {
148
+ let tokenizer = rtself.get_data(&*TOKENIZER_WRAPPER);
149
+ let ids = unwrap_object(ids);
150
+
151
+ let mut vec = Vec::new();
152
+ for item in ids.into_iter() {
153
+ vec.push(unwrap_object(item.try_convert_to::<Integer>()).into());
154
+ }
155
+ let s = handle_error(tokenizer.decode(vec, true));
156
+ RString::new_utf8(&s)
157
+ }
158
+
159
+ fn tokenizer_decoder_set(decoder: AnyObject) -> AnyObject {
160
+ let tokenizer = rtself.get_data_mut(&*TOKENIZER_WRAPPER);
161
+ let decoder = unwrap_object(decoder);
162
+
163
+ tokenizer.with_decoder(decoder.get_data(&*BPE_DECODER_WRAPPER).clone());
164
+ decoder
165
+ }
166
+
167
+ fn tokenizer_pre_tokenizer_set(pre_tokenizer: AnyObject) -> AnyObject {
168
+ let tokenizer = rtself.get_data_mut(&*TOKENIZER_WRAPPER);
169
+ let pre_tokenizer = unwrap_object(pre_tokenizer);
170
+
171
+ tokenizer.with_pre_tokenizer(*pre_tokenizer.get_data(&*BERT_PRE_TOKENIZER_WRAPPER));
172
+ pre_tokenizer
173
+ }
174
+
175
+ fn tokenizer_normalizer_set(normalizer: AnyObject) -> AnyObject {
176
+ let tokenizer = rtself.get_data_mut(&*TOKENIZER_WRAPPER);
177
+ let normalizer = unwrap_object(normalizer);
178
+
179
+ tokenizer.with_normalizer(*normalizer.get_data(&*BERT_NORMALIZER_WRAPPER));
180
+ normalizer
181
+ }
182
+ );
183
+
184
+ methods!(
185
+ rbEncoding,
186
+ rtself,
187
+
188
+ fn encoding_ids() -> Array {
189
+ let encoding = rtself.get_data(&*ENCODING_WRAPPER);
190
+
191
+ let mut array = Array::new();
192
+ for x in encoding.get_ids() {
193
+ array.push(Integer::from(*x));
194
+ }
195
+ array
196
+ }
197
+
198
+ fn encoding_tokens() -> Array {
199
+ let encoding = rtself.get_data(&*ENCODING_WRAPPER);
200
+
201
+ let mut array = Array::new();
202
+ for x in encoding.get_tokens() {
203
+ array.push(RString::new_utf8(x));
204
+ }
205
+ array
206
+ }
207
+ );
208
+
209
+ methods!(
210
+ rbBPEDecoder,
211
+ _rtself,
212
+
213
+ fn bpe_decoder_new() -> AnyObject {
214
+ let decoder = decoders::bpe::BPEDecoder::default();
215
+ Module::from_existing("Tokenizers")
216
+ .get_nested_class("BPEDecoder")
217
+ .wrap_data(decoder, &*BPE_DECODER_WRAPPER)
218
+ }
219
+ );
220
+
221
+ methods!(
222
+ rbBertPreTokenizer,
223
+ _rtself,
224
+
225
+ fn bert_pre_tokenizer_new() -> AnyObject {
226
+ let pre_tokenizer = BertPreTokenizer;
227
+ Module::from_existing("Tokenizers")
228
+ .get_nested_class("BertPreTokenizer")
229
+ .wrap_data(pre_tokenizer, &*BERT_PRE_TOKENIZER_WRAPPER)
230
+ }
231
+ );
232
+
233
+ methods!(
234
+ rbBertNormalizer,
235
+ _rtself,
236
+
237
+ fn bert_normalizer_new() -> AnyObject {
238
+ let normalizer = BertNormalizer::default();
239
+ Module::from_existing("Tokenizers")
240
+ .get_nested_class("BertNormalizer")
241
+ .wrap_data(normalizer, &*BERT_NORMALIZER_WRAPPER)
242
+ }
243
+ );
244
+
245
+ #[allow(non_snake_case)]
246
+ #[no_mangle]
247
+ pub extern "C" fn Init_ext() {
248
+ let mut m = Module::new("Tokenizers");
249
+
250
+ m.define(|klass| {
251
+ klass.def_self("_from_pretrained", tokenizers_from_pretrained);
252
+ klass.define_nested_class("BPE", None);
253
+ klass.define_nested_class("Tokenizer", None);
254
+ klass.define_nested_class("Encoding", None);
255
+ klass.define_nested_class("BPEDecoder", None);
256
+ klass.define_nested_class("BertPreTokenizer", None);
257
+ klass.define_nested_class("BertNormalizer", None);
258
+ });
259
+
260
+ m.get_nested_class("BPE").define(|klass| {
261
+ klass.def_self("new", bpe_new);
262
+ });
263
+
264
+ m.get_nested_class("Tokenizer").define(|klass| {
265
+ klass.def_self("new", tokenizer_new);
266
+ klass.def("add_special_tokens", tokenizer_add_special_tokens);
267
+ klass.def("encode", tokenizer_encode);
268
+ klass.def("decode", tokenizer_decode);
269
+ klass.def("decoder=", tokenizer_decoder_set);
270
+ klass.def("pre_tokenizer=", tokenizer_pre_tokenizer_set);
271
+ klass.def("normalizer=", tokenizer_normalizer_set);
272
+ });
273
+
274
+ m.get_nested_class("Encoding").define(|klass| {
275
+ klass.def("ids", encoding_ids);
276
+ klass.def("tokens", encoding_tokens);
277
+ });
278
+
279
+ m.get_nested_class("BPEDecoder").define(|klass| {
280
+ klass.def_self("new", bpe_decoder_new);
281
+ });
282
+
283
+ m.get_nested_class("BertPreTokenizer").define(|klass| {
284
+ klass.def_self("new", bert_pre_tokenizer_new);
285
+ });
286
+
287
+ m.get_nested_class("BertNormalizer").define(|klass| {
288
+ klass.def_self("new", bert_normalizer_new);
289
+ });
290
+ }
metadata ADDED
@@ -0,0 +1,53 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: tokenizers
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - Andrew Kane
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2022-03-19 00:00:00.000000000 Z
12
+ dependencies: []
13
+ description:
14
+ email: andrew@ankane.org
15
+ executables: []
16
+ extensions:
17
+ - ext/tokenizers/extconf.rb
18
+ extra_rdoc_files: []
19
+ files:
20
+ - CHANGELOG.md
21
+ - Cargo.lock
22
+ - Cargo.toml
23
+ - LICENSE.txt
24
+ - README.md
25
+ - ext/tokenizers/extconf.rb
26
+ - lib/tokenizers.rb
27
+ - lib/tokenizers/char_bpe_tokenizer.rb
28
+ - lib/tokenizers/version.rb
29
+ - src/lib.rs
30
+ homepage: https://github.com/ankane/tokenizers-ruby
31
+ licenses:
32
+ - Apache-2.0
33
+ metadata: {}
34
+ post_install_message:
35
+ rdoc_options: []
36
+ require_paths:
37
+ - lib
38
+ required_ruby_version: !ruby/object:Gem::Requirement
39
+ requirements:
40
+ - - ">="
41
+ - !ruby/object:Gem::Version
42
+ version: '2.7'
43
+ required_rubygems_version: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ">="
46
+ - !ruby/object:Gem::Version
47
+ version: '0'
48
+ requirements: []
49
+ rubygems_version: 3.3.7
50
+ signing_key:
51
+ specification_version: 4
52
+ summary: Fast state-of-the-art tokenizers for Ruby
53
+ test_files: []