tokenizers 0.5.1-aarch64-linux-musl

Files changed (41)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +95 -0
  3. data/Cargo.lock +895 -0
  4. data/Cargo.toml +6 -0
  5. data/LICENSE-THIRD-PARTY.txt +17104 -0
  6. data/LICENSE.txt +202 -0
  7. data/README.md +105 -0
  8. data/lib/tokenizers/3.1/tokenizers.so +0 -0
  9. data/lib/tokenizers/3.2/tokenizers.so +0 -0
  10. data/lib/tokenizers/3.3/tokenizers.so +0 -0
  11. data/lib/tokenizers/char_bpe_tokenizer.rb +22 -0
  12. data/lib/tokenizers/decoders/bpe_decoder.rb +9 -0
  13. data/lib/tokenizers/decoders/ctc.rb +9 -0
  14. data/lib/tokenizers/decoders/metaspace.rb +9 -0
  15. data/lib/tokenizers/decoders/strip.rb +9 -0
  16. data/lib/tokenizers/decoders/word_piece.rb +9 -0
  17. data/lib/tokenizers/encoding.rb +19 -0
  18. data/lib/tokenizers/from_pretrained.rb +125 -0
  19. data/lib/tokenizers/models/bpe.rb +9 -0
  20. data/lib/tokenizers/models/unigram.rb +9 -0
  21. data/lib/tokenizers/models/word_level.rb +13 -0
  22. data/lib/tokenizers/models/word_piece.rb +9 -0
  23. data/lib/tokenizers/normalizers/bert_normalizer.rb +9 -0
  24. data/lib/tokenizers/normalizers/prepend.rb +9 -0
  25. data/lib/tokenizers/normalizers/strip.rb +9 -0
  26. data/lib/tokenizers/pre_tokenizers/byte_level.rb +9 -0
  27. data/lib/tokenizers/pre_tokenizers/digits.rb +9 -0
  28. data/lib/tokenizers/pre_tokenizers/metaspace.rb +9 -0
  29. data/lib/tokenizers/pre_tokenizers/punctuation.rb +9 -0
  30. data/lib/tokenizers/pre_tokenizers/split.rb +9 -0
  31. data/lib/tokenizers/processors/byte_level.rb +9 -0
  32. data/lib/tokenizers/processors/roberta_processing.rb +9 -0
  33. data/lib/tokenizers/processors/template_processing.rb +9 -0
  34. data/lib/tokenizers/tokenizer.rb +45 -0
  35. data/lib/tokenizers/trainers/bpe_trainer.rb +9 -0
  36. data/lib/tokenizers/trainers/unigram_trainer.rb +26 -0
  37. data/lib/tokenizers/trainers/word_level_trainer.rb +9 -0
  38. data/lib/tokenizers/trainers/word_piece_trainer.rb +26 -0
  39. data/lib/tokenizers/version.rb +3 -0
  40. data/lib/tokenizers.rb +61 -0
  41. metadata +84 -0
data/LICENSE.txt ADDED
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/README.md ADDED
@@ -0,0 +1,105 @@
+ # Tokenizers Ruby
+
+ :slightly_smiling_face: Fast state-of-the-art [tokenizers](https://github.com/huggingface/tokenizers) for Ruby
+
+ [![Build Status](https://github.com/ankane/tokenizers-ruby/actions/workflows/build.yml/badge.svg)](https://github.com/ankane/tokenizers-ruby/actions)
+
+ ## Installation
+
+ Add this line to your application’s Gemfile:
+
+ ```ruby
+ gem "tokenizers"
+ ```
+
+ ## Getting Started
+
+ Load a pretrained tokenizer
+
+ ```ruby
+ tokenizer = Tokenizers.from_pretrained("bert-base-cased")
+ ```
+
+ Encode
+
+ ```ruby
+ encoded = tokenizer.encode("I can feel the magic, can you?")
+ encoded.tokens
+ encoded.ids
+ ```
+
+ Decode
+
+ ```ruby
+ tokenizer.decode(encoded.ids)
+ ```
+
+ ## Training
+
+ Create a tokenizer
+
+ ```ruby
+ tokenizer = Tokenizers::Tokenizer.new(Tokenizers::Models::BPE.new(unk_token: "[UNK]"))
+ ```
+
+ Set the pre-tokenizer
+
+ ```ruby
+ tokenizer.pre_tokenizer = Tokenizers::PreTokenizers::Whitespace.new
+ ```
+
+ Train the tokenizer ([example data](https://huggingface.co/docs/tokenizers/quicktour#build-a-tokenizer-from-scratch))
+
+ ```ruby
+ trainer = Tokenizers::Trainers::BpeTrainer.new(special_tokens: ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]"])
+ tokenizer.train(["wiki.train.raw", "wiki.valid.raw", "wiki.test.raw"], trainer)
+ ```
+
+ Encode
+
+ ```ruby
+ output = tokenizer.encode("Hello, y'all! How are you 😁 ?")
+ output.tokens
+ ```
+
+ Save the tokenizer to a file
+
+ ```ruby
+ tokenizer.save("tokenizer.json")
+ ```
+
+ Load a tokenizer from a file
+
+ ```ruby
+ tokenizer = Tokenizers.from_file("tokenizer.json")
+ ```
+
+ Check out the [Quicktour](https://huggingface.co/docs/tokenizers/quicktour) and equivalent [Ruby code](https://github.com/ankane/tokenizers-ruby/blob/master/test/quicktour_test.rb#L8) for more info
+
+ ## API
+
+ This library follows the [Tokenizers Python API](https://huggingface.co/docs/tokenizers/index). You can follow Python tutorials and convert the code to Ruby in many cases. Feel free to open an issue if you run into problems.
+
+ ## History
+
+ View the [changelog](https://github.com/ankane/tokenizers-ruby/blob/master/CHANGELOG.md)
+
+ ## Contributing
+
+ Everyone is encouraged to help improve this project. Here are a few ways you can help:
+
+ - [Report bugs](https://github.com/ankane/tokenizers-ruby/issues)
+ - Fix bugs and [submit pull requests](https://github.com/ankane/tokenizers-ruby/pulls)
+ - Write, clarify, or fix documentation
+ - Suggest or add new features
+
+ To get started with development:
+
+ ```sh
+ git clone https://github.com/ankane/tokenizers-ruby.git
+ cd tokenizers-ruby
+ bundle install
+ bundle exec rake compile
+ bundle exec rake download:files
+ bundle exec rake test
+ ```
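Since the bindings mirror the Python API (see the API section above), most Python snippets translate mechanically: the same class names, with snake_case keyword arguments. A minimal sketch of one such conversion, using the `Digits` pre-tokenizer added in this release; the tokenizer setup is just the illustrative one from the Training section:

```ruby
require "tokenizers"

tokenizer = Tokenizers::Tokenizer.new(Tokenizers::Models::BPE.new(unk_token: "[UNK]"))

# Python (from a typical Hugging Face tutorial):
#   from tokenizers.pre_tokenizers import Digits
#   tokenizer.pre_tokenizer = Digits(individual_digits=True)
#
# Ruby equivalent: same class and argument, with a snake_case keyword
tokenizer.pre_tokenizer = Tokenizers::PreTokenizers::Digits.new(individual_digits: true)
```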
data/lib/tokenizers/3.1/tokenizers.so ADDED
Binary file
data/lib/tokenizers/3.2/tokenizers.so ADDED
Binary file
data/lib/tokenizers/3.3/tokenizers.so ADDED
Binary file
data/lib/tokenizers/char_bpe_tokenizer.rb ADDED
@@ -0,0 +1,22 @@
+ module Tokenizers
+   class CharBPETokenizer
+     def initialize(vocab, merges, unk_token: "<unk>", suffix: "</w>")
+       @tokenizer =
+         Tokenizer.new(
+           Models::BPE._from_file(vocab, merges, {unk_token: unk_token, end_of_word_suffix: suffix})
+         )
+       @tokenizer.add_special_tokens([unk_token])
+       @tokenizer.normalizer = Normalizers::BertNormalizer.new
+       @tokenizer.pre_tokenizer = PreTokenizers::BertPreTokenizer.new
+       @tokenizer.decoder = Decoders::BPEDecoder.new
+     end
+
+     def encode(text, **options)
+       @tokenizer.encode(text, **options)
+     end
+
+     def decode(ids)
+       @tokenizer.decode(ids)
+     end
+   end
+ end
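For reference, a minimal usage sketch of the `CharBPETokenizer` defined above; the `vocab.json` and `merges.txt` paths are placeholders for files produced by a character-level BPE model:

```ruby
require "tokenizers"

# placeholder paths to an existing vocab and merges file
tokenizer = Tokenizers::CharBPETokenizer.new("vocab.json", "merges.txt")

encoded = tokenizer.encode("the quick brown fox")
encoded.tokens                 # BPE tokens, with "</w>" marking word endings
tokenizer.decode(encoded.ids)
```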
data/lib/tokenizers/decoders/bpe_decoder.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Decoders
+     class BPEDecoder
+       def self.new(suffix: "</w>")
+         _new(suffix)
+       end
+     end
+   end
+ end
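This is the first of many thin wrappers that follow the same pattern: `self.new` applies keyword defaults on the Ruby side, then forwards the values positionally to `_new`, which is defined by the native extension. In practice that means callers use keywords throughout:

```ruby
# defaults come from the Ruby wrapper, so these two calls are equivalent
decoder = Tokenizers::Decoders::BPEDecoder.new
decoder = Tokenizers::Decoders::BPEDecoder.new(suffix: "</w>")
```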
data/lib/tokenizers/decoders/ctc.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Decoders
+     class CTC
+       def self.new(pad_token: "<pad>", word_delimiter_token: "|", cleanup: true)
+         _new(pad_token, word_delimiter_token, cleanup)
+       end
+     end
+   end
+ end
data/lib/tokenizers/decoders/metaspace.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Decoders
+     class Metaspace
+       def self.new(replacement: "\u2581", prepend_scheme: "always", split: true)
+         _new(replacement, prepend_scheme, split)
+       end
+     end
+   end
+ end
data/lib/tokenizers/decoders/strip.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Decoders
+     class Strip
+       def self.new(content: " ", start: 0, stop: 0)
+         _new(content, start, stop)
+       end
+     end
+   end
+ end
data/lib/tokenizers/decoders/word_piece.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Decoders
+     class WordPiece
+       def self.new(prefix: "##", cleanup: true)
+         _new(prefix, cleanup)
+       end
+     end
+   end
+ end
data/lib/tokenizers/encoding.rb ADDED
@@ -0,0 +1,19 @@
+ module Tokenizers
+   class Encoding
+     def word_to_tokens(word_index, sequence_index = 0)
+       _word_to_tokens(word_index, sequence_index)
+     end
+
+     def word_to_chars(word_index, sequence_index = 0)
+       _word_to_chars(word_index, sequence_index)
+     end
+
+     def char_to_token(char_pos, sequence_index = 0)
+       _char_to_token(char_pos, sequence_index)
+     end
+
+     def char_to_word(char_pos, sequence_index = 0)
+       _char_to_word(char_pos, sequence_index)
+     end
+   end
+ end
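These helpers map between word indices, character offsets, and token positions, with `sequence_index` defaulting to the first sequence. A short sketch, assuming the pretrained tokenizer from the README; returned values depend on the model, so none are shown:

```ruby
require "tokenizers"

tokenizer = Tokenizers.from_pretrained("bert-base-cased")
encoded = tokenizer.encode("I can feel the magic, can you?")

encoded.word_to_tokens(1)  # token range covering the second word
encoded.word_to_chars(1)   # character span of the second word
encoded.char_to_token(6)   # index of the token covering offset 6
encoded.char_to_word(6)    # index of the word covering offset 6
```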
data/lib/tokenizers/from_pretrained.rb ADDED
@@ -0,0 +1,125 @@
+ module Tokenizers
+   module FromPretrained
+     # for user agent
+     TOKENIZERS_VERSION = "0.20.0"
+
+     # use Ruby for downloads
+     # this avoids the need to vendor OpenSSL on Linux
+     # and reduces the extension size by about half
+     def from_pretrained(identifier, revision: "main", auth_token: nil)
+       require "cgi"
+       require "digest"
+       require "fileutils"
+       require "json"
+       require "net/http"
+       require "open-uri"
+
+       cache_dir = ensure_cache_dir
+
+       options = {
+         open_timeout: 3,
+         read_timeout: 30
+       }
+       headers = {
+         "User-Agent" => "tokenizers/#{TOKENIZERS_VERSION}; bindings/Ruby; version/#{VERSION}"
+       }
+       if auth_token
+         headers["Authorization"] = "Bearer #{auth_token}"
+       end
+
+       url = "https://huggingface.co/%s/resolve/%s/tokenizer.json" % [identifier, revision].map { |v| CGI.escape(v) }
+
+       path =
+         begin
+           cached_path(cache_dir, url, headers, options)
+         rescue OpenURI::HTTPError
+           raise Error, "Model \"#{identifier}\" on the Hub doesn't have a tokenizer"
+         end
+
+       from_file(path)
+     end
+
+     private
+
+     # use same storage format as Rust version
+     # https://github.com/epwalsh/rust-cached-path
+     def cached_path(cache_dir, url, headers, options)
+       fsum = Digest::SHA256.hexdigest(url)
+       meta_paths = Dir[File.join(cache_dir, "#{fsum}.*.meta")]
+       meta = meta_paths.map { |f| JSON.parse(File.read(f)) }.max_by { |m| m["creation_time"] }
+       etag = meta["etag"] if meta
+
+       if etag
+         esum = Digest::SHA256.hexdigest(etag)
+         resource_path = File.join(cache_dir, "#{fsum}.#{esum}")
+         if File.exist?(resource_path)
+           uri = URI(url)
+           req = Net::HTTP::Head.new(uri)
+           headers.each do |k, v|
+             req[k] = v
+           end
+           res = Net::HTTP.start(uri.hostname, uri.port, options.merge(use_ssl: true)) do |http|
+             http.request(req)
+           end
+           if res["etag"] == etag
+             return resource_path
+           end
+         end
+       end
+
+       options[:content_length_proc] = ->(_) { puts "Downloading..." }
+
+       # string options are headers
+       tempfile = URI.parse(url).open(headers.merge(options))
+
+       etag = tempfile.meta["etag"]
+       esum = Digest::SHA256.hexdigest(etag)
+       resource_path = File.join(cache_dir, "#{fsum}.#{esum}")
+       meta_path = "#{resource_path}.meta"
+
+       meta = {
+         resource: url,
+         resource_path: resource_path,
+         meta_path: meta_path,
+         etag: etag,
+         expires: nil,
+         creation_time: Time.now.to_f
+       }
+
+       File.write("#{resource_path}.lock", "")
+       File.open(resource_path, "wb") { |f| IO.copy_stream(tempfile, f) }
+       File.write(meta_path, JSON.generate(meta))
+
+       resource_path
+     end
+
+     def cache_dir
+       if ENV["TOKENIZERS_CACHE"]
+         ENV["TOKENIZERS_CACHE"]
+       else
+         # use same directory as Rust version
+         # https://docs.rs/dirs/latest/dirs/fn.cache_dir.html
+         dir =
+           if Gem.win_platform?
+             ENV.fetch("LOCALAPPDATA")
+           elsif mac?
+             File.join(ENV.fetch("HOME"), "Library", "Caches")
+           else
+             ENV["XDG_CACHE_HOME"] || File.join(ENV.fetch("HOME"), ".cache")
+           end
+
+         File.join(dir, "huggingface", "tokenizers")
+       end
+     end
+
+     def ensure_cache_dir
+       dir = cache_dir
+       FileUtils.mkdir_p(dir)
+       dir
+     end
+
+     def mac?
+       RbConfig::CONFIG["host_os"] =~ /darwin/i
+     end
+   end
+ end
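As the code above shows, downloads are cached under a SHA-256 of the URL plus the response ETag and revalidated with a HEAD request, and `TOKENIZERS_CACHE` overrides the platform cache directory. A usage sketch based on the signature above; the cache path and token are placeholders:

```ruby
require "tokenizers"

ENV["TOKENIZERS_CACHE"] = "/tmp/tokenizers-cache"  # optional override

tokenizer = Tokenizers.from_pretrained(
  "bert-base-cased",
  revision: "main",            # a branch, tag, or commit on the Hub
  auth_token: ENV["HF_TOKEN"]  # only needed for private or gated repos
)
```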
data/lib/tokenizers/models/bpe.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Models
+     class BPE
+       def self.new(vocab: nil, merges: nil, **kwargs)
+         _new(vocab, merges, kwargs)
+       end
+     end
+   end
+ end
data/lib/tokenizers/models/unigram.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Models
+     class Unigram
+       def self.new(vocab: nil, unk_id: nil, byte_fallback: nil)
+         _new(vocab, unk_id, byte_fallback)
+       end
+     end
+   end
+ end
data/lib/tokenizers/models/word_level.rb ADDED
@@ -0,0 +1,13 @@
+ module Tokenizers
+   module Models
+     class WordLevel
+       def self.new(vocab: nil, unk_token: nil)
+         _new(vocab, unk_token)
+       end
+
+       def self.from_file(vocab, unk_token: nil)
+         _from_file(vocab, unk_token)
+       end
+     end
+   end
+ end
data/lib/tokenizers/models/word_piece.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Models
+     class WordPiece
+       def self.new(vocab: nil, **kwargs)
+         _new(vocab, kwargs)
+       end
+     end
+   end
+ end
data/lib/tokenizers/normalizers/bert_normalizer.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Normalizers
+     class BertNormalizer
+       def self.new(clean_text: true, handle_chinese_chars: true, strip_accents: nil, lowercase: true)
+         _new(clean_text, handle_chinese_chars, strip_accents, lowercase)
+       end
+     end
+   end
+ end
data/lib/tokenizers/normalizers/prepend.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Normalizers
+     class Prepend
+       def self.new(prepend: "▁")
+         _new(prepend)
+       end
+     end
+   end
+ end
data/lib/tokenizers/normalizers/strip.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Normalizers
+     class Strip
+       def self.new(left: true, right: true)
+         _new(left, right)
+       end
+     end
+   end
+ end
data/lib/tokenizers/pre_tokenizers/byte_level.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module PreTokenizers
+     class ByteLevel
+       def self.new(add_prefix_space: true, use_regex: true)
+         _new(add_prefix_space, use_regex)
+       end
+     end
+   end
+ end
data/lib/tokenizers/pre_tokenizers/digits.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module PreTokenizers
+     class Digits
+       def self.new(individual_digits: false)
+         _new(individual_digits)
+       end
+     end
+   end
+ end
data/lib/tokenizers/pre_tokenizers/metaspace.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module PreTokenizers
+     class Metaspace
+       def self.new(replacement: "\u2581", prepend_scheme: "always", split: true)
+         _new(replacement, prepend_scheme, split)
+       end
+     end
+   end
+ end
data/lib/tokenizers/pre_tokenizers/punctuation.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module PreTokenizers
+     class Punctuation
+       def self.new(behavior: "isolated")
+         _new(behavior)
+       end
+     end
+   end
+ end
data/lib/tokenizers/pre_tokenizers/split.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module PreTokenizers
+     class Split
+       def self.new(pattern, behavior, invert: false)
+         _new(pattern, behavior, invert)
+       end
+     end
+   end
+ end
data/lib/tokenizers/processors/byte_level.rb ADDED
@@ -0,0 +1,9 @@
+ module Tokenizers
+   module Processors
+     class ByteLevel
+       def self.new(trim_offsets: true)
+         _new(trim_offsets)
+       end
+     end
+   end
+ end