ommlds-rs 0.0.0.dev473 (tar.gz)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+ Copyright 2023- wrmsr
+
+ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
+ following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
+ disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,30 @@
+ Metadata-Version: 2.4
+ Name: ommlds-rs
+ Version: 0.0.0.dev473
+ Summary: ommlds
+ Author: wrmsr
+ License-Expression: BSD-3-Clause
+ Project-URL: source, https://github.com/wrmsr/omlish
+ Classifier: Development Status :: 2 - Pre-Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Operating System :: OS Independent
+ Classifier: Operating System :: POSIX
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.13
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: ommlds==0.0.0.dev473
+ Dynamic: license-file
+
+ # Overview
+
+ ML / AI code.
+
+ # Notable packages
+
+ - **[cli](https://github.com/wrmsr/omlish/blob/master/ommlds/cli)** (cli: `om mc`) - A general-purpose AI CLI, inspired
+   by and in the spirit of [simonw's](https://github.com/simonw/llm) and others.
+
+ - **[minichain](https://github.com/wrmsr/omlish/blob/master/ommlds/minichain)** - *A thing that does the things
+   langchain people use langchain to do.*
--- /dev/null
+++ b/README.md
@@ -0,0 +1,11 @@
+ # Overview
+
+ ML / AI code.
+
+ # Notable packages
+
+ - **[cli](https://github.com/wrmsr/omlish/blob/master/ommlds/cli)** (cli: `om mc`) - A general-purpose AI CLI, inspired
+   by and in the spirit of [simonw's](https://github.com/simonw/llm) and others.
+
+ - **[minichain](https://github.com/wrmsr/omlish/blob/master/ommlds/minichain)** - *A thing that does the things
+   langchain people use langchain to do.*
--- /dev/null
+++ b/ommlds/nanochat/rustbpe/Cargo.lock
@@ -0,0 +1,458 @@
+ # This file is automatically @generated by Cargo.
+ # It is not intended for manual editing.
+ version = 4
+
+ [[package]]
+ name = "ahash"
+ version = "0.8.12"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
+ dependencies = [
+  "cfg-if",
+  "getrandom",
+  "once_cell",
+  "version_check",
+  "zerocopy",
+ ]
+
+ [[package]]
+ name = "aho-corasick"
+ version = "1.1.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+ dependencies = [
+  "memchr",
+ ]
+
+ [[package]]
+ name = "arc-swap"
+ version = "1.7.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+
+ [[package]]
+ name = "autocfg"
+ version = "1.5.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+ [[package]]
+ name = "bit-set"
+ version = "0.8.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+ dependencies = [
+  "bit-vec",
+ ]
+
+ [[package]]
+ name = "bit-vec"
+ version = "0.8.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
+ [[package]]
+ name = "castaway"
+ version = "0.2.4"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a"
+ dependencies = [
+  "rustversion",
+ ]
+
+ [[package]]
+ name = "cfg-if"
+ version = "1.0.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9"
+
+ [[package]]
+ name = "compact_str"
+ version = "0.9.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a"
+ dependencies = [
+  "castaway",
+  "cfg-if",
+  "itoa",
+  "rustversion",
+  "ryu",
+  "static_assertions",
+ ]
+
+ [[package]]
+ name = "crossbeam-deque"
+ version = "0.8.6"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
+ dependencies = [
+  "crossbeam-epoch",
+  "crossbeam-utils",
+ ]
+
+ [[package]]
+ name = "crossbeam-epoch"
+ version = "0.9.18"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e"
+ dependencies = [
+  "crossbeam-utils",
+ ]
+
+ [[package]]
+ name = "crossbeam-utils"
+ version = "0.8.21"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
+
+ [[package]]
+ name = "dary_heap"
+ version = "0.3.7"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728"
+
+ [[package]]
+ name = "either"
+ version = "1.15.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
+ [[package]]
+ name = "equivalent"
+ version = "1.0.2"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+ [[package]]
+ name = "fancy-regex"
+ version = "0.16.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "bf04c5ec15464ace8355a7b440a33aece288993475556d461154d7a62ad9947c"
+ dependencies = [
+  "bit-set",
+  "regex-automata",
+  "regex-syntax",
+ ]
+
+ [[package]]
+ name = "getrandom"
+ version = "0.3.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
+ dependencies = [
+  "cfg-if",
+  "libc",
+  "r-efi",
+  "wasi",
+ ]
+
+ [[package]]
+ name = "hashbrown"
+ version = "0.15.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+
+ [[package]]
+ name = "heck"
+ version = "0.5.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
+
+ [[package]]
+ name = "indexmap"
+ version = "2.11.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9"
+ dependencies = [
+  "equivalent",
+  "hashbrown",
+ ]
+
+ [[package]]
+ name = "indoc"
+ version = "2.0.6"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
+
+ [[package]]
+ name = "itoa"
+ version = "1.0.15"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+
+ [[package]]
+ name = "libc"
+ version = "0.2.175"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
+
+ [[package]]
+ name = "log"
+ version = "0.4.28"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
+
+ [[package]]
+ name = "memchr"
+ version = "2.7.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
+
+ [[package]]
+ name = "memoffset"
+ version = "0.9.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
+ dependencies = [
+  "autocfg",
+ ]
+
+ [[package]]
+ name = "once_cell"
+ version = "1.21.3"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+ [[package]]
+ name = "portable-atomic"
+ version = "1.11.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
+
+ [[package]]
+ name = "proc-macro2"
+ version = "1.0.101"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
+ dependencies = [
+  "unicode-ident",
+ ]
+
+ [[package]]
+ name = "pyo3"
+ version = "0.23.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "7778bffd85cf38175ac1f545509665d0b9b92a198ca7941f131f85f7a4f9a872"
+ dependencies = [
+  "cfg-if",
+  "indoc",
+  "libc",
+  "memoffset",
+  "once_cell",
+  "portable-atomic",
+  "pyo3-build-config",
+  "pyo3-ffi",
+  "pyo3-macros",
+  "unindent",
+ ]
+
+ [[package]]
+ name = "pyo3-build-config"
+ version = "0.23.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "94f6cbe86ef3bf18998d9df6e0f3fc1050a8c5efa409bf712e661a4366e010fb"
+ dependencies = [
+  "once_cell",
+  "target-lexicon",
+ ]
+
+ [[package]]
+ name = "pyo3-ffi"
+ version = "0.23.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "e9f1b4c431c0bb1c8fb0a338709859eed0d030ff6daa34368d3b152a63dfdd8d"
+ dependencies = [
+  "libc",
+  "pyo3-build-config",
+ ]
+
+ [[package]]
+ name = "pyo3-log"
+ version = "0.12.4"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "45192e5e4a4d2505587e27806c7b710c231c40c56f3bfc19535d0bb25df52264"
+ dependencies = [
+  "arc-swap",
+  "log",
+  "pyo3",
+ ]
+
+ [[package]]
+ name = "pyo3-macros"
+ version = "0.23.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "fbc2201328f63c4710f68abdf653c89d8dbc2858b88c5d88b0ff38a75288a9da"
+ dependencies = [
+  "proc-macro2",
+  "pyo3-macros-backend",
+  "quote",
+  "syn",
+ ]
+
+ [[package]]
+ name = "pyo3-macros-backend"
+ version = "0.23.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "fca6726ad0f3da9c9de093d6f116a93c1a38e417ed73bf138472cf4064f72028"
+ dependencies = [
+  "heck",
+  "proc-macro2",
+  "pyo3-build-config",
+  "quote",
+  "syn",
+ ]
+
+ [[package]]
+ name = "quote"
+ version = "1.0.40"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+ dependencies = [
+  "proc-macro2",
+ ]
+
+ [[package]]
+ name = "r-efi"
+ version = "5.3.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+ [[package]]
+ name = "rayon"
+ version = "1.11.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
+ dependencies = [
+  "either",
+  "rayon-core",
+ ]
+
+ [[package]]
+ name = "rayon-core"
+ version = "1.13.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
+ dependencies = [
+  "crossbeam-deque",
+  "crossbeam-utils",
+ ]
+
+ [[package]]
+ name = "regex-automata"
+ version = "0.4.10"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6"
+ dependencies = [
+  "aho-corasick",
+  "memchr",
+  "regex-syntax",
+ ]
+
+ [[package]]
+ name = "regex-syntax"
+ version = "0.8.6"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001"
+
+ [[package]]
+ name = "rustbpe"
+ version = "0.1.0"
+ dependencies = [
+  "ahash",
+  "compact_str",
+  "dary_heap",
+  "fancy-regex",
+  "indexmap",
+  "log",
+  "pyo3",
+  "pyo3-log",
+  "rayon",
+ ]
+
+ [[package]]
+ name = "rustversion"
+ version = "1.0.22"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+ [[package]]
+ name = "ryu"
+ version = "1.0.20"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
+ [[package]]
+ name = "static_assertions"
+ version = "1.1.0"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
+
+ [[package]]
+ name = "syn"
+ version = "2.0.106"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
+ dependencies = [
+  "proc-macro2",
+  "quote",
+  "unicode-ident",
+ ]
+
+ [[package]]
+ name = "target-lexicon"
+ version = "0.12.16"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
+
+ [[package]]
+ name = "unicode-ident"
+ version = "1.0.18"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
+
+ [[package]]
+ name = "unindent"
+ version = "0.2.4"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3"
+
+ [[package]]
+ name = "version_check"
+ version = "0.9.5"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+ [[package]]
+ name = "wasi"
+ version = "0.14.4+wasi-0.2.4"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "88a5f4a424faf49c3c2c344f166f0662341d470ea185e939657aaff130f0ec4a"
+ dependencies = [
+  "wit-bindgen",
+ ]
+
+ [[package]]
+ name = "wit-bindgen"
+ version = "0.45.1"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "5c573471f125075647d03df72e026074b7203790d41351cd6edc96f46bcccd36"
+
+ [[package]]
+ name = "zerocopy"
+ version = "0.8.26"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
+ dependencies = [
+  "zerocopy-derive",
+ ]
+
+ [[package]]
+ name = "zerocopy-derive"
+ version = "0.8.26"
+ source = "registry+https://github.com/rust-lang/crates.io-index"
+ checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
+ dependencies = [
+  "proc-macro2",
+  "quote",
+  "syn",
+ ]
--- /dev/null
+++ b/ommlds/nanochat/rustbpe/Cargo.toml
@@ -0,0 +1,15 @@
+ [package]
+ name = "rustbpe"
+ version = "0.1.0"
+ edition = "2024"
+
+ [dependencies]
+ dary_heap = "0.3"
+ indexmap = "2.2"
+ fancy-regex = "0.16.1"
+ log = "0.4.28"
+ pyo3 = { version = "0.23.3", features = ["extension-module"] }
+ pyo3-log = "0.12.4"
+ ahash = "0.8.12"
+ rayon = "1.11.0"
+ compact_str = "0.9.0"
--- /dev/null
+++ b/ommlds/nanochat/rustbpe/LICENSE
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Andrej Karpathy
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
--- /dev/null
+++ b/ommlds/nanochat/rustbpe/README.md
@@ -0,0 +1,9 @@
+ # https://github.com/karpathy/nanochat/tree/9467d83cf23dcc9a9b4ca6e35103142f48a55b27
+
+ ---
+
+ # rustbpe
+
+ > The missing tiktoken training code
+
+ A very lightweight Rust library for training a GPT tokenizer. The issue is that the inference library [tiktoken](https://github.com/openai/tiktoken) is great, but only does inference. Separately, the huggingface [tokenizers](https://github.com/huggingface/tokenizers) library does training, but it is rather bloated and really hard to navigate because it has to support all the different historical baggage of how people dealt with tokenizers over the years. More recently, I also wrote the [minbpe](https://github.com/karpathy/minbpe) library which does both training and inference, but only in inefficient Python. Basically what I really want is a non-fancy, super simple, but still relatively efficient training code for GPT tokenizer (more efficient than minbpe, much cleaner/simpler than tokenizers), and then export the trained vocab for inference with tiktoken. Does that make sense? So here we are. There are more opportunities for optimization here, I just stopped a bit early because unlike minbpe before it, rustbpe is now simple and fast enough, and not a significant bottleneck for nanochat.
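
The train-here, infer-in-tiktoken workflow the README describes maps onto the small API of the extension module below. A minimal usage sketch, assuming the compiled `rustbpe` module is importable, that pyo3 hands the ranks back as `(bytes, int)` pairs, and using a hypothetical toy corpus and encoding name:

```python
import rustbpe    # the compiled extension module defined in src/lib.rs below
import tiktoken

# Train on any iterator of strings (hypothetical toy corpus; vocab_size >= 256).
tok = rustbpe.Tokenizer()
tok.train_from_iterator(iter(["hello world"] * 1000), vocab_size=300)

# Export the trained vocab for inference with tiktoken.
enc = tiktoken.Encoding(
    name="rustbpe-demo",                              # hypothetical name
    pat_str=tok.get_pattern(),                        # the GPT-4 split regex by default
    mergeable_ranks=dict(tok.get_mergeable_ranks()),  # token bytes -> rank
    special_tokens={},
)

# rustbpe's own greedy encoder should agree with tiktoken's.
assert tok.encode("hello world") == enc.encode("hello world")
```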
--- /dev/null
+++ b/ommlds/nanochat/rustbpe/src/lib.rs
@@ -0,0 +1,475 @@
+ use std::cmp::Ordering;
+ use std::collections::HashMap as StdHashMap;
+
+ use dary_heap::OctonaryHeap;
+ use fancy_regex::Regex;
+ use pyo3::prelude::*;
+
+ use ahash::{AHashMap, AHashSet};
+ use compact_str::CompactString;
+ use rayon::prelude::*;
+
+ // Default GPT-4 style regex pattern for splitting text
+ const GPT4_PATTERN: &str = r"'(?i:[sdmt]|ll|ve|re)|[^\r\n\p{L}\p{N}]?+\p{L}+|\p{N}{1,3}| ?[^\s\p{L}\p{N}]++[\r\n]*|\s*[\r\n]|\s+(?!\S)|\s+";
+
+ type Pair = (u32, u32);
+
+ /// A Byte Pair Encoding tokenizer that matches the GPT-4 style implementation
+ #[pyclass]
+ pub struct Tokenizer {
+     /// Maps pairs of token IDs to their merged token ID
+     pub merges: StdHashMap<Pair, u32>,
+     /// The regex pattern used for text splitting
+     pub pattern: String,
+     /// Compiled regex for efficiency
+     compiled_pattern: Regex,
+ }
+
+ // ------------------------ internal helpers ------------------------
+
+ #[derive(Clone, Debug)]
+ struct Word {
+     ids: Vec<u32>,
+ }
+
+ impl Word {
+     #[inline]
+     fn new(ids: Vec<u32>) -> Self {
+         Self { ids }
+     }
+
+     #[inline]
+     fn pairs<'a>(&'a self) -> impl Iterator<Item = Pair> + 'a {
+         self.ids.windows(2).map(|w| (w[0], w[1]))
+     }
+
+     /// Merge all non-overlapping occurrences of pair -> new_id.
+     /// Returns a small Vec of local pair-count deltas for THIS word only:
+     /// -1 for removed pairs, +1 for newly created pairs.
+     ///
+     /// NOTE: this version deliberately avoids a HashMap in the hot loop.
+     fn merge_pair(&mut self, pair: Pair, new_id: u32) -> Vec<(Pair, i32)> {
+         let (a, b) = pair;
+         let n = self.ids.len();
+         if n < 2 {
+             return Vec::new();
+         }
+
+         let mut out: Vec<u32> = Vec::with_capacity(n);
+         let mut deltas: Vec<(Pair, i32)> = Vec::with_capacity(6);
+
+         let mut i = 0;
+         while i < n {
+             if i + 1 < n && self.ids[i] == a && self.ids[i + 1] == b {
+                 let left = out.last().copied();
+                 let right = if i + 2 < n { Some(self.ids[i + 2]) } else { None };
+
+                 // remove old pairs
+                 if let Some(x) = left {
+                     deltas.push(((x, a), -1));
+                     deltas.push(((x, new_id), 1));
+                 }
+                 deltas.push(((a, b), -1));
+                 if let Some(y) = right {
+                     deltas.push(((b, y), -1));
+                     deltas.push(((new_id, y), 1));
+                 }
+
+                 // write merged token
+                 out.push(new_id);
+                 i += 2; // skip 'a' and 'b'
+             } else {
+                 out.push(self.ids[i]);
+                 i += 1;
+             }
+         }
+
+         self.ids = out;
+         deltas
+     }
+ }
+
+ #[derive(Debug, Eq)]
+ struct MergeJob {
+     pair: Pair,
+     count: u64,
+     /// set of word indices where this pair may occur and needs processing
+     pos: AHashSet<usize>,
+ }
+
+ impl PartialEq for MergeJob {
+     fn eq(&self, other: &Self) -> bool {
+         self.count == other.count && self.pair == other.pair
+     }
+ }
+
+ impl PartialOrd for MergeJob {
+     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+         Some(self.cmp(other))
+     }
+ }
+
+ impl Ord for MergeJob {
+     fn cmp(&self, other: &Self) -> Ordering {
+         // Max-heap by count; tie-break to ascending pair order (deterministic)
+         if self.count != other.count {
+             self.count.cmp(&other.count)
+         } else {
+             // ascending order on the pair when counts tie
+             other.pair.cmp(&self.pair)
+         }
+     }
+ }
+
+ #[inline]
+ fn count_pairs_parallel(
+     words: &[Word],
+     counts: &[i32],
+ ) -> (AHashMap<Pair, i32>, AHashMap<Pair, AHashSet<usize>>) {
+     words
+         .par_iter()
+         .enumerate()
+         .map(|(i, w)| {
+             let mut local_pc: AHashMap<Pair, i32> = AHashMap::new();
+             let mut local_wtu: AHashMap<Pair, AHashSet<usize>> = AHashMap::new();
+             if w.ids.len() >= 2 && counts[i] != 0 {
+                 for (a, b) in w.pairs() {
+                     *local_pc.entry((a, b)).or_default() += counts[i];
+                     local_wtu.entry((a, b)).or_default().insert(i);
+                 }
+             }
+             (local_pc, local_wtu)
+         })
+         .reduce(
+             || (AHashMap::new(), AHashMap::new()),
+             |(mut acc_pc, mut acc_wtu), (pc, wtu)| {
+                 for (k, v) in pc {
+                     *acc_pc.entry(k).or_default() += v;
+                 }
+                 for (k, s) in wtu {
+                     acc_wtu.entry(k).or_default().extend(s);
+                 }
+                 (acc_pc, acc_wtu)
+             },
+         )
+ }
+
+ // ------------------------ END helpers ------------------------
+
+ impl Tokenizer {
+
+     /// Core incremental BPE training given unique words and their counts.
+     /// `words`: one entry per unique chunk (Vec<u32> of token-ids/bytes).
+     /// `counts`: same length as `words`, count per chunk.
+     fn train_core_incremental(&mut self, mut words: Vec<Word>, counts: Vec<i32>, vocab_size: u32) {
+         assert!(vocab_size >= 256, "vocab_size must be at least 256");
+         let num_merges = vocab_size - 256;
+         log::info!("Starting BPE training: {} merges to compute", num_merges);
+         self.merges.clear();
+
+         // ---- Initial pair_counts and where_to_update (parallel) ----
+         log::info!("Computing initial pair counts from {} unique sequences", words.len());
+         let (mut pair_counts, mut where_to_update) = count_pairs_parallel(&words, &counts);
+
+         // ---- Build heap ----
+         log::info!("Building heap with {} unique pairs", pair_counts.len());
+         let mut heap = OctonaryHeap::with_capacity(pair_counts.len());
+         for (pair, pos) in where_to_update.drain() {
+             let c = *pair_counts.get(&pair).unwrap_or(&0);
+             if c > 0 {
+                 heap.push(MergeJob {
+                     pair,
+                     count: c as u64,
+                     pos,
+                 });
+             }
+         }
+
+         // ---- Merge loop ----
+         log::info!("Starting merge loop");
+         let mut merges_done = 0u32;
+         let mut last_log_percent = 0u32;
+
+         while merges_done < num_merges {
+             let Some(mut top) = heap.pop() else { break; };
+
+             // Lazy refresh
+             let current = *pair_counts.get(&top.pair).unwrap_or(&0);
+             if top.count != current as u64 {
+                 top.count = current as u64;
+                 if top.count > 0 {
+                     heap.push(top);
+                 }
+                 continue;
+             }
+             if top.count == 0 {
+                 break;
+             }
+
+             // Record merge
+             let new_id = 256 + merges_done;
+             self.merges.insert(top.pair, new_id);
+
+             // Merge this pair in all words where it occurs
+             let mut local_pos_updates: AHashMap<Pair, AHashSet<usize>> = AHashMap::new();
+             for &word_idx in &top.pos {
+                 // Apply merge to this word and collect pair-count deltas
+                 let changes = words[word_idx].merge_pair(top.pair, new_id);
+                 // Update global pair counts based on this word's count
+                 for (pair, delta) in changes {
+                     let delta_total = delta * counts[word_idx];
+                     if delta_total != 0 {
+                         *pair_counts.entry(pair).or_default() += delta_total;
+                         if delta > 0 {
+                             local_pos_updates.entry(pair).or_default().insert(word_idx);
+                         }
+                     }
+                 }
+             }
+
+             // Add the updated pair counts back to the heap
+             for (pair, pos) in local_pos_updates {
+                 let cnt = *pair_counts.get(&pair).unwrap_or(&0);
+                 if cnt > 0 {
+                     heap.push(MergeJob {
+                         pair,
+                         count: cnt as u64,
+                         pos,
+                     });
+                 }
+             }
+
+             merges_done += 1;
+
+             // Log progress every 1%
+             let current_percent = (merges_done * 100) / num_merges;
+             if current_percent > last_log_percent {
+                 log::info!(
+                     "Progress: {}% ({}/{} merges) - Last merge: {:?} -> {} (frequency: {})",
+                     current_percent, merges_done, num_merges, top.pair, new_id, top.count
+                 );
+                 last_log_percent = current_percent;
+             }
+         }
+
+         log::info!("Finished training: {} merges completed", merges_done);
+     }
+ }
+
+ /// Public methods for the Tokenizer class that will be exposed to Python.
+ #[pymethods]
+ impl Tokenizer {
+     /// Create a new Tokenizer
+     #[new]
+     pub fn new() -> Self {
+         Self {
+             merges: StdHashMap::new(),
+             pattern: String::new(),
+             compiled_pattern: Regex::new("").expect("Empty regex should be valid"),
+         }
+     }
+
+     /// Train from a streaming iterator (parallel ingestion).
+     /// We refill a Rust Vec<String> buffer under the GIL, then release the GIL
+     /// to do the heavy splitting and counting **in parallel** with rayon.
+     #[pyo3(signature = (iterator, vocab_size, buffer_size=8192, pattern=None))]
+     #[pyo3(text_signature = "(self, iterator, vocab_size, buffer_size=8192, pattern=None)")]
+     pub fn train_from_iterator(
+         &mut self,
+         py: pyo3::Python<'_>,
+         iterator: &pyo3::Bound<'_, pyo3::PyAny>,
+         vocab_size: u32,
+         buffer_size: usize,
+         pattern: Option<String>,
+     ) -> PyResult<()> {
+         // Use provided pattern or default to GPT-4 pattern
+         let pattern_str = pattern.unwrap_or_else(|| GPT4_PATTERN.to_string());
+
+         // Update the stored pattern and compile it
+         self.pattern = pattern_str.clone();
+         self.compiled_pattern = Regex::new(&pattern_str)
+             .map_err(|e| pyo3::exceptions::PyValueError::new_err(format!("Invalid regex pattern: {}", e)))?;
+
+         // Prepare a true Python iterator object
+         let py_iter: pyo3::Py<pyo3::PyAny> = unsafe {
+             pyo3::Py::from_owned_ptr_or_err(py, pyo3::ffi::PyObject_GetIter(iterator.as_ptr()))?
+         };
+
+         // Global chunk counts
+         let mut counts: AHashMap<CompactString, i32> = AHashMap::new();
+
+         // Temporary buffer we refill under the GIL
+         let mut buf: Vec<String> = Vec::with_capacity(buffer_size);
+
+         log::info!("Processing sequences from iterator (buffer_size: {})", buffer_size);
+         let mut total_sequences = 0u64;
+
+         // Helper: refill `buf` with up to `buffer_size` strings from the Python iterator.
+         // Returns Ok(true) if the iterator is exhausted, Ok(false) otherwise.
+         let refill = |buf: &mut Vec<String>| -> PyResult<bool> {
+             pyo3::Python::with_gil(|py| {
+                 buf.clear();
+                 let it = py_iter.bind(py);
+                 loop {
+                     if buf.len() >= buffer_size {
+                         return Ok(false);
+                     }
+                     // next(it)
+                     let next_obj = unsafe {
+                         pyo3::Bound::from_owned_ptr_or_opt(py, pyo3::ffi::PyIter_Next(it.as_ptr()))
+                     };
+                     match next_obj {
+                         Some(obj) => {
+                             let s: String = obj.extract()?;
+                             buf.push(s);
+                         }
+                         None => {
+                             if pyo3::PyErr::occurred(py) {
+                                 return Err(pyo3::PyErr::fetch(py));
+                             } else {
+                                 return Ok(true); // exhausted
+                             }
+                         }
+                     }
+                 }
+             })
+         };
+
+         // Stream ingestion loop: refill under GIL, process without GIL (parallel)
+         loop {
+             let exhausted = refill(&mut buf)?;
+             if buf.is_empty() && exhausted {
+                 break;
+             }
+
+             total_sequences += buf.len() as u64;
+
+             let pattern = self.compiled_pattern.clone();
+             let local: AHashMap<CompactString, i32> = py.allow_threads(|| {
+                 buf.par_iter()
+                     .map(|s| {
+                         let mut m: AHashMap<CompactString, i32> = AHashMap::new();
+                         for mat in pattern.find_iter(s) {
+                             let piece = mat.expect("regex match failed").as_str();
+                             *m.entry(CompactString::from(piece)).or_default() += 1;
+                         }
+                         m
+                     })
+                     .reduce(
+                         || AHashMap::new(),
+                         |mut a, b| {
+                             for (k, v) in b {
+                                 *a.entry(k).or_default() += v;
+                             }
+                             a
+                         },
+                     )
+             });
+
+             // Merge local into global (single-threaded)
+             for (k, v) in local {
+                 *counts.entry(k).or_default() += v;
+             }
+
+             if exhausted {
+                 break;
+             }
+         }
+         log::info!("Processed {} sequences total, {} unique", total_sequences, counts.len());
+
+         // Materialize words & counts
+         let mut words = Vec::with_capacity(counts.len());
+         let mut cvec = Vec::with_capacity(counts.len());
+         for (chunk, c) in counts.into_iter() {
+             words.push(Word::new(chunk.as_bytes().iter().map(|&b| b as u32).collect()));
+             cvec.push(c);
+         }
+
+         self.train_core_incremental(words, cvec, vocab_size);
+         Ok(())
+     }
+
+     /// Return the regex pattern
+     pub fn get_pattern(&self) -> String {
+         self.pattern.clone()
+     }
+
+     /// Return the mergeable ranks (token bytes -> token id / rank)
+     pub fn get_mergeable_ranks(&self) -> Vec<(Vec<u8>, u32)> {
+         let mut mergeable_ranks = Vec::new();
+
+         // Build vocabulary incrementally from low to high token IDs
+         let mut token_bytes: Vec<Vec<u8>> = (0..256_u32).map(|i| vec![i as u8]).collect();
+
+         for (i, bytes) in token_bytes.iter().enumerate() {
+             mergeable_ranks.push((bytes.clone(), i as u32));
+         }
+
+         // Sort merges by token id (so we can reconstruct bytes progressively)
+         let mut sorted_merges: Vec<_> = self.merges.iter().collect();
+         sorted_merges.sort_by_key(|&(_, &token_id)| token_id);
+
+         for (&pair, &merged_id) in sorted_merges {
+             let (left, right) = pair;
+             let mut merged_bytes = token_bytes[left as usize].clone();
+             merged_bytes.extend(&token_bytes[right as usize]);
+
+             if token_bytes.len() <= merged_id as usize {
+                 token_bytes.resize(merged_id as usize + 1, Vec::new());
+             }
+             token_bytes[merged_id as usize] = merged_bytes.clone();
+
+             mergeable_ranks.push((merged_bytes, merged_id));
+         }
+
+         mergeable_ranks
+     }
+
+     /// Encode a string into token IDs
+     pub fn encode(&self, text: &str) -> Vec<u32> {
+         let mut all_ids = Vec::new();
+
+         // Split text using the regex pattern
+         for m in self.compiled_pattern.find_iter(text) {
+             let chunk = m.expect("regex match failed").as_str();
+
+             // Convert chunk to bytes then to u32 IDs
+             let mut ids: Vec<u32> = chunk.bytes().map(|b| b as u32).collect();
+
+             // Apply merges iteratively
+             while ids.len() >= 2 {
+                 // Find the best pair to merge
+                 let mut best_pair: Option<(usize, Pair, u32)> = None;
+
+                 for i in 0..ids.len() - 1 {
+                     let pair: Pair = (ids[i], ids[i + 1]);
+                     if let Some(&new_id) = self.merges.get(&pair) {
+                         if best_pair.is_none() || new_id < best_pair.unwrap().2 {
+                             best_pair = Some((i, pair, new_id));
+                         }
+                     }
+                 }
+
+                 // If we found a pair to merge, apply it
+                 if let Some((idx, _pair, new_id)) = best_pair {
+                     ids[idx] = new_id;
+                     ids.remove(idx + 1);
+                 } else {
+                     // No more merges possible
+                     break;
+                 }
+             }
+
+             all_ids.extend(ids);
+         }
+
+         all_ids
+     }
+ }
+
+ #[pymodule]
+ fn rustbpe(m: &Bound<'_, PyModule>) -> PyResult<()> {
+     pyo3_log::init(); // forwards Rust `log` to Python's `logging`
+     m.add_class::<Tokenizer>()?;
+     Ok(())
+ }
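
As an aside, the greedy loop in `encode` above repeatedly finds the adjacent pair whose merged token id is lowest (the earliest-learned merge) and applies it at its first occurrence. A pure-Python sketch of that same rule, assuming a `merges` dict shaped like the Rust `merges` field, i.e. `(id, id) -> merged id`, with per-chunk ids starting out as raw byte values:

```python
def encode_chunk(ids: list[int], merges: dict[tuple[int, int], int]) -> list[int]:
    """Greedy BPE: repeatedly apply the lowest-ranked (earliest-learned) merge."""
    while len(ids) >= 2:
        # Among all adjacent pairs with a known merge, pick the one with the
        # smallest merged id; min() keeps the leftmost occurrence on ties.
        best = min(
            ((i, merges[p]) for i, p in enumerate(zip(ids, ids[1:])) if p in merges),
            key=lambda t: t[1],
            default=None,
        )
        if best is None:
            break  # no merge applies anymore
        i, new_id = best
        ids[i:i + 2] = [new_id]  # replace the pair with its merged token
    return ids
```

Like the Rust version, this does one merge per scan, which is quadratic in chunk length; that is fine in practice because the split regex keeps chunks short.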
--- /dev/null
+++ b/ommlds_rs.egg-info/PKG-INFO
@@ -0,0 +1,30 @@
+ Metadata-Version: 2.4
+ Name: ommlds-rs
+ Version: 0.0.0.dev473
+ Summary: ommlds
+ Author: wrmsr
+ License-Expression: BSD-3-Clause
+ Project-URL: source, https://github.com/wrmsr/omlish
+ Classifier: Development Status :: 2 - Pre-Alpha
+ Classifier: Intended Audience :: Developers
+ Classifier: Operating System :: OS Independent
+ Classifier: Operating System :: POSIX
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.13
+ Requires-Python: >=3.13
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: ommlds==0.0.0.dev473
+ Dynamic: license-file
+
+ # Overview
+
+ ML / AI code.
+
+ # Notable packages
+
+ - **[cli](https://github.com/wrmsr/omlish/blob/master/ommlds/cli)** (cli: `om mc`) - A general-purpose AI CLI, inspired
+   by and in the spirit of [simonw's](https://github.com/simonw/llm) and others.
+
+ - **[minichain](https://github.com/wrmsr/omlish/blob/master/ommlds/minichain)** - *A thing that does the things
+   langchain people use langchain to do.*
--- /dev/null
+++ b/ommlds_rs.egg-info/SOURCES.txt
@@ -0,0 +1,14 @@
+ LICENSE
+ README.md
+ pyproject.toml
+ setup.py
+ ommlds/nanochat/rustbpe/Cargo.lock
+ ommlds/nanochat/rustbpe/Cargo.toml
+ ommlds/nanochat/rustbpe/LICENSE
+ ommlds/nanochat/rustbpe/README.md
+ ommlds/nanochat/rustbpe/src/lib.rs
+ ommlds_rs.egg-info/PKG-INFO
+ ommlds_rs.egg-info/SOURCES.txt
+ ommlds_rs.egg-info/dependency_links.txt
+ ommlds_rs.egg-info/requires.txt
+ ommlds_rs.egg-info/top_level.txt
--- /dev/null
+++ b/ommlds_rs.egg-info/requires.txt
@@ -0,0 +1 @@
+ ommlds==0.0.0.dev473
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,34 @@
+ [build-system]
+ requires = [
+     'setuptools',
+     'setuptools-rust',
+ ]
+ build-backend = 'setuptools.build_meta'
+
+ [project]
+ name = 'ommlds-rs'
+ authors = [
+     {name = 'wrmsr'},
+ ]
+ urls = {source = 'https://github.com/wrmsr/omlish'}
+ license = 'BSD-3-Clause'
+ readme = 'README.md'
+ requires-python = '>=3.13'
+ version = '0.0.0.dev473'
+ classifiers = [
+     'Development Status :: 2 - Pre-Alpha',
+     'Intended Audience :: Developers',
+     'Operating System :: OS Independent',
+     'Operating System :: POSIX',
+     'Programming Language :: Python :: 3',
+     'Programming Language :: Python :: 3.13',
+ ]
+ description = 'ommlds'
+ dependencies = [
+     'ommlds == 0.0.0.dev473',
+ ]
+
+ [tool.setuptools]
+
+ [tool.setuptools.packages.find]
+ include = []
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,40 @@
+ import setuptools as st
+ import setuptools_rust as st_rs
+
+
+ def _patch_sdist():
+     def _sdist_add_defaults(old, self):
+         import os.path
+
+         old(self)
+
+         if self.distribution.rust_extensions and len(self.distribution.rust_extensions) > 0:
+             build_rust = self.get_finalized_command('build_rust')  # noqa
+             for ext in build_rust.extensions:
+                 ext_dir = os.path.dirname(ext.path)
+                 for n in os.listdir(ext_dir):
+                     if n.startswith('.') or n == 'target':
+                         continue
+                     p = os.path.join(ext_dir, n)
+                     if os.path.isfile(p):
+                         self.filelist.append(p)
+                     elif os.path.isdir(p):
+                         self.filelist.extend(os.path.join(dp, f) for dp, dn, fn in os.walk(p) for f in fn)
+
+     # Sadly, we can't just subclass sdist and override it via cmdclass because manifest_maker calls
+     # `sdist.add_defaults` as an unbound function, not a bound method:
+     # https://github.com/pypa/setuptools/blob/9c4d383631d3951fcae0afd73b5d08ff5a262976/setuptools/command/egg_info.py#L581
+     from setuptools.command.sdist import sdist  # noqa
+     sdist.add_defaults = (lambda old: lambda sdist: _sdist_add_defaults(old, sdist))(sdist.add_defaults)  # noqa
+
+ _patch_sdist()
+
+
+ st.setup(
+     rust_extensions=[
+         st_rs.RustExtension(
+             'ommlds.nanochat.rustbpe',
+             path='ommlds/nanochat/rustbpe/Cargo.toml',
+         ),
+     ],
+ )