torch-rb 0.9.2 → 0.10.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: bd10ea76fc9cde319cad48a1b3bd298384c4f47812d9be5ee9016c44151458a0
- data.tar.gz: e8391c2bc4ccae67ca0c2e402e431e99a683023285952edbd3f011a18793dfb6
+ metadata.gz: 935c88d7ce2a1f9d0f12014dd4f79beee6d1c153bc2439f614f00f50ec60c286
+ data.tar.gz: d77145dd7944d0ec024e5bef00d6ba1d47e52efb74c1d731101b8b8e67c9a3c9
  SHA512:
- metadata.gz: c290b76e1d18e88c46ec2bd544471cc5922de4741a0095786ba00415b2fa6ae52023b0c296365a7af6a81143515c36ccd3dbe69d11aea5138fbcd81b6e687bf6
- data.tar.gz: bc35fd6f9c9ed38b12caf98158f61a6d9827e967c36f8fbcf46f64331cc96c03de3105f4bc0b5b70c911d25ce223ea0085c18537cff677e84bf3d564ce0058ab
+ metadata.gz: d9b4df4b90ff11b67d4c203a2e5f59037a6079dafb16ec02bae89a34f364ca86a5530dcb74adce48474c857ce2f2ce49323d96830fb30c843440b842b6e7fb50
+ data.tar.gz: a68bf5d89984861c847390a4aaea337bc86a43c01ee93de47f575b52a87b7281e3318fcf10d70f0db5013f9685a126a46d80e42f88bbf51080bd15fac1933f71
data/CHANGELOG.md CHANGED
@@ -1,3 +1,17 @@
+ ## 0.10.2 (2022-06-14)
+
+ - Improved numeric operations between scalars and tensors
+ - Fixed `dtype` of `cumsum` method
+
+ ## 0.10.1 (2022-04-12)
+
+ - Fixed `dtype`, `device`, and `layout` for `new_*` and `like_*` methods
+
+ ## 0.10.0 (2022-03-13)
+
+ - Updated LibTorch to 1.11.0
+ - Added `ParameterList`
+
  ## 0.9.2 (2022-02-03)

  - Added support for setting `nil` gradient
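
For context, a minimal sketch of the behavior the 0.10.2 entries describe. This is illustrative only; it assumes the public torch-rb API mirrors PyTorch here, and the commented results are expectations rather than captured output:

```ruby
require "torch"

# Numeric operations between scalars and tensors
x = Torch.tensor([1.0, 2.0, 3.0])
p x * 2 + 1   # scalar operands combine with a tensor elementwise

# dtype handling of cumsum: the optional dtype keyword can be omitted or given
p Torch.tensor([1, 2, 3]).cumsum(0).dtype                   # expect an integer dtype
p Torch.tensor([1, 2, 3]).cumsum(0, dtype: :float64).dtype  # expect float64
```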
data/README.md CHANGED
@@ -7,6 +7,7 @@ Check out:
  - [TorchVision](https://github.com/ankane/torchvision) for computer vision tasks
  - [TorchText](https://github.com/ankane/torchtext) for text and NLP tasks
  - [TorchAudio](https://github.com/ankane/torchaudio) for audio tasks
+ - [TorchRec](https://github.com/ankane/torchrec-ruby) for recommendation systems

  [![Build Status](https://github.com/ankane/torch.rb/workflows/build/badge.svg?branch=master)](https://github.com/ankane/torch.rb/actions)

@@ -408,7 +409,8 @@ Here’s the list of compatible versions.

  Torch.rb | LibTorch
  --- | ---
- 0.9.0+ | 1.10.0+
+ 0.10.0+ | 1.11.0+
+ 0.9.0-0.9.2 | 1.10.0-1.10.2
  0.8.0-0.8.3 | 1.9.0-1.9.1
  0.6.0-0.7.0 | 1.8.0-1.8.1
  0.5.0-0.5.3 | 1.7.0-1.7.1
@@ -419,13 +421,25 @@ Torch.rb | LibTorch

  ### Homebrew

- For Mac, you can use Homebrew.
+ You can also use Homebrew.

  ```sh
  brew install libtorch
  ```

- Then install the gem (no need for `bundle config`).
+ For Mac ARM, run:
+
+ ```sh
+ bundle config build.torch-rb --with-torch-dir=/opt/homebrew
+ ```
+
+ And for Linux, run:
+
+ ```sh
+ bundle config build.torch-rb --with-torch-dir=/home/linuxbrew/.linuxbrew
+ ```
+
+ Then install the gem.

  ## Performance

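
Once LibTorch is installed and `bundle config` is set, a short Ruby check is a quick way to confirm the gem built against it. This is a sketch: it assumes the gem is added to the Gemfile as `gem "torch-rb"` (as in the README) and that the gem defines `Torch::VERSION`:

```ruby
# Gemfile entry (from the README):
#   gem "torch-rb"

require "torch"

puts Torch::VERSION     # expect a 0.10.x release
p Torch.ones([2, 3])    # constructing a tensor confirms LibTorch is linked
```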
data/codegen/function.rb CHANGED
@@ -37,7 +37,7 @@ class Function
  private

  def parse_func
- input, output = func.split(/\s*->\s*/)
+ input, _, output = func.rpartition(/\s+->\s+/)
  [generate_params(input), generate_retvals(output)]
  end

@@ -52,7 +52,7 @@ class Function
  next
  end

- type, name = i.split(/\s+/)
+ type, _, name = i.rpartition(/\s+/)

  if name.include?("=")
  name, default = name.split("=", 2)
@@ -60,7 +60,7 @@ class Function

  optional = false
  if type.include?("?")
- optional = true unless ["dtype", "device", "layout", "pin_memory"].include?(name)
+ optional = true
  type = type.delete("?")
  end

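
The switch from `split` to `rpartition` matters because `rpartition` splits only at the last match. A standalone sketch with a made-up schema string (not taken from `native_functions.yaml`) shows the difference:

```ruby
func = "foo(Tensor(a -> *) self, int dim=0) -> Tensor"

# split cuts at every match, so the "->" inside the annotation breaks the parse
p func.split(/\s*->\s*/)
# => ["foo(Tensor(a", "*) self, int dim=0)", "Tensor"]

# rpartition cuts once, at the last match, keeping input and output intact
input, _, output = func.rpartition(/\s+->\s+/)
p input   # => "foo(Tensor(a -> *) self, int dim=0)"
p output  # => "Tensor"

# the same idea applies to "type name" pairs: split at the last whitespace run
type, _, name = "Tensor(a -> *) self".rpartition(/\s+/)
p [type, name]  # => ["Tensor(a -> *)", "self"]
```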
data/codegen/generate_functions.rb CHANGED
@@ -14,6 +14,7 @@ def generate_functions
  generate_files("fft", :define_singleton_method, functions[:fft])
  generate_files("linalg", :define_singleton_method, functions[:linalg])
  generate_files("special", :define_singleton_method, functions[:special])
+ generate_files("sparse", :define_singleton_method, functions[:sparse])
  end

  def load_functions
@@ -47,6 +48,7 @@ def group_functions(functions)
  linalg_functions, other_functions = other_functions.partition { |f| f.python_module == "linalg" }
  fft_functions, other_functions = other_functions.partition { |f| f.python_module == "fft" }
  special_functions, other_functions = other_functions.partition { |f| f.python_module == "special" }
+ sparse_functions, other_functions = other_functions.partition { |f| f.python_module == "sparse" }
  unexpected_functions, other_functions = other_functions.partition { |f| f.python_module }
  torch_functions = other_functions.select { |f| f.variants.include?("function") }
  tensor_functions = other_functions.select { |f| f.variants.include?("method") }
@@ -62,7 +64,8 @@ def group_functions(functions)
  nn: nn_functions,
  linalg: linalg_functions,
  fft: fft_functions,
- special: special_functions
+ special: special_functions,
+ sparse: sparse_functions
  }
  end

@@ -136,6 +139,7 @@ def generate_attach_def(name, type, def_method)
  ruby_name = ruby_name.sub(/\Afft_/, "") if type == "fft"
  ruby_name = ruby_name.sub(/\Alinalg_/, "") if type == "linalg"
  ruby_name = ruby_name.sub(/\Aspecial_/, "") if type == "special"
+ ruby_name = ruby_name.sub(/\Asparse_/, "") if type == "sparse"
  ruby_name = name if name.start_with?("__")

  # cast for Ruby < 2.7 https://github.com/thisMagpie/fftw/issues/22#issuecomment-49508900
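
The grouping step above is plain Ruby: each parsed native function carries a `python_module`, and `partition` peels off one module at a time before the sparse functions get their own generated file. A toy sketch of that idea (the struct and data are illustrative, not the real codegen objects):

```ruby
Fn = Struct.new(:name, :python_module)

functions = [
  Fn.new("sparse_sum", "sparse"),
  Fn.new("fft_fft", "fft"),
  Fn.new("add", nil)
]

sparse_functions, other_functions = functions.partition { |f| f.python_module == "sparse" }
p sparse_functions.map(&:name)  # => ["sparse_sum"]
p other_functions.map(&:name)   # => ["fft_fft", "add"]

# generate_attach_def then strips the module prefix for the Ruby-facing name
p "sparse_sum".sub(/\Asparse_/, "")  # => "sum"
```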
@@ -289,7 +293,13 @@ def split_opt_params(params)
  end

  def generate_tensor_options(function, opt_params)
- code = "\n const auto options = TensorOptions()"
+ new_function = function.base_name.start_with?("new_")
+ like_function = function.base_name.end_with?("_like")
+
+ code = String.new("")
+ code << "\n auto self = _r.tensor(0);" if like_function
+ code << "\n const auto options = TensorOptions()"
+
  order = ["dtype", "device", "layout", "requires_grad", "pin_memory"]
  opt_params.sort_by { |v| order.index(v[:name]) }.each do |opt|
  i = opt[:position]
@@ -300,12 +310,24 @@ def generate_tensor_options(function, opt_params)
  if function.base_name == "arange"
  "dtype(_r.scalartypeOptional(#{i}))"
  else
- "dtype(_r.scalartype(#{i}))"
+ if new_function || like_function
+ "dtype(_r.scalartypeWithDefault(#{i}, self.scalar_type()))"
+ else
+ "dtype(_r.scalartype(#{i}))"
+ end
  end
  when "device"
- "device(_r.device(#{i}))"
+ if new_function || like_function
+ "device(_r.deviceWithDefault(#{i}, self.device()))"
+ else
+ "device(_r.device(#{i}))"
+ end
  when "layout"
- "layout(_r.layoutOptional(#{i}))"
+ if new_function || like_function
+ "layout(_r.layoutWithDefault(#{i}, self.layout()))"
+ else
+ "layout(_r.layoutOptional(#{i}))"
+ end
  when "requires_grad"
  "requires_grad(_r.toBool(#{i}))"
  when "pin_memory"
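
From the user's side, the effect of this change is that `new_*` and `*_like` methods fall back to the source tensor's `dtype`, `device`, and `layout` instead of the global defaults, which is the 0.10.1 changelog entry above. A sketch of the expected behavior, with results noted as expectations rather than captured output:

```ruby
require "torch"

x = Torch.ones([2, 3], dtype: :float64)

# new_* methods default to x's options when none are given
y = x.new_zeros([2, 2])
p y.dtype                                   # expect float64, matching x

# *_like methods behave the same way, unless overridden explicitly
z = Torch.zeros_like(x)
p z.dtype                                   # expect float64
p Torch.zeros_like(x, dtype: :int64).dtype  # expect int64
```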