torch-rb 0.9.1 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f5224c74f6e74ed04396dfa0414400af5cb20bc5e654320421116723ffcb8e83
-  data.tar.gz: 6a2881ddacb7610a231ebd5a1c24d0f71a2662f16f360102141dfd0893c13346
+  metadata.gz: f4665eec43d85fbf02ce75f4b268dbf001bfad7e3ae1ecace0e9911b651e2cc2
+  data.tar.gz: d11ee1386ce7feeea68333de6c361d8737a7164cfd1626abce3a511deecb2963
 SHA512:
-  metadata.gz: 045432235e1c691ce85fb937a0562d93b1d9bc312fc648d40dafcc24857eeec4f84e6ceab397793171b0046ccfd785a6caeba37902925dcff1f73c760dd57cec
-  data.tar.gz: 2520fa17dcd13be52aaf1256f431d2951f8113f862189b8691f877dcb48ac9f71ac70719600e27d8c8972494c0b7f11c0983c3330fed7ca6aeed552e8500ec22
+  metadata.gz: cf346bc03f36d4fc920151b0554c93c33a59d0fecd35c6f110dc862ad8b35c6b8641d306124505ddd012ce9d903363672e99772cc2bd7b981d962c9d00f08d3e
+  data.tar.gz: 265157846417fdc3c024e0f50d0b0a663d345ca423ad9233a7d0e722bf5134a8e15e1afce30248992155787484e80311e484c64c5787945b5f5a88625115479a
data/CHANGELOG.md CHANGED
@@ -1,3 +1,19 @@
+## 0.10.1 (2022-04-12)
+
+- Fixed `dtype`, `device`, and `layout` for `new_*` and `like_*` methods
+
+## 0.10.0 (2022-03-13)
+
+- Updated LibTorch to 1.11.0
+- Added `ParameterList`
+
+## 0.9.2 (2022-02-03)
+
+- Added support for setting `nil` gradient
+- Added checks when setting gradient
+- Fixed precision with `Torch.tensor` method
+- Fixed memory issue when creating tensor for `ByteStorage`
+
 ## 0.9.1 (2022-02-02)
 
 - Moved `like` methods to C++
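
The 0.9.2 and 0.10.1 entries are the most visible from the Ruby side. A minimal sketch of both, assuming the documented `grad=` setter and the option inheritance described above; the commented results are expectations, not captured output:

```ruby
require "torch"

x = Torch.tensor([1.0, 2.0, 3.0], dtype: :float64, requires_grad: true)
x.sum.backward
x.grad = nil            # 0.9.2: gradients can now be cleared by assigning nil

# 0.10.1: when dtype/device/layout are omitted, *_like (and new_*) methods
# fall back to the source tensor's options rather than the global default
y = Torch.ones_like(x)
y.dtype                 # expected: :float64, inherited from x
```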
data/README.md CHANGED
@@ -7,6 +7,7 @@ Check out:
 - [TorchVision](https://github.com/ankane/torchvision) for computer vision tasks
 - [TorchText](https://github.com/ankane/torchtext) for text and NLP tasks
 - [TorchAudio](https://github.com/ankane/torchaudio) for audio tasks
+- [TorchRec](https://github.com/ankane/torchrec-ruby) for recommendation systems
 
 [![Build Status](https://github.com/ankane/torch.rb/workflows/build/badge.svg?branch=master)](https://github.com/ankane/torch.rb/actions)
 
@@ -408,7 +409,8 @@ Here’s the list of compatible versions.
 
 Torch.rb | LibTorch
 --- | ---
-0.9.0+ | 1.10.0+
+0.10.0+ | 1.11.0+
+0.9.0-0.9.2 | 1.10.0-1.10.2
 0.8.0-0.8.3 | 1.9.0-1.9.1
 0.6.0-0.7.0 | 1.8.0-1.8.1
 0.5.0-0.5.3 | 1.7.0-1.7.1
data/codegen/function.rb CHANGED
@@ -37,7 +37,7 @@ class Function
   private
 
   def parse_func
-    input, output = func.split(/\s*->\s*/)
+    input, _, output = func.rpartition(/\s+->\s+/)
     [generate_params(input), generate_retvals(output)]
   end
 
@@ -52,7 +52,7 @@ class Function
         next
       end
 
-      type, name = i.split(/\s+/)
+      type, _, name = i.rpartition(/\s+/)
 
       if name.include?("=")
         name, default = name.split("=", 2)
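
Both `function.rb` changes swap a first-match `split` for a last-match `rpartition`, so the parse anchors on the final separator. A standalone illustration of the difference; the schema string is contrived for this example, not taken from native_functions.yaml:

```ruby
# Hypothetical schema where "->" also appears inside a default value
func = 'map(str fn="a->b", Tensor self) -> Tensor'

# split breaks at the first arrow, so the return type lands in the wrong half
input, output = func.split(/\s*->\s*/)
# input  => 'map(str fn="a'
# output => 'b", Tensor self)'

# rpartition splits at the last whitespace-delimited arrow and keeps the
# parameter list intact; the type/name parse benefits the same way
input, _, output = func.rpartition(/\s+->\s+/)
# input  => 'map(str fn="a->b", Tensor self)'
# output => 'Tensor'
```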
data/codegen/generate_functions.rb CHANGED
@@ -14,6 +14,7 @@ def generate_functions
   generate_files("fft", :define_singleton_method, functions[:fft])
   generate_files("linalg", :define_singleton_method, functions[:linalg])
   generate_files("special", :define_singleton_method, functions[:special])
+  generate_files("sparse", :define_singleton_method, functions[:sparse])
 end
 
 def load_functions
@@ -47,6 +48,7 @@ def group_functions(functions)
   linalg_functions, other_functions = other_functions.partition { |f| f.python_module == "linalg" }
   fft_functions, other_functions = other_functions.partition { |f| f.python_module == "fft" }
   special_functions, other_functions = other_functions.partition { |f| f.python_module == "special" }
+  sparse_functions, other_functions = other_functions.partition { |f| f.python_module == "sparse" }
   unexpected_functions, other_functions = other_functions.partition { |f| f.python_module }
   torch_functions = other_functions.select { |f| f.variants.include?("function") }
   tensor_functions = other_functions.select { |f| f.variants.include?("method") }
@@ -62,7 +64,8 @@ def group_functions(functions)
     nn: nn_functions,
     linalg: linalg_functions,
     fft: fft_functions,
-    special: special_functions
+    special: special_functions,
+    sparse: sparse_functions
   }
 end
 
@@ -136,6 +139,7 @@ def generate_attach_def(name, type, def_method)
   ruby_name = ruby_name.sub(/\Afft_/, "") if type == "fft"
   ruby_name = ruby_name.sub(/\Alinalg_/, "") if type == "linalg"
   ruby_name = ruby_name.sub(/\Aspecial_/, "") if type == "special"
+  ruby_name = ruby_name.sub(/\Asparse_/, "") if type == "sparse"
   ruby_name = name if name.start_with?("__")
 
   # cast for Ruby < 2.7 https://github.com/thisMagpie/fftw/issues/22#issuecomment-49508900
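
The three sparse hunks above wire the `sparse` python_module through the same pipeline as the linalg/fft/special groups, with the `sparse_` prefix stripped from the Ruby-facing name. A hedged usage sketch, assuming the functions surface under a `Torch::Sparse` module analogous to `Torch::Linalg`; the module name and the specific functions shown are assumptions, not confirmed by this diff:

```ruby
require "torch"

# Assumed to mirror torch.sparse_coo_tensor: a 2x3 sparse COO tensor
indices = Torch.tensor([[0, 1, 1], [2, 0, 2]])
values = Torch.tensor([3.0, 4.0, 5.0])
s = Torch.sparse_coo_tensor(indices, values, [2, 3])

# sparse_sum would surface as Torch::Sparse.sum once the prefix is stripped
Torch::Sparse.sum(s)
```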
@@ -289,7 +293,13 @@ def split_opt_params(params)
 end
 
 def generate_tensor_options(function, opt_params)
-  code = "\n const auto options = TensorOptions()"
+  new_function = function.base_name.start_with?("new_")
+  like_function = function.base_name.end_with?("_like")
+
+  code = String.new("")
+  code << "\n auto self = _r.tensor(0);" if like_function
+  code << "\n const auto options = TensorOptions()"
+
   order = ["dtype", "device", "layout", "requires_grad", "pin_memory"]
   opt_params.sort_by { |v| order.index(v[:name]) }.each do |opt|
     i = opt[:position]
@@ -300,12 +310,24 @@ def generate_tensor_options(function, opt_params)
         if function.base_name == "arange"
           "dtype(_r.scalartypeOptional(#{i}))"
         else
-          "dtype(_r.scalartype(#{i}))"
+          if new_function || like_function
+            "dtype(_r.scalartypeWithDefault(#{i}, self.scalar_type()))"
+          else
+            "dtype(_r.scalartype(#{i}))"
+          end
         end
       when "device"
-        "device(_r.device(#{i}))"
+        if new_function || like_function
+          "device(_r.deviceWithDefault(#{i}, self.device()))"
+        else
+          "device(_r.device(#{i}))"
+        end
       when "layout"
-        "layout(_r.layoutOptional(#{i}))"
+        if new_function || like_function
+          "layout(_r.layoutWithDefault(#{i}, self.layout()))"
+        else
+          "layout(_r.layoutOptional(#{i}))"
+        end
       when "requires_grad"
         "requires_grad(_r.toBool(#{i}))"
       when "pin_memory"