torch-rb 0.19.1 → 0.21.0

This diff shows the changes between publicly released versions of the package as they appear in the public registry, and is provided for informational purposes only.
@@ -1,9 +1,16 @@
 module Torch
   module NN
     class RNNBase < Module
-      def initialize(mode, input_size, hidden_size, num_layers: 1, bias: true,
-        batch_first: false, dropout: 0.0, bidirectional: false)
-
+      def initialize(
+        mode,
+        input_size,
+        hidden_size,
+        num_layers: 1,
+        bias: true,
+        batch_first: false,
+        dropout: 0.0,
+        bidirectional: false
+      )
         super()
         @mode = mode
         @input_size = input_size
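The RNNBase parameters and defaults are unchanged; the constructor is simply reformatted to one parameter per line. As a quick illustration, a hedged sketch of building an RNN-family module through a subclass such as Torch::NN::LSTM (the subclass name and its pass-through of these keyword arguments are assumptions, not shown in this diff):

    # Hypothetical usage; assumes Torch::NN::LSTM forwards these keyword
    # arguments to RNNBase#initialize as listed above.
    lstm = Torch::NN::LSTM.new(
      10,                   # input_size
      20,                   # hidden_size
      num_layers: 2,
      bias: true,
      batch_first: true,
      dropout: 0.1,
      bidirectional: false
    )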
@@ -7,13 +7,18 @@ module Torch
   module NN
     class Transformer < Module
       def initialize(
-        d_model: 512, nhead: 8,
-        num_encoder_layers: 6, num_decoder_layers: 6,
-        dim_feedforward: 2048, dropout: 0.1, activation: :relu,
-        custom_encoder: nil, custom_decoder: nil,
-        layer_norm_eps: 1e-5, batch_first: false
+        d_model: 512,
+        nhead: 8,
+        num_encoder_layers: 6,
+        num_decoder_layers: 6,
+        dim_feedforward: 2048,
+        dropout: 0.1,
+        activation: :relu,
+        custom_encoder: nil,
+        custom_decoder: nil,
+        layer_norm_eps: 1e-5,
+        batch_first: false
       )
-
         super()
 
         @encoder =
@@ -60,11 +65,15 @@ module Torch
       end
 
       def forward(
-        src, tgt,
-        src_mask: nil, tgt_mask: nil, memory_mask: nil,
-        src_key_padding_mask: nil, tgt_key_padding_mask: nil, memory_key_padding_mask: nil
+        src,
+        tgt,
+        src_mask: nil,
+        tgt_mask: nil,
+        memory_mask: nil,
+        src_key_padding_mask: nil,
+        tgt_key_padding_mask: nil,
+        memory_key_padding_mask: nil
       )
-
         if (!batch_first? && src.size(1) != tgt.size(1)) ||
             (batch_first? && src.size(0) != tgt.size(0))
 
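Both Transformer signatures keep their parameters and defaults; only the layout changes. A minimal usage sketch, assuming the module is invoked with call and that tensor shapes follow the default batch_first: false convention of (sequence, batch, feature):

    # Sketch only; parameter names come from the signatures above, while the
    # call/forward behaviour and shapes are assumed to mirror PyTorch.
    model = Torch::NN::Transformer.new(d_model: 512, nhead: 8)
    src = Torch.rand(10, 32, 512)   # (source length, batch, d_model)
    tgt = Torch.rand(20, 32, 512)   # (target length, batch, d_model)
    out = model.call(src, tgt)      # optional masks (tgt_mask:, etc.) default to nil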
@@ -2,11 +2,14 @@ module Torch
   module NN
     class TransformerDecoderLayer < Module
       def initialize(
-        d_model, n_head,
-        dim_feedforward: 2048, dropout: 0.1, activation: :relu,
-        layer_norm_eps: 1e-5, batch_first: false
+        d_model,
+        n_head,
+        dim_feedforward: 2048,
+        dropout: 0.1,
+        activation: :relu,
+        layer_norm_eps: 1e-5,
+        batch_first: false
       )
-
         super()
 
         @self_attn = MultiheadAttention.new(d_model, n_head, dropout: dropout, batch_first: batch_first)
@@ -2,11 +2,14 @@ module Torch
   module NN
     class TransformerEncoderLayer < Module
       def initialize(
-        d_model, n_head,
-        dim_feedforward: 2048, dropout: 0.1, activation: :relu,
-        layer_norm_eps: 1e-5, batch_first: false
+        d_model,
+        n_head,
+        dim_feedforward: 2048,
+        dropout: 0.1,
+        activation: :relu,
+        layer_norm_eps: 1e-5,
+        batch_first: false
      )
-
         super()
 
         @self_attn = MultiheadAttention.new(d_model, n_head, dropout: dropout, batch_first: batch_first)
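The encoder and decoder layer constructors get the same one-parameter-per-line treatment with identical defaults. A hedged sketch of constructing an encoder layer, assuming its forward accepts a single input tensor plus optional masks as in PyTorch:

    # Sketch only; positional and keyword arguments taken from the signature above.
    layer = Torch::NN::TransformerEncoderLayer.new(
      512, 8,
      dim_feedforward: 2048, dropout: 0.1, batch_first: true
    )
    x = Torch.rand(32, 10, 512)   # (batch, seq, d_model) because batch_first: true
    y = layer.call(x)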
data/lib/torch/tensor.rb CHANGED
@@ -159,6 +159,7 @@ module Torch
 
     # TODO better compare?
     def <=>(other)
+      other = other.item if other.is_a?(Tensor)
       item <=> other
     end
 
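The <=> change unwraps a Tensor argument with item before comparing, so scalar tensors can now be compared with each other (previously the Numeric comparison returned nil and sort-style code failed). A quick sketch of the effect:

    a = Torch.tensor(1.0)
    b = Torch.tensor(2.0)
    a <=> b      # => -1 (was nil when other was a Tensor)
    [b, a].sort  # scalar tensors can now be sorted directly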
data/lib/torch/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Torch
-  VERSION = "0.19.1"
+  VERSION = "0.21.0"
 end
data/lib/torch.rb CHANGED
@@ -13,6 +13,12 @@ require_relative "torch/inspector"
 require_relative "torch/tensor"
 require_relative "torch/version"
 
+# distributions
+require_relative "torch/distributions/distribution"
+require_relative "torch/distributions/exponential_family"
+require_relative "torch/distributions/normal"
+require_relative "torch/distributions/utils"
+
 # optim
 require_relative "torch/optim/optimizer"
 require_relative "torch/optim/adadelta"
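The new requires add a Torch::Distributions namespace, including a Normal distribution. A hedged usage sketch, assuming the Ruby API mirrors PyTorch's torch.distributions.Normal (the method names sample and log_prob are an assumption, not confirmed by this diff):

    # Assumed API, modeled on torch.distributions.Normal in PyTorch.
    dist = Torch::Distributions::Normal.new(Torch.tensor([0.0]), Torch.tensor([1.0]))
    x = dist.sample          # draw a value from N(0, 1)
    dist.log_prob(x)         # log density at x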
@@ -433,7 +439,7 @@ module Torch
       # TODO check each dimensions for consistency in future
       raise Error, "Inconsistent dimensions" if data.size != size.inject(1, :*)
 
-      # TOOD move to C++
+      # TODO move to C++
       data = data.map { |v| v ? 1 : 0 } if options[:dtype] == :bool
 
       _tensor(data, size, tensor_options(**options))
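This hunk only corrects a comment typo (TOOD to TODO); the surrounding context shows the code path where Torch.tensor converts Ruby booleans to 0/1 before building a :bool tensor, e.g.:

    Torch.tensor([true, false, true], dtype: :bool)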
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: torch-rb
 version: !ruby/object:Gem::Version
-  version: 0.19.1
+  version: 0.21.0
 platform: ruby
 authors:
 - Andrew Kane
 bindir: bin
 cert_chain: []
-date: 2025-02-10 00:00:00.000000000 Z
+date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rice
@@ -64,6 +64,10 @@ files:
 - lib/torch-rb.rb
 - lib/torch.rb
 - lib/torch/device.rb
+- lib/torch/distributions/distribution.rb
+- lib/torch/distributions/exponential_family.rb
+- lib/torch/distributions/normal.rb
+- lib/torch/distributions/utils.rb
 - lib/torch/hub.rb
 - lib/torch/inspector.rb
 - lib/torch/nn/adaptive_avg_pool1d.rb
@@ -230,14 +234,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: '3.1'
+      version: '3.2'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.6.2
+rubygems_version: 3.6.9
 specification_version: 4
 summary: Deep learning for Ruby, powered by LibTorch
 test_files: []