torch-rb 0.20.0 → 0.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/README.md +12 -10
- data/codegen/native_functions.yaml +286 -244
- data/ext/torch/device.cpp +3 -0
- data/ext/torch/ext.cpp +1 -2
- data/ext/torch/ivalue.cpp +2 -0
- data/ext/torch/nn.cpp +3 -1
- data/ext/torch/ruby_arg_parser.cpp +7 -3
- data/ext/torch/ruby_arg_parser.h +5 -2
- data/ext/torch/templates.h +18 -36
- data/ext/torch/tensor.cpp +11 -8
- data/ext/torch/torch.cpp +6 -3
- data/ext/torch/utils.h +3 -1
- data/lib/torch/nn/conv1d.rb +11 -3
- data/lib/torch/nn/conv2d.rb +11 -3
- data/lib/torch/nn/conv3d.rb +11 -3
- data/lib/torch/nn/convnd.rb +1 -1
- data/lib/torch/nn/embedding.rb +10 -3
- data/lib/torch/nn/embedding_bag.rb +10 -3
- data/lib/torch/nn/functional.rb +20 -6
- data/lib/torch/nn/functional_attention.rb +30 -15
- data/lib/torch/nn/multihead_attention.rb +17 -7
- data/lib/torch/nn/rnn_base.rb +10 -3
- data/lib/torch/nn/transformer.rb +19 -10
- data/lib/torch/nn/transformer_decoder_layer.rb +7 -4
- data/lib/torch/nn/transformer_encoder_layer.rb +7 -4
- data/lib/torch/version.rb +1 -1
- data/lib/torch.rb +1 -1
- metadata +3 -3
@@ -2,11 +2,14 @@ module Torch
|
|
2
2
|
module NN
|
3
3
|
class TransformerDecoderLayer < Module
|
4
4
|
def initialize(
|
5
|
-
d_model,
|
6
|
-
|
7
|
-
|
5
|
+
d_model,
|
6
|
+
n_head,
|
7
|
+
dim_feedforward: 2048,
|
8
|
+
dropout: 0.1,
|
9
|
+
activation: :relu,
|
10
|
+
layer_norm_eps: 1e-5,
|
11
|
+
batch_first: false
|
8
12
|
)
|
9
|
-
|
10
13
|
super()
|
11
14
|
|
12
15
|
@self_attn = MultiheadAttention.new(d_model, n_head, dropout: dropout, batch_first: batch_first)
|
@@ -2,11 +2,14 @@ module Torch
|
|
2
2
|
module NN
|
3
3
|
class TransformerEncoderLayer < Module
|
4
4
|
def initialize(
|
5
|
-
d_model,
|
6
|
-
|
7
|
-
|
5
|
+
d_model,
|
6
|
+
n_head,
|
7
|
+
dim_feedforward: 2048,
|
8
|
+
dropout: 0.1,
|
9
|
+
activation: :relu,
|
10
|
+
layer_norm_eps: 1e-5,
|
11
|
+
batch_first: false
|
8
12
|
)
|
9
|
-
|
10
13
|
super()
|
11
14
|
|
12
15
|
@self_attn = MultiheadAttention.new(d_model, n_head, dropout: dropout, batch_first: batch_first)
|
data/lib/torch/version.rb
CHANGED
data/lib/torch.rb
CHANGED
@@ -439,7 +439,7 @@ module Torch
|
|
439
439
|
# TODO check each dimensions for consistency in future
|
440
440
|
raise Error, "Inconsistent dimensions" if data.size != size.inject(1, :*)
|
441
441
|
|
442
|
-
#
|
442
|
+
# TODO move to C++
|
443
443
|
data = data.map { |v| v ? 1 : 0 } if options[:dtype] == :bool
|
444
444
|
|
445
445
|
_tensor(data, size, tensor_options(**options))
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: torch-rb
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.20.0
|
4
|
+
version: 0.21.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Andrew Kane
|
@@ -234,14 +234,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
|
|
234
234
|
requirements:
|
235
235
|
- - ">="
|
236
236
|
- !ruby/object:Gem::Version
|
237
|
-
version: '3.
|
237
|
+
version: '3.2'
|
238
238
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
239
239
|
requirements:
|
240
240
|
- - ">="
|
241
241
|
- !ruby/object:Gem::Version
|
242
242
|
version: '0'
|
243
243
|
requirements: []
|
244
|
-
rubygems_version: 3.6.
|
244
|
+
rubygems_version: 3.6.9
|
245
245
|
specification_version: 4
|
246
246
|
summary: Deep learning for Ruby, powered by LibTorch
|
247
247
|
test_files: []
|