onnx-red-chainer 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +6 -0
- data/Gemfile.lock +35 -0
- data/LICENSE.txt +21 -0
- data/README.md +39 -0
- data/Rakefile +10 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/exe/onnx-red-chainer +5 -0
- data/lib/onnx-chainer.rb +16 -0
- data/lib/onnx-chainer/cli.rb +36 -0
- data/lib/onnx-chainer/graph.rb +125 -0
- data/lib/onnx-chainer/operator.rb +24 -0
- data/lib/onnx-chainer/operators/gemm.rb +44 -0
- data/lib/onnx-chainer/operators/relu.rb +35 -0
- data/lib/onnx-chainer/proto/onnx.proto +483 -0
- data/lib/onnx-chainer/proto/onnx_pb.rb +170 -0
- data/lib/onnx-chainer/version.rb +3 -0
- data/onnx-red-chainer.gemspec +31 -0
- metadata +147 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: c8aa77d35c9473d25f6528d912569b9c87edd9f011ff84e5542590dff65ca4a6
  data.tar.gz: ea3066bfb22cb81e82983347419143290347efdc79ebe2cce2d869ad90387de9
SHA512:
  metadata.gz: 285dd52ec849dbe6be77653556748e55256e2c1d19fe93c474b782ea54b20154dbc4e23cfa881de69a49522d7fcf12766ea6193a06effe10091c18a4f625c2de
  data.tar.gz: 51afc59d523458206594b2f60ee6750ab2212ae106bf28ff8eee3c5e2a35ef7b03c09d1acbb23ab153830d5e2fbdde3bc610675061917a8f8fbadceef3f1e524
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,35 @@
PATH
  remote: .
  specs:
    onnx-red-chainer (0.1.0)
      google-protobuf
      numo-narray
      red-chainer

GEM
  remote: https://rubygems.org/
  specs:
    csv (3.0.8)
    google-protobuf (3.7.1)
    minitest (5.11.3)
    numo-narray (0.9.1.4)
    rake (10.5.0)
    red-chainer (0.4.1)
      numo-narray (>= 0.9.1.1)
      red-datasets (>= 0.0.6)
    red-datasets (0.0.8)
      csv (>= 3.0.5)
      rubyzip
    rubyzip (1.2.2)

PLATFORMS
  ruby

DEPENDENCIES
  bundler (~> 1.17)
  minitest (~> 5.0)
  onnx-red-chainer!
  rake (~> 10.0)

BUNDLED WITH
   1.17.2
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2019 hatappi

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,39 @@
# Onnx RedChainer

Welcome to your new gem! In this directory, you'll find the files you need to be able to package up your Ruby library into a gem. Put your Ruby code in the file `lib/onnx/chainer`. To experiment with that code, run `bin/console` for an interactive prompt.

TODO: Delete this and the text above, and describe your gem

## Installation

Add this line to your application's Gemfile:

```ruby
gem 'onnx-red-chainer'
```

And then execute:

    $ bundle

Or install it yourself as:

    $ gem install onnx-red-chainer

## Usage

TODO: Write usage instructions here

## Development

After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake test` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).

## Contributing

Bug reports and pull requests are welcome on GitHub at https://github.com/[USERNAME]/onnx-red-chainer.

## License

The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
data/Rakefile
ADDED
data/bin/console
ADDED
@@ -0,0 +1,14 @@
#!/usr/bin/env ruby

require "bundler/setup"
require "onnx/chainer"

# You can add fixtures and/or initialization code here to make experimenting
# with your gem easier. You can also use a different console, if you like.

# (If you use this, don't forget to add pry to your Gemfile!)
# require "pry"
# Pry.start

require "irb"
IRB.start(__FILE__)
data/bin/setup
ADDED
data/lib/onnx-chainer.rb
ADDED
@@ -0,0 +1,16 @@
require 'chainer'
require "onnx-chainer/version"
require "onnx-chainer/graph"
require "onnx-chainer/proto/onnx_pb"

module OnnxChainer
  class Error < StandardError; end

  def self.parse_file(onnx_path)
    raise "File not found. #{onnx_path}" if onnx_path.nil? || !File.exists?(onnx_path)

    m = Onnx::ModelProto.decode(File.read(onnx_path))

    OnnxChainer::Graph.parse(m.graph)
  end
end
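For orientation, a minimal sketch of how this entry point is meant to be driven from Ruby; the model path, output directory, and model name below are hypothetical.

```ruby
require 'onnx-chainer'

# Decode the ONNX file and build an OnnxChainer::Graph (see graph.rb below).
graph = OnnxChainer.parse_file('mnist.onnx') # hypothetical path

# Writes model.rb (a Chainer::Chain subclass) plus a Marshal'd parameter
# dump named "resume" into the output directory.
graph.export(output_dir: 'out', model_name: 'mnist')
```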
data/lib/onnx-chainer/cli.rb
ADDED
@@ -0,0 +1,36 @@
# frozen_string_literal: true

require 'onnx-chainer'
require 'json'
require 'optparse'
require 'pathname'

module OnnxChainer
  class CLI
    def self.start(argv)
      new(argv).run
    end

    def initialize(argv)
      @argv = argv.dup
      @parser = OptionParser.new do |opts|
        opts.banner = 'onnx-red-chainer [OPTIONS] FILE'
        opts.version = VERSION
        opts.on('-o', '--output_dir=OUTPUT_DIR', 'output path') { |v| @output = v }
        opts.on('-m', '--model_name=MODEL_NAME', 'Model name') { |v| @model_name = v }
        opts.on('-h', '--help', 'show help') { @help = true }
      end
      @onnx_path = argv.pop
      @parser.parse!(argv)
    end

    def run
      if @help || @argv.empty?
        puts @parser.help
      else
        graph = OnnxChainer.parse_file(@onnx_path)
        graph.export(output_dir: @output, model_name: @model_name)
      end
    end
  end
end
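The same conversion can be driven through this CLI class; a sketch (file names hypothetical) equivalent to running `onnx-red-chainer -o out -m mnist mnist.onnx` from the shell:

```ruby
require 'onnx-chainer/cli'

# Parses the flags, pops the trailing FILE argument, then calls
# OnnxChainer.parse_file and Graph#export as shown above.
OnnxChainer::CLI.start(['-o', 'out', '-m', 'mnist', 'mnist.onnx'])
```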
data/lib/onnx-chainer/graph.rb
ADDED
@@ -0,0 +1,125 @@
require 'onnx-chainer/operators/gemm'
require 'onnx-chainer/operators/relu'
require 'numo/narray'

module OnnxChainer
  class Graph
    attr_reader :nodes, :input_names, :output_names

    class << self
      def parse(onnx_graph)
        nodes = onnx_graph.node
        initializers = onnx_graph.initializer
        outputs = onnx_graph.output

        # take out input
        initializer_names = onnx_graph.initializer.map(&:name)
        call_inputs = onnx_graph.input.reject { |i| initializer_names.include?(i.name) }
        name = 'x'
        input_names = call_inputs.each_with_object({}) do |i, hash|
          hash[i.name] = name
          name = name.succ
        end

        # parse each node
        output_name_index = {}
        nodes = nodes.map do |n|
          output_name_index[n.op_type] ||= 1
          klass = operator_klass(n.op_type)
          i_names = n.input.reject { |i| initializers.map(&:name).include?(i) }

          node = klass.parse(n, i_names, onnx_graph.input, output_name_index[n.op_type])

          output_name_index[n.op_type] += 1
          node
        end

        # take out output
        output_names = {}
        nodes.each { |n| output_names.merge!(n.output_names) }

        # parameter
        target = {}
        onnx_graph.initializer.each do |initializer|
          name = initializer.name
          dtype = dtype(initializer.data_type)

          arr = dtype.from_binary(initializer.raw_data).reshape(*initializer.dims)

          n = name.split('_')
          target["/@#{n[1].downcase}/@#{n[2].downcase}"] = dtype.from_binary(initializer.raw_data).reshape(*initializer.dims)
        end

        self.new(onnx_graph.name, nodes, input_names, output_names, target)
      end

      private

      def operator_klass(op_type)
        case op_type
        when 'Gemm' then
          return OnnxChainer::Operators::Gemm
        when 'Relu' then
          return OnnxChainer::Operators::Relu
        end
      end

      def dtype(data_type)
        if data_type == Onnx::TensorProto::DataType::FLOAT
          Numo::SFloat
        elsif data_type == Onnx::TensorProto::DataType::INT8
          Numo::Int8
        else
          raise TypeError, 'unexpected value ' + data_type
        end
      end
    end

    def initialize(model_name, nodes, input_names, output_names, target)
      @model_name = model_name
      @nodes = nodes
      @input_names = input_names
      @output_names = output_names
      @target = target
    end

    # export file
    def export(output_dir: nil, model_name: nil)
      model_name = model_name || @model_name
      model_name = model_name.capitalize.gsub(/(?:^|_)(.)/){$1.upcase}

      output_dir ||= '.'
      FileUtils.mkdir(output_dir) unless Dir.exist?(output_dir)

      s = <<EOS
require 'chainer'

class #{model_name} < Chainer::Chain
  def initialize()
    super()
    init_scope do
      #{@nodes.select(&:need_initialized).map(&:to_initialize_string).join("\n      ")}
    end
  end

  def call(#{@input_names.values.join(', ')})
    #{
      @nodes.map do |n|
        args = n.input_names.map { |name| @input_names[name] || @output_names[name] }
        n.to_call_string(args)
      end.join("\n    ")
    }
  end
end
EOS

      File.open("#{output_dir}/model.rb", 'w') do |f|
        f.puts(s)
      end

      File.open("#{output_dir}/resume", 'wb+') do |f|
        Marshal.dump(@target, f)
      end
    end
  end
end
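To make the heredoc template above concrete, this is roughly what the emitted `model.rb` looks like for a hypothetical two-Linear/one-Relu graph exported with `model_name: 'mnist'` (class and variable names depend on the parsed graph):

```ruby
require 'chainer'

class Mnist < Chainer::Chain
  def initialize()
    super()
    init_scope do
      @l1 = Chainer::Links::Connection::Linear.new(nil, out_size: [100])
      @l2 = Chainer::Links::Connection::Linear.new(nil, out_size: [10])
    end
  end

  def call(x)
    l1 = @l1.(x)
    r1 = Chainer::Functions::Activation::Relu.relu(l1)
    l2 = @l2.(r1)
  end
end
```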
data/lib/onnx-chainer/operator.rb
ADDED
@@ -0,0 +1,24 @@
module OnnxChainer
  class Operator
    attr_reader :need_initialized,
                :output_names,
                :input_names

    class << self
      def parse(node)
      end
    end

    def chainer_class
      raise NotImplementedError
    end

    def to_initialize_string
      raise NotImplementedError
    end

    def to_call_string
      raise NotImplementedError
    end
  end
end
data/lib/onnx-chainer/operators/gemm.rb
ADDED
@@ -0,0 +1,44 @@
require 'onnx-chainer/operator'

module OnnxChainer
  module Operators
    class Gemm < Operator
      class << self
        def parse(node, input_names, inputs, output_name_index)
          bias_name = node.input.find { |i| i.match(/_b$/) }
          input = inputs.find { |i| i.name == bias_name }
          output_shape = input.type.tensor_type.shape.dim.map(&:dim_value)

          need_initialized = node.input.any? { |i| inputs.map(&:name).include?(i) }

          output_names = {
            node.output.first => "l#{output_name_index}"
          }
          instance_variable_name = "@l#{output_name_index}"

          self.new(input_names: input_names, output_shape: output_shape, output_names: output_names, instance_variable_name: instance_variable_name, need_initialized: need_initialized)
        end
      end

      def initialize(input_names:, output_shape:, output_names:, instance_variable_name:, need_initialized:)
        @input_names = input_names
        @output_shape = output_shape
        @output_names = output_names
        @instance_variable_name = instance_variable_name
        @need_initialized = need_initialized
      end

      def chainer_class
        ::Chainer::Links::Connection::Linear
      end

      def to_initialize_string
        "#{@instance_variable_name} = #{chainer_class}.new(nil, out_size: #{@output_shape})"
      end

      def to_call_string(args)
        "#{@output_names.values.first} = #{@instance_variable_name}.(#{args.join(', ')})"
      end
    end
  end
end
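A quick sketch of the strings this operator contributes to the generated model; the constructor arguments here are made-up stand-ins for what `Gemm.parse` extracts from a real node:

```ruby
gemm = OnnxChainer::Operators::Gemm.new(
  input_names: ['input_0'],           # hypothetical ONNX value name
  output_shape: [10],
  output_names: { 'gemm_0' => 'l1' }, # hypothetical ONNX output name
  instance_variable_name: '@l1',
  need_initialized: true
)
gemm.to_initialize_string  # => "@l1 = Chainer::Links::Connection::Linear.new(nil, out_size: [10])"
gemm.to_call_string(['x']) # => "l1 = @l1.(x)"
```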
data/lib/onnx-chainer/operators/relu.rb
ADDED
@@ -0,0 +1,35 @@
require 'onnx-chainer/operator'

module OnnxChainer
  module Operators
    class Relu < Operator
      class << self
        def parse(node, input_names, inputs, output_name_index)
          need_initialized = node.input.any? { |i| inputs.map(&:name).include?(i) }

          output_names = {
            node.output.first => "r#{output_name_index}"
          }
          instance_variable_name = "@r#{output_name_index}"

          self.new(input_names: input_names, output_names: output_names, instance_variable_name: instance_variable_name, need_initialized: need_initialized)
        end
      end

      def initialize(input_names:, output_names:, instance_variable_name:, need_initialized:)
        @input_names = input_names
        @output_names = output_names
        @instance_variable_name = instance_variable_name
        @need_initialized = need_initialized
      end

      def chainer_class
        ::Chainer::Functions::Activation::Relu
      end

      def to_call_string(args)
        "#{@output_names.values.first} = #{chainer_class}.relu(#{args.join(', ')})"
      end
    end
  end
end
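And the corresponding sketch for Relu, which only contributes a call line (again with made-up names):

```ruby
relu = OnnxChainer::Operators::Relu.new(
  input_names: ['gemm_0'],            # hypothetical ONNX value name
  output_names: { 'relu_0' => 'r1' },
  instance_variable_name: '@r1',
  need_initialized: false
)
relu.to_call_string(['l1']) # => "r1 = Chainer::Functions::Activation::Relu.relu(l1)"
```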
data/lib/onnx-chainer/proto/onnx.proto
ADDED
@@ -0,0 +1,483 @@
//
// WARNING: This file is automatically generated! Please edit onnx.in.proto.
//


// Copyright (c) ONNX Project Contributors.
// Licensed under the MIT license.

syntax = "proto2";

package onnx;

// Overview
//
// ONNX is an open specification that is comprised of the following components:
//
// 1) A definition of an extensible computation graph model.
// 2) Definitions of standard data types.
// 3) Definitions of built-in operators.
//
// This document describes the syntax of models and their computation graphs,
// as well as the standard data types. Together, they are referred to as the ONNX
// Intermediate Representation, or 'IR' for short.
//
// The normative semantic specification of the ONNX IR is found in docs/IR.md.
// Definitions of the built-in neural network operators may be found in docs/Operators.md.

// Notes
//
// Release
//
// We are still in the very early stage of defining ONNX. The current
// version of ONNX is a starting point. While we are actively working
// towards a complete spec, we would like to get the community involved
// by sharing our working version of ONNX.
//
// Protobuf compatibility
//
// To simplify framework compatibility, ONNX is defined using the subset of protobuf
// that is compatible with both protobuf v2 and v3. This means that we do not use any
// protobuf features that are only available in one of the two versions.
//
// Here are the most notable contortions we have to carry out to work around
// these limitations:
//
//   - No 'map' (added protobuf 3.0). We instead represent mappings as lists
//     of key-value pairs, where order does not matter and duplicates
//     are not allowed.


// Versioning
//
// ONNX versioning is specified in docs/IR.md and elaborated on in docs/Versioning.md
//
// To be compatible with both proto2 and proto3, we will use a version number
// that is not defined by the default value but an explicit enum number.
enum Version {
  // proto3 requires the first enum value to be zero.
  // We add this just to appease the compiler.
  _START_VERSION = 0;
  // The version field is always serialized and we will use it to store the
  // version that the graph is generated from. This helps us set up version
  // control.
  // For the IR, we are using simple numbers starting with with 0x00000001,
  // which was the version we published on Oct 10, 2017.
  IR_VERSION_2017_10_10 = 0x0000000000000001;

  // IR_VERSION 2 published on Oct 30, 2017
  // - Added type discriminator to AttributeProto to support proto3 users
  IR_VERSION_2017_10_30 = 0x0000000000000002;

  // IR VERSION 3 published on Nov 3, 2017
  // - For operator versioning:
  //   - Added new message OperatorSetIdProto
  //   - Added opset_import in ModelProto
  // - For vendor extensions, added domain in NodeProto
  IR_VERSION_2017_11_3 = 0x0000000000000003;

  // IR VERSION 4 published on Jan 22, 2019
  // - Relax constraint that initializers should be a subset of graph inputs
  // - Add type BFLOAT16
  IR_VERSION = 0x0000000000000004;
}

// Attributes
//
// A named attribute containing either singular float, integer, string, graph,
// and tensor values, or repeated float, integer, string, graph, and tensor values.
// An AttributeProto MUST contain the name field, and *only one* of the
// following content fields, effectively enforcing a C/C++ union equivalent.
message AttributeProto {

  // Note: this enum is structurally identical to the OpSchema::AttrType
  // enum defined in schema.h. If you rev one, you likely need to rev the other.
  enum AttributeType {
    UNDEFINED = 0;
    FLOAT = 1;
    INT = 2;
    STRING = 3;
    TENSOR = 4;
    GRAPH = 5;

    FLOATS = 6;
    INTS = 7;
    STRINGS = 8;
    TENSORS = 9;
    GRAPHS = 10;
  }

  // The name field MUST be present for this version of the IR.
  optional string name = 1; // namespace Attribute

  // if ref_attr_name is not empty, ref_attr_name is the attribute name in parent function.
  // In this case, this AttributeProto does not contain data, and it's a reference of attribute
  // in parent scope.
  // NOTE: This should ONLY be used in function (sub-graph). It's invalid to be used in main graph.
  optional string ref_attr_name = 21;

  // A human-readable documentation for this attribute. Markdown is allowed.
  optional string doc_string = 13;

  // The type field MUST be present for this version of the IR.
  // For 0.0.1 versions of the IR, this field was not defined, and
  // implementations needed to use has_field hueristics to determine
  // which value field was in use. For IR_VERSION 0.0.2 or later, this
  // field MUST be set and match the f|i|s|t|... field in use. This
  // change was made to accomodate proto3 implementations.
  optional AttributeType type = 20; // discriminator that indicates which field below is in use

  // Exactly ONE of the following fields must be present for this version of the IR
  optional float f = 2; // float
  optional int64 i = 3; // int
  optional bytes s = 4; // UTF-8 string
  optional TensorProto t = 5; // tensor value
  optional GraphProto g = 6; // graph
  // Do not use field below, it's deprecated.
  // optional ValueProto v = 12; // value - subsumes everything but graph

  repeated float floats = 7; // list of floats
  repeated int64 ints = 8; // list of ints
  repeated bytes strings = 9; // list of UTF-8 strings
  repeated TensorProto tensors = 10; // list of tensors
  repeated GraphProto graphs = 11; // list of graph
}

// Defines information on value, including the name, the type, and
// the shape of the value.
message ValueInfoProto {
  // This field MUST be present in this version of the IR.
  optional string name = 1; // namespace Value
  // This field MUST be present in this version of the IR.
  optional TypeProto type = 2;
  // A human-readable documentation for this value. Markdown is allowed.
  optional string doc_string = 3;
}

// Nodes
//
// Computation graphs are made up of a DAG of nodes, which represent what is
// commonly called a "layer" or "pipeline stage" in machine learning frameworks.
//
// For example, it can be a node of type "Conv" that takes in an image, a filter
// tensor and a bias tensor, and produces the convolved output.
message NodeProto {
  repeated string input = 1; // namespace Value
  repeated string output = 2; // namespace Value

  // An optional identifier for this node in a graph.
  // This field MAY be absent in ths version of the IR.
  optional string name = 3; // namespace Node

  // The symbolic identifier of the Operator to execute.
  optional string op_type = 4; // namespace Operator
  // The domain of the OperatorSet that specifies the operator named by op_type.
  optional string domain = 7; // namespace Domain

  // Additional named attributes.
  repeated AttributeProto attribute = 5;

  // A human-readable documentation for this node. Markdown is allowed.
  optional string doc_string = 6;
}

// Models
//
// ModelProto is a top-level file/container format for bundling a ML model and
// associating its computation graph with metadata.
//
// The semantics of the model are described by the associated GraphProto.
message ModelProto {
  // The version of the IR this model targets. See Version enum above.
  // This field MUST be present.
  optional int64 ir_version = 1;

  // The OperatorSets this model relies on.
  // All ModelProtos MUST have at least one entry that
  // specifies which version of the ONNX OperatorSet is
  // being imported.
  //
  // All nodes in the ModelProto's graph will bind against the operator
  // with the same-domain/same-op_type operator with the HIGHEST version
  // in the referenced operator sets.
  repeated OperatorSetIdProto opset_import = 8;

  // The name of the framework or tool used to generate this model.
  // This field SHOULD be present to indicate which implementation/tool/framework
  // emitted the model.
  optional string producer_name = 2;

  // The version of the framework or tool used to generate this model.
  // This field SHOULD be present to indicate which implementation/tool/framework
  // emitted the model.
  optional string producer_version = 3;

  // Domain name of the model.
  // We use reverse domain names as name space indicators. For example:
  // `com.facebook.fair` or `com.microsoft.cognitiveservices`
  //
  // Together with `model_version` and GraphProto.name, this forms the unique identity of
  // the graph.
  optional string domain = 4;

  // The version of the graph encoded. See Version enum below.
  optional int64 model_version = 5;

  // A human-readable documentation for this model. Markdown is allowed.
  optional string doc_string = 6;

  // The parameterized graph that is evaluated to execute the model.
  optional GraphProto graph = 7;

  // Named metadata values; keys should be distinct.
  repeated StringStringEntryProto metadata_props = 14;
};

// StringStringEntryProto follows the pattern for cross-proto-version maps.
// See https://developers.google.com/protocol-buffers/docs/proto3#maps
message StringStringEntryProto {
  optional string key = 1;
  optional string value= 2;
};

// Graphs
//
// A graph defines the computational logic of a model and is comprised of a parameterized
// list of nodes that form a directed acyclic graph based on their inputs and outputs.
// This is the equivalent of the "network" or "graph" in many deep learning
// frameworks.
message GraphProto {
  // The nodes in the graph, sorted topologically.
  repeated NodeProto node = 1;

  // The name of the graph.
  optional string name = 2; // namespace Graph

  // A list of named tensor values, used to specify constant inputs of the graph.
  // Each TensorProto entry must have a distinct name (within the list) that
  // MAY also appear in the input list.
  repeated TensorProto initializer = 5;

  // A human-readable documentation for this graph. Markdown is allowed.
  optional string doc_string = 10;

  // The inputs and outputs of the graph.
  repeated ValueInfoProto input = 11;
  repeated ValueInfoProto output = 12;

  // Information for the values in the graph. The ValueInfoProto.name's
  // must be distinct. It is optional for a value to appear in value_info list.
  repeated ValueInfoProto value_info = 13;

  // DO NOT USE the following fields, they were deprecated from earlier versions.
  // repeated string input = 3;
  // repeated string output = 4;
  // optional int64 ir_version = 6;
  // optional int64 producer_version = 7;
  // optional string producer_tag = 8;
  // optional string domain = 9;
}

// Tensors
//
// A serialized tensor value.
message TensorProto {
  enum DataType {
    UNDEFINED = 0;
    // Basic types.
    FLOAT = 1; // float
    UINT8 = 2; // uint8_t
    INT8 = 3; // int8_t
    UINT16 = 4; // uint16_t
    INT16 = 5; // int16_t
    INT32 = 6; // int32_t
    INT64 = 7; // int64_t
    STRING = 8; // string
    BOOL = 9; // bool

    // IEEE754 half-precision floating-point format (16 bits wide).
    // This format has 1 sign bit, 5 exponent bits, and 10 mantissa bits.
    FLOAT16 = 10;

    DOUBLE = 11;
    UINT32 = 12;
    UINT64 = 13;
    COMPLEX64 = 14; // complex with float32 real and imaginary components
    COMPLEX128 = 15; // complex with float64 real and imaginary components

    // Non-IEEE floating-point format based on IEEE754 single-precision
    // floating-point number truncated to 16 bits.
    // This format has 1 sign bit, 8 exponent bits, and 7 mantissa bits.
    BFLOAT16 = 16;

    // Future extensions go here.
  }

  // The shape of the tensor.
  repeated int64 dims = 1;

  // The data type of the tensor.
  // This field MUST have a valid TensorProto.DataType value
  optional int32 data_type = 2;

  // For very large tensors, we may want to store them in chunks, in which
  // case the following fields will specify the segment that is stored in
  // the current TensorProto.
  message Segment {
    optional int64 begin = 1;
    optional int64 end = 2;
  }
  optional Segment segment = 3;

  // Tensor content must be organized in row-major order.
  //
  // Depending on the data_type field, exactly one of the fields below with
  // name ending in _data is used to store the elements of the tensor.

  // For float and complex64 values
  // Complex64 tensors are encoded as a single array of floats,
  // with the real components appearing in odd numbered positions,
  // and the corresponding imaginary component apparing in the
  // subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
  // is encoded as [1.0, 2.0 ,3.0 ,4.0]
  // When this field is present, the data_type field MUST be FLOAT or COMPLEX64.
  repeated float float_data = 4 [packed = true];

  // For int32, uint8, int8, uint16, int16, bool, and float16 values
  // float16 values must be bit-wise converted to an uint16_t prior
  // to writing to the buffer.
  // When this field is present, the data_type field MUST be
  // INT32, INT16, INT8, UINT16, UINT8, BOOL, or FLOAT16
  repeated int32 int32_data = 5 [packed = true];

  // For strings.
  // Each element of string_data is a UTF-8 encoded Unicode
  // string. No trailing null, no leading BOM. The protobuf "string"
  // scalar type is not used to match ML community conventions.
  // When this field is present, the data_type field MUST be STRING
  repeated bytes string_data = 6;

  // For int64.
  // When this field is present, the data_type field MUST be INT64
  repeated int64 int64_data = 7 [packed = true];

  // Optionally, a name for the tensor.
  optional string name = 8; // namespace Value

  // A human-readable documentation for this tensor. Markdown is allowed.
  optional string doc_string = 12;

  // Serializations can either use one of the fields above, or use this
  // raw bytes field. The only exception is the string case, where one is
  // required to store the content in the repeated bytes string_data field.
  //
  // When this raw_data field is used to store tensor value, elements MUST
  // be stored in as fixed-width, little-endian order.
  // Floating-point data types MUST be stored in IEEE 754 format.
  // Complex64 elements must be written as two consecutive FLOAT values, real component first.
  // Complex128 elements must be written as two consecutive DOUBLE values, real component first.
  // Boolean type MUST be written one byte per tensor element (00000001 for true, 00000000 for false).
  //
  // Note: the advantage of specific field rather than the raw_data field is
  // that in some cases (e.g. int data), protobuf does a better packing via
  // variable length storage, and may lead to smaller binary footprint.
  // When this field is present, the data_type field MUST NOT be STRING or UNDEFINED
  optional bytes raw_data = 9;

  // Data can be stored inside the protobuf file using type-specific fields or raw_data.
  // Alternatively, raw bytes data can be stored in an external file, using the external_data field.
  // external_data stores key-value pairs describing data location. Recognized keys are:
  // - "location" (required) - POSIX filesystem path relative to the directory where the ONNX
  //   protobuf model was stored
  // - "offset" (optional) - position of byte at which stored data begins. Integer stored as string.
  //   Offset values SHOULD be multiples 4096 (page size) to enable mmap support.
  // - "length" (optional) - number of bytes containing data. Integer stored as string.
  // - "checksum" (optional) - SHA1 digest of file specified in under 'location' key.
  repeated StringStringEntryProto external_data = 13;

  // Location of the data for this tensor. MUST be one of:
  // - DEFAULT - data stored inside the protobuf message. Data is stored in raw_data (if set) otherwise in type-specified field.
  // - EXTERNAL - data stored in an external location as described by external_data field.
  enum DataLocation {
    DEFAULT = 0;
    EXTERNAL = 1;
  }

  // If value not set, data is stored in raw_data (if set) otherwise in type-specified field.
  optional DataLocation data_location = 14;

  // For double
  // Complex128 tensors are encoded as a single array of doubles,
  // with the real components appearing in odd numbered positions,
  // and the corresponding imaginary component apparing in the
  // subsequent even numbered position. (e.g., [1.0 + 2.0i, 3.0 + 4.0i]
  // is encoded as [1.0, 2.0 ,3.0 ,4.0]
  // When this field is present, the data_type field MUST be DOUBLE or COMPLEX128
  repeated double double_data = 10 [packed = true];

  // For uint64 and uint32 values
  // When this field is present, the data_type field MUST be
  // UINT32 or UINT64
  repeated uint64 uint64_data = 11 [packed = true];
}

// Defines a tensor shape. A dimension can be either an integer value
// or a symbolic variable. A symbolic variable represents an unknown
// dimension.
message TensorShapeProto {
  message Dimension {
    oneof value {
      int64 dim_value = 1;
      string dim_param = 2; // namespace Shape
    };
    // Standard denotation can optionally be used to denote tensor
    // dimensions with standard semantic descriptions to ensure
    // that operations are applied to the correct axis of a tensor.
    // Refer to https://github.com/onnx/onnx/blob/master/docs/DimensionDenotation.md#denotation-definition
    // for pre-defined dimension denotations.
    optional string denotation = 3;
  };
  repeated Dimension dim = 1;
}

// Types
//
// The standard ONNX data types.
message TypeProto {

  message Tensor {
    // This field MUST NOT have the value of UNDEFINED
    // This field MUST have a valid TensorProto.DataType value
    // This field MUST be present for this version of the IR.
    optional int32 elem_type = 1;
    optional TensorShapeProto shape = 2;
  }


  oneof value {
    // The type of a tensor.
    Tensor tensor_type = 1;

  }

  // An optional denotation can be used to denote the whole
  // type with a standard semantic description as to what is
  // stored inside. Refer to https://github.com/onnx/onnx/blob/master/docs/TypeDenotation.md#type-denotation-definition
  // for pre-defined type denotations.
  optional string denotation = 6;
}

// Operator Sets
//
// OperatorSets are uniquely identified by a (domain, opset_version) pair.
message OperatorSetIdProto {
  // The domain of the operator set being identified.
  // The empty string ("") or absence of this field implies the operator
  // set that is defined as part of the ONNX specification.
  // This field MUST be present in this version of the IR when referring to any other operator set.
  optional string domain = 1;

  // The version of the operator set being identified.
  // This field MUST be present in this version of the IR.
  optional int64 version = 2;
}
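As the raw_data comments above spell out, tensor bytes are fixed-width and little-endian; graph.rb leans on that when it rebuilds initializers with Numo. A hedged sketch with made-up tensor contents:

```ruby
require 'numo/narray'

# Little-endian float32 bytes, as a FLOAT TensorProto would carry in raw_data.
raw  = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0].pack('e*')
dims = [2, 3]

w = Numo::SFloat.from_binary(raw).reshape(*dims)
# => 2x3 Numo::SFloat, mirroring dtype.from_binary(initializer.raw_data) in graph.rb
```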
data/lib/onnx-chainer/proto/onnx_pb.rb
ADDED
@@ -0,0 +1,170 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: lib/onnx.proto

require 'google/protobuf'

Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("lib/onnx.proto", :syntax => :proto2) do
    add_message "onnx.AttributeProto" do
      optional :name, :string, 1
      optional :ref_attr_name, :string, 21
      optional :doc_string, :string, 13
      optional :type, :enum, 20, "onnx.AttributeProto.AttributeType"
      optional :f, :float, 2
      optional :i, :int64, 3
      optional :s, :bytes, 4
      optional :t, :message, 5, "onnx.TensorProto"
      optional :g, :message, 6, "onnx.GraphProto"
      repeated :floats, :float, 7
      repeated :ints, :int64, 8
      repeated :strings, :bytes, 9
      repeated :tensors, :message, 10, "onnx.TensorProto"
      repeated :graphs, :message, 11, "onnx.GraphProto"
    end
    add_enum "onnx.AttributeProto.AttributeType" do
      value :UNDEFINED, 0
      value :FLOAT, 1
      value :INT, 2
      value :STRING, 3
      value :TENSOR, 4
      value :GRAPH, 5
      value :FLOATS, 6
      value :INTS, 7
      value :STRINGS, 8
      value :TENSORS, 9
      value :GRAPHS, 10
    end
    add_message "onnx.ValueInfoProto" do
      optional :name, :string, 1
      optional :type, :message, 2, "onnx.TypeProto"
      optional :doc_string, :string, 3
    end
    add_message "onnx.NodeProto" do
      repeated :input, :string, 1
      repeated :output, :string, 2
      optional :name, :string, 3
      optional :op_type, :string, 4
      optional :domain, :string, 7
      repeated :attribute, :message, 5, "onnx.AttributeProto"
      optional :doc_string, :string, 6
    end
    add_message "onnx.ModelProto" do
      optional :ir_version, :int64, 1
      repeated :opset_import, :message, 8, "onnx.OperatorSetIdProto"
      optional :producer_name, :string, 2
      optional :producer_version, :string, 3
      optional :domain, :string, 4
      optional :model_version, :int64, 5
      optional :doc_string, :string, 6
      optional :graph, :message, 7, "onnx.GraphProto"
      repeated :metadata_props, :message, 14, "onnx.StringStringEntryProto"
    end
    add_message "onnx.StringStringEntryProto" do
      optional :key, :string, 1
      optional :value, :string, 2
    end
    add_message "onnx.GraphProto" do
      repeated :node, :message, 1, "onnx.NodeProto"
      optional :name, :string, 2
      repeated :initializer, :message, 5, "onnx.TensorProto"
      optional :doc_string, :string, 10
      repeated :input, :message, 11, "onnx.ValueInfoProto"
      repeated :output, :message, 12, "onnx.ValueInfoProto"
      repeated :value_info, :message, 13, "onnx.ValueInfoProto"
    end
    add_message "onnx.TensorProto" do
      repeated :dims, :int64, 1
      optional :data_type, :int32, 2
      optional :segment, :message, 3, "onnx.TensorProto.Segment"
      repeated :float_data, :float, 4
      repeated :int32_data, :int32, 5
      repeated :string_data, :bytes, 6
      repeated :int64_data, :int64, 7
      optional :name, :string, 8
      optional :doc_string, :string, 12
      optional :raw_data, :bytes, 9
      repeated :external_data, :message, 13, "onnx.StringStringEntryProto"
      optional :data_location, :enum, 14, "onnx.TensorProto.DataLocation"
      repeated :double_data, :double, 10
      repeated :uint64_data, :uint64, 11
    end
    add_message "onnx.TensorProto.Segment" do
      optional :begin, :int64, 1
      optional :end, :int64, 2
    end
    add_enum "onnx.TensorProto.DataType" do
      value :UNDEFINED, 0
      value :FLOAT, 1
      value :UINT8, 2
      value :INT8, 3
      value :UINT16, 4
      value :INT16, 5
      value :INT32, 6
      value :INT64, 7
      value :STRING, 8
      value :BOOL, 9
      value :FLOAT16, 10
      value :DOUBLE, 11
      value :UINT32, 12
      value :UINT64, 13
      value :COMPLEX64, 14
      value :COMPLEX128, 15
      value :BFLOAT16, 16
    end
    add_enum "onnx.TensorProto.DataLocation" do
      value :DEFAULT, 0
      value :EXTERNAL, 1
    end
    add_message "onnx.TensorShapeProto" do
      repeated :dim, :message, 1, "onnx.TensorShapeProto.Dimension"
    end
    add_message "onnx.TensorShapeProto.Dimension" do
      optional :denotation, :string, 3
      oneof :value do
        optional :dim_value, :int64, 1
        optional :dim_param, :string, 2
      end
    end
    add_message "onnx.TypeProto" do
      optional :denotation, :string, 6
      oneof :value do
        optional :tensor_type, :message, 1, "onnx.TypeProto.Tensor"
      end
    end
    add_message "onnx.TypeProto.Tensor" do
      optional :elem_type, :int32, 1
      optional :shape, :message, 2, "onnx.TensorShapeProto"
    end
    add_message "onnx.OperatorSetIdProto" do
      optional :domain, :string, 1
      optional :version, :int64, 2
    end
    add_enum "onnx.Version" do
      value :_START_VERSION, 0
      value :IR_VERSION_2017_10_10, 1
      value :IR_VERSION_2017_10_30, 2
      value :IR_VERSION_2017_11_3, 3
      value :IR_VERSION, 4
    end
  end
end

module Onnx
  AttributeProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.AttributeProto").msgclass
  AttributeProto::AttributeType = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.AttributeProto.AttributeType").enummodule
  ValueInfoProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.ValueInfoProto").msgclass
  NodeProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.NodeProto").msgclass
  ModelProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.ModelProto").msgclass
  StringStringEntryProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.StringStringEntryProto").msgclass
  GraphProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.GraphProto").msgclass
  TensorProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorProto").msgclass
  TensorProto::Segment = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorProto.Segment").msgclass
  TensorProto::DataType = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorProto.DataType").enummodule
  TensorProto::DataLocation = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorProto.DataLocation").enummodule
  TensorShapeProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorShapeProto").msgclass
  TensorShapeProto::Dimension = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TensorShapeProto.Dimension").msgclass
  TypeProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TypeProto").msgclass
  TypeProto::Tensor = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.TypeProto.Tensor").msgclass
  OperatorSetIdProto = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.OperatorSetIdProto").msgclass
  Version = Google::Protobuf::DescriptorPool.generated_pool.lookup("onnx.Version").enummodule
end
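These generated classes are what lib/onnx-chainer.rb decodes into; a minimal sketch of inspecting a model directly (path hypothetical):

```ruby
require 'onnx-chainer/proto/onnx_pb'

model = Onnx::ModelProto.decode(File.binread('mnist.onnx')) # hypothetical path
model.graph.node.each do |node|
  puts "#{node.op_type}: #{node.input.to_a.join(', ')} -> #{node.output.to_a.join(', ')}"
end
```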
data/onnx-red-chainer.gemspec
ADDED
@@ -0,0 +1,31 @@
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "onnx-chainer/version"

Gem::Specification.new do |spec|
  spec.name = "onnx-red-chainer"
  spec.version = OnnxChainer::VERSION
  spec.authors = ["hatappi"]
  spec.email = ["hatappi@hatappi.me"]

  spec.summary = "Automatically generate Ruby code from ONNX"
  spec.description = "Automatically generate Ruby code from ONNX"
  spec.homepage = "https://github.com/hatappi/onnx-red-chainer"
  spec.license = "MIT"
  spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir = "exe"
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency 'google-protobuf'
  spec.add_dependency 'red-chainer'
  spec.add_dependency "numo-narray"

  spec.add_development_dependency "bundler", "~> 1.17"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.0"
end
metadata
ADDED
@@ -0,0 +1,147 @@
--- !ruby/object:Gem::Specification
name: onnx-red-chainer
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- hatappi
autorequire:
bindir: exe
cert_chain: []
date: 2019-04-11 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: google-protobuf
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: red-chainer
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: numo-narray
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.17'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.17'
- !ruby/object:Gem::Dependency
  name: rake
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '10.0'
- !ruby/object:Gem::Dependency
  name: minitest
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.0'
description: Automatically generate Ruby code from ONNX
email:
- hatappi@hatappi.me
executables:
- onnx-red-chainer
extensions: []
extra_rdoc_files: []
files:
- Gemfile
- Gemfile.lock
- LICENSE.txt
- README.md
- Rakefile
- bin/console
- bin/setup
- exe/onnx-red-chainer
- lib/onnx-chainer.rb
- lib/onnx-chainer/cli.rb
- lib/onnx-chainer/graph.rb
- lib/onnx-chainer/operator.rb
- lib/onnx-chainer/operators/gemm.rb
- lib/onnx-chainer/operators/relu.rb
- lib/onnx-chainer/proto/onnx.proto
- lib/onnx-chainer/proto/onnx_pb.rb
- lib/onnx-chainer/version.rb
- onnx-red-chainer.gemspec
homepage: https://github.com/hatappi/onnx-red-chainer
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.7.6
signing_key:
specification_version: 4
summary: Automatically generate Ruby code from ONNX
test_files: []