tensor_stream 0.9.8 → 0.9.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. checksums.yaml +4 -4
  2. data/README.md +31 -14
  3. data/lib/tensor_stream.rb +4 -0
  4. data/lib/tensor_stream/constant.rb +41 -0
  5. data/lib/tensor_stream/control_flow.rb +2 -1
  6. data/lib/tensor_stream/dynamic_stitch.rb +3 -1
  7. data/lib/tensor_stream/evaluator/operation_helpers/array_ops_helper.rb +4 -4
  8. data/lib/tensor_stream/evaluator/ruby/array_ops.rb +74 -23
  9. data/lib/tensor_stream/evaluator/ruby/math_ops.rb +45 -43
  10. data/lib/tensor_stream/evaluator/ruby/nn_ops.rb +31 -30
  11. data/lib/tensor_stream/evaluator/ruby/random_ops.rb +6 -6
  12. data/lib/tensor_stream/evaluator/ruby_evaluator.rb +46 -111
  13. data/lib/tensor_stream/graph.rb +61 -12
  14. data/lib/tensor_stream/graph_builder.rb +3 -3
  15. data/lib/tensor_stream/graph_deserializers/yaml_loader.rb +38 -0
  16. data/lib/tensor_stream/graph_serializers/packer.rb +8 -0
  17. data/lib/tensor_stream/graph_serializers/pbtext.rb +62 -27
  18. data/lib/tensor_stream/graph_serializers/serializer.rb +2 -2
  19. data/lib/tensor_stream/graph_serializers/yaml.rb +27 -0
  20. data/lib/tensor_stream/helpers/infer_shape.rb +15 -9
  21. data/lib/tensor_stream/helpers/op_helper.rb +17 -6
  22. data/lib/tensor_stream/helpers/string_helper.rb +32 -1
  23. data/lib/tensor_stream/helpers/tensor_mixins.rb +135 -0
  24. data/lib/tensor_stream/math_gradients.rb +19 -12
  25. data/lib/tensor_stream/monkey_patches/float.rb +7 -0
  26. data/lib/tensor_stream/monkey_patches/integer.rb +7 -0
  27. data/lib/tensor_stream/monkey_patches/patch.rb +8 -8
  28. data/lib/tensor_stream/nn/nn_ops.rb +1 -1
  29. data/lib/tensor_stream/operation.rb +98 -36
  30. data/lib/tensor_stream/ops.rb +65 -13
  31. data/lib/tensor_stream/placeholder.rb +2 -2
  32. data/lib/tensor_stream/session.rb +15 -3
  33. data/lib/tensor_stream/tensor.rb +15 -172
  34. data/lib/tensor_stream/tensor_shape.rb +3 -1
  35. data/lib/tensor_stream/train/saver.rb +12 -10
  36. data/lib/tensor_stream/trainer.rb +7 -2
  37. data/lib/tensor_stream/utils.rb +13 -11
  38. data/lib/tensor_stream/utils/freezer.rb +37 -0
  39. data/lib/tensor_stream/variable.rb +17 -11
  40. data/lib/tensor_stream/variable_scope.rb +3 -1
  41. data/lib/tensor_stream/version.rb +1 -1
  42. data/samples/iris.rb +3 -4
  43. data/samples/linear_regression.rb +9 -5
  44. data/samples/logistic_regression.rb +11 -9
  45. data/samples/mnist_data.rb +8 -10
  46. metadata +8 -4
data/lib/tensor_stream/placeholder.rb

@@ -11,13 +11,13 @@ module TensorStream
       @is_const = false

       @name = [@graph.get_name_scope, options[:name] || build_name].compact.reject(&:empty?).join('/')
-      @graph.add_node(self)
+      @op = Graph.get_default_graph.add_op!(:placeholder, data_type: @data_type, shape: @shape, internal_name: @name)
     end

     private

     def build_name
-      "Placeholder#{graph.get_placeholder_counter}:#{@rank}"
+      "Placeholder#{graph.get_placeholder_counter}"
     end
   end
 end
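Placeholders are now registered on the graph as :placeholder ops when constructed, and the generated name no longer carries the old ":<rank>" suffix. A minimal sketch of the user-visible effect (not part of the diff), assuming the public TensorStream.placeholder helper:

  p = TensorStream.placeholder(:float32)
  p.op.operation  # => :placeholder -- the placeholder is backed by a graph op via add_op!
  p.name          # generated as "Placeholder<counter>", without the former ":<rank>" suffix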
data/lib/tensor_stream/session.rb

@@ -56,8 +56,20 @@ module TensorStream

       # scan for placeholders and assign value
       if options[:feed_dict]
-        options[:feed_dict].keys.each do |k|
-          context[k.name.to_sym] = options[:feed_dict][k] if k.is_a?(Placeholder)
+        options[:feed_dict].each_key do |k|
+          if k.is_a?(Placeholder)
+            context[k.name.to_sym] = options[:feed_dict][k]
+          elsif k.is_a?(String)
+            target_graph = args[0].graph
+            node = target_graph.get_node(k)
+            if node.operation == :placeholder
+              context[k.to_sym] = options[:feed_dict][k]
+            else
+              raise "Cannot find placeholder with the name of #{k}"
+            end
+          else
+            raise "Invalid placeholder type passed key must be a string or a placeholder type"
+          end
         end
       end

@@ -125,7 +137,7 @@ module TensorStream
         end
       end
     else
-      run_with_session_context(tensor_arr, session_context, context)
+      run_with_session_context(tensor_arr.op, session_context, context)
     end
   end
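With this change, feed_dict keys can be either Placeholder objects (as before) or the string name of a placeholder node, which the session resolves through graph.get_node and rejects if the named node is not a placeholder. A minimal usage sketch (not part of the diff); the name: keyword on the placeholder helper is assumed to behave as in earlier releases:

  x = TensorStream.placeholder(:float32, name: 'x')
  f = x * 2.0
  sess = TensorStream.session

  sess.run(f, feed_dict: { x => 3.0 })    # Placeholder object as key, as before
  sess.run(f, feed_dict: { 'x' => 3.0 })  # new in 0.9.9: placeholder referenced by its graph name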
 
data/lib/tensor_stream/tensor.rb

@@ -4,40 +4,11 @@ module TensorStream
   # Base class that defines a tensor like interface
   class Tensor
     include OpHelper
-    attr_reader :graph
-    attr_accessor :name, :data_type, :shape, :rank, :native_buffer, :is_const,
-                  :value, :breakpoint, :internal, :source, :given_name,
-                  :consumers, :outputs, :device
-
-    def initialize(data_type, rank, shape, options = {})
-      setup_initial_state(options)
-      @data_type = data_type
-      @rank = rank
-      @breakpoint = false
-      @shape = TensorShape.new(shape, rank)
-      @value = nil
-
-      @is_const = options[:const] || false
-      @internal = options[:internal]
-      @name = [@graph.get_name_scope, options[:name] || build_name].compact.reject(&:empty?).join('/')
-      @given_name = @name
-
-      if options[:value]
-        if options[:value].is_a?(Array)
-          # check if single dimenstion array is passed
-          options[:value] = reshape(options[:value], shape.reverse.dup) if shape.size >= 2 && !options[:value].empty? && !options[:value][0].is_a?(Array)
-
-          @value = options[:value].map { |v| v.is_a?(Tensor) ? Tensor.cast_dtype(v, @data_type) : v }
-        elsif !shape.empty?
-          @value = reshape(Tensor.cast_dtype(options[:value], @data_type), shape.dup)
-        else
-          @value = Tensor.cast_dtype(options[:value], @data_type)
-        end
-        @shape = TensorShape.new(shape_eval(@value))
-      end
+    include TensorMixins

-      @graph.add_node(self)
-    end
+    attr_reader :graph, :value
+    attr_accessor :name, :data_type, :shape, :rank, :native_buffer, :is_const,
+                  :internal, :source, :given_name, :outputs, :op

     def internal?
       !!@internal

@@ -47,120 +18,18 @@ module TensorStream
       @data_type
     end

+    def consumers
+      op.consumers
+    end
+
     def self.reset_counters
       @const_counter = 0
       @var_counter = 0
       @placeholder_counter = 0
     end

-    def +(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:add, self, other)
-    end
-
-    def [](index)
-      _op(:index, self, index)
-    end
-
-    def *(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:mul, self, TensorStream.convert_to_tensor(other, dtype: data_type))
-    end
-
-    def **(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:pow, self, TensorStream.convert_to_tensor(other, dtype: data_type))
-    end
-
-    def /(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:div, self, TensorStream.convert_to_tensor(other, dtype: data_type))
-    end
-
-    def -(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:sub, self, TensorStream.convert_to_tensor(other, dtype: data_type))
-    end
-
-    def -@
-      _op(:negate, self, nil)
-    end
-
-    def %(other)
-      TensorStream.mod(self, other)
-    end
-
-    def floor
-      TensorStream.floor(self)
-    end
-
-    def ceil
-      TensorStream.ceil(self)
-    end
-
-    def zero?
-      _op(:equal, self, TensorStream.constant(0, dtype: data_type, name: 'equal/is_zero?'))
-    end
-
-    def ==(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:equal, self, other)
-    end
-
-    def <(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:less, self, other)
-    end
-
-    def !=(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:not_equal, self, other)
-    end
-
-    def >(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:greater, self, other)
-    end
-
-    def >=(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:greater_equal, self, other)
-    end
-
-    def <=(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:less_equal, self, other)
-    end
-
-    def and(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:logical_and, self, other)
-    end
-
-    def matmul(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:mat_mul, self, other)
-    end
-
-    def dot(other)
-      _a, other = TensorStream.check_data_types(self, other)
-      _op(:mat_mul, self, other)
-    end
-
-    ##
-    # Apply a reduction to tensor
-    def reduce(op_type)
-      reduce_op = case op_type.to_sym
-                  when :+
-                    :sum
-                  when :*
-                    :prod
-                  else
-                    raise "unsupported reduce op type #{op_type}"
-                  end
-      raise "blocks are not supported for tensors" if block_given?
-
-      _op(reduce_op, self, nil)
+    def device
+      @op.device
     end

     def collect(&block)

@@ -171,27 +40,12 @@ module TensorStream
       @name
     end

-    def const_value
-      return nil unless is_const
-
-      @value
-    end
-
-    def op
-      @op ||= is_const ? _op(:const, self, nil, name: name) : _op(:variable, self, nil, name: name)
-    end
-
     def eval(options = {})
       Session.default_session.run(self, options)
     end

     def to_h
       {
-        name: @name,
-        value: hashify_tensor(@value),
-        dtype: @data_type,
-        shape: @shape,
-        const: !!is_const,
       }
     end

@@ -295,19 +149,6 @@ module TensorStream
       @source = format_source(caller_locations)
     end

-    def add_consumer(consumer)
-      @consumers ||= Set.new
-      @consumers << consumer.name if consumer.name != name
-    end
-
-    def setup_output(consumer)
-      @outputs << consumer.name unless @outputs.include?(consumer.name)
-    end
-
-    def propagate_consumer(consumer)
-      add_consumer(consumer)
-    end
-
     def propagate_outputs
       # nop
     end

@@ -322,20 +163,22 @@ module TensorStream
       end
     end

-    def reshape(arr, shape)
+    def _reshape(arr, shape)
       if arr.is_a?(Array)
         return arr if shape.size < 2
+
         slice = shape.shift
         arr.each_slice(slice).collect do |s|
-          reshape(s, shape)
+          _reshape(s, shape)
         end
       else
         return arr if shape.empty?
+
         slice = shape.shift
         return arr if slice.nil?

         Array.new(slice) do
-          reshape(arr, shape.dup)
+          _reshape(arr, shape.dup)
         end
       end
     end
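The operator methods removed from Tensor (+, *, matmul, comparisons, reduce and friends) now come from the new TensorStream::TensorMixins module (helpers/tensor_mixins.rb, +135 lines in this release), which Tensor includes above, so operator-based user code keeps working. A minimal sketch (not part of the diff):

  a = TensorStream.constant([1.0, 2.0])
  b = TensorStream.constant([3.0, 4.0])
  sum = a + b                    # + is now supplied by TensorMixins rather than defined on Tensor
  TensorStream.session.run(sum)  # => [4.0, 6.0]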
data/lib/tensor_stream/tensor_shape.rb

@@ -9,6 +9,8 @@ module TensorStream
     end

     def to_s
+      return "" if @shape.nil?
+
       dimensions = @shape.collect do |r|
         "Dimension(#{r})"
       end.join(',')

@@ -24,7 +26,7 @@ module TensorStream
     end

     def scalar?
-      shape.size.zero?
+      known? && shape.size.zero?
     end

     def known?
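TensorShape#to_s now tolerates an undefined shape, and scalar? only returns true when the shape is actually known. A small sketch (not part of the diff), assuming a placeholder built without an explicit shape ends up with an unknown TensorShape:

  shape = TensorStream.placeholder(:float32).shape
  shape.known?   # => false, no dimensions were supplied
  shape.scalar?  # => false, scalar? now also requires known?
  shape.to_s     # => "" instead of failing on a nil @shape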
data/lib/tensor_stream/train/saver.rb

@@ -7,7 +7,7 @@ module TensorStream
     class Saver
       include TensorStream::OpHelper

-      def save(session, outputfile, global_step: nil,
+      def save(session, outputdir, global_step: nil,
                latest_filename: nil,
                meta_graph_suffix: 'meta',
                write_meta_graph: true,

@@ -33,20 +33,22 @@ module TensorStream
          }
        end

-        basename = File.basename(outputfile)
-        path = File.dirname(outputfile)
-
-        new_filename = File.join(path, [basename, gs].compact.join('-'))
+        FileUtils.mkdir_p(outputdir)
+        basename = 'model'
+        File.write(File.join(outputdir, "#{basename}.meta"), { "gs" => gs }.to_json)
+        new_filename = File.join(outputdir, [basename, gs, '.ckpt'].compact.join('-'))
         File.write(new_filename, output_dump.to_yaml)
         if write_meta_graph
-          graph_filename = "#{basename}.pbtext"
-          TensorStream.train.write_graph(graph, path, graph_filename)
+          graph_filename = "#{basename}.yaml"
+          TensorStream.train.write_graph(graph, outputdir, graph_filename, serializer: :yaml)
         end
-        path
+        outputdir
       end

-      def restore(_session, inputfile)
-        input_dump = YAML.safe_load(File.read(inputfile))
+      def restore(_session, modelpath)
+        meta_data = JSON.parse(File.read(File.join(modelpath, "model.meta")))
+        gs = meta_data['gs']
+        input_dump = YAML.safe_load(File.read(File.join(modelpath, ['model', gs, '.ckpt'].compact.join('-'))))

         vars = TensorStream::Graph.get_default_graph.get_collection(GraphKeys::GLOBAL_VARIABLES)
         vars.each do |variable|
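Saver#save now takes an output directory instead of a file path: it creates the directory, writes a model.meta JSON file holding the global step, dumps the variables to a model-<step>-.ckpt YAML file, and (when write_meta_graph is true) serializes the graph to model.yaml; restore reads the same layout back. A minimal usage sketch (not part of the diff), assuming a session sess with initialized variables:

  saver = TensorStream::Train::Saver.new
  saver.save(sess, '/tmp/my_model')    # writes model.meta, model-*.ckpt and model.yaml into the directory

  # later, with the same graph definition loaded
  saver.restore(sess, '/tmp/my_model') # reloads variable values from that directory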
data/lib/tensor_stream/trainer.rb

@@ -13,12 +13,17 @@ module TensorStream
   module Trainer
     extend TensorStream::Train::Utils
     extend TensorStream::Train::LearningRateDecay
+    extend TensorStream::StringHelper

-    def self.write_graph(graph, path, filename, as_text: true, serializer: TensorStream::Pbtext)
+    def self.write_graph(graph, path, filename, as_text: true, serializer: :yaml)
       raise "only supports as_text=true for now" unless as_text

+      serializer = constantize("TensorStream::#{camelize(serializer.to_s)}") if serializer.is_a?(Symbol)
+
       new_filename = File.join(path, filename)
-      File.write(new_filename, serializer.new.get_string(graph))
+      serializer.new.get_string(graph).tap do |str|
+        File.write(new_filename, str)
+      end
     end
   end
 end
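write_graph now defaults to the YAML serializer and accepts a symbol, which is camelized and constantized into a serializer class (:yaml -> TensorStream::Yaml, :pbtext -> TensorStream::Pbtext). A minimal sketch (not part of the diff):

  graph = TensorStream::Graph.get_default_graph
  TensorStream.train.write_graph(graph, '/tmp', 'model.yaml')                        # YAML is now the default
  TensorStream.train.write_graph(graph, '/tmp', 'model.pbtext', serializer: :pbtext) # symbol resolves to TensorStream::Pbtext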
data/lib/tensor_stream/utils.rb

@@ -45,7 +45,7 @@ module TensorStream
     # Creates a variable
     # A variable maintains state across sessions
     def variable(value, name: nil, initializer: nil, graph: nil, dtype: nil, trainable: true)
-      op = Operation.new(:assign, nil, value)
+      op = Graph.get_default_graph.add_op(:assign, nil, value)
       common_options = {
         initializer: initializer || op,
         name: name,

@@ -54,15 +54,16 @@ module TensorStream
         trainable: trainable
       }
       tensor = if value.is_a?(String)
-                 TensorStream::Variable.new(dtype || :string, 0, [], get_variable_scope, common_options)
+                 i_var(dtype || :string, 0, [], get_variable_scope, common_options)
               elsif value.is_a?(Integer)
-                 TensorStream::Variable.new(dtype || :int32, 0, [], get_variable_scope, common_options)
+                 i_var(dtype || :int32, 0, [], get_variable_scope, common_options)
               elsif value.is_a?(Float)
-                 TensorStream::Variable.new(dtype || :float32, 0, [], get_variable_scope, common_options)
+                 i_var(dtype || :float32, 0, [], get_variable_scope, common_options)
               else
-                 TensorStream::Variable.new(dtype || :float32, 0, nil, get_variable_scope, common_options)
+                 i_var(dtype || :float32, 0, nil, get_variable_scope, common_options)
               end
-      op.inputs[0] = tensor
+      op.set_input(0, tensor.op)
+      Graph.get_default_graph.add_node(op)
       tensor
     end

@@ -163,13 +164,13 @@ module TensorStream
       shared_options = { const: true, value: value, name: name, internal: internal }

       if value.is_a?(Float)
-        TensorStream::Tensor.new(dtype || :float32, 0, shape || [], shared_options)
+        TensorStream::Constant.new(dtype || :float32, 0, shape || [], shared_options)
       elsif value.is_a?(Integer)
-        TensorStream::Tensor.new(dtype || :int32, 0, shape || [], shared_options)
+        TensorStream::Constant.new(dtype || :int32, 0, shape || [], shared_options)
       elsif value.is_a?(String)
-        TensorStream::Tensor.new(dtype || :string, 0, shape || [], shared_options)
+        TensorStream::Constant.new(dtype || :string, 0, shape || [], shared_options)
      elsif !!value == value
-        TensorStream::Tensor.new(dtype || :boolean, 0, shape || [], shared_options)
+        TensorStream::Constant.new(dtype || :boolean, 0, shape || [], shared_options)
      elsif value.is_a?(Array)
        dimension = shape || shape_eval(value)
        rank = dimension.size

@@ -179,7 +180,7 @@ module TensorStream
       value = Tensor.cast_dtype(value, cur_dtype) unless dtype.nil?

       shared_options[:value] = value
-      TensorStream::Tensor.new(cur_dtype, rank, dimension, shared_options)
+      TensorStream::Constant.new(cur_dtype, rank, dimension, shared_options)
     end
   end

@@ -239,6 +240,7 @@ module TensorStream
     def convert_to_tensor(value, dtype: nil, name: nil)
       return value if value.is_a?(Tensor)
       return convert_to_tensor(value.call) if value.is_a?(Proc)
+      # raise "Invalid tensor value" if value.nil?

       if value.is_a?(Array) && value[0].is_a?(Tensor)
         return TensorStream.stack(value) if value.size > 1
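Two user-visible effects of the utils.rb changes: variables are now built through the i_var helper and wired to an :assign op registered on the default graph, and literals passed to TensorStream.constant produce instances of the new TensorStream::Constant class (constant.rb, +41 lines) rather than bare Tensor objects. A minimal sketch (not part of the diff):

  w = TensorStream.variable(0.5, name: 'weight')  # initializer is an :assign op added to the default graph
  c = TensorStream.constant(3.0, name: 'three')
  c.is_a?(TensorStream::Constant)                 # => true in 0.9.9; previously a plain TensorStream::Tensor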
data/lib/tensor_stream/utils/freezer.rb (new file)

@@ -0,0 +1,37 @@
+module TensorStream
+  class Freezer
+    include OpHelper
+
+    ##
+    # Utility class to convert variables to constants for production deployment
+    #
+    def convert(model_file, checkpoint_file, output_file)
+      TensorStream.graph.as_default do |current_graph|
+        YamlLoader.new.load_from_string(File.read(model_file))
+        saver = TensorStream::Train::Saver.new
+        saver.restore(nil, checkpoint_file)
+        output_buffer = TensorStream::Yaml.new.get_string(current_graph) do |graph, node_key|
+          node = graph.get_tensor_by_name(node_key)
+          if node.operation == :variable_v2
+            value = node.container
+            options = {
+              value: value,
+              data_type: node.data_type,
+              shape: shape_eval(value)
+            }
+            const_op = TensorStream::Operation.new(current_graph, inputs: [], options: options)
+            const_op.name = node.name
+            const_op.operation = :const
+            const_op.data_type = node.data_type
+            const_op.shape = TensorShape.new(shape_eval(value))
+
+            const_op
+          else
+            node
+          end
+        end
+        File.write(output_file, output_buffer)
+      end
+    end
+  end
+end
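The new Freezer loads a YAML graph definition, restores variable values from a checkpoint directory via Saver#restore, and re-serializes the graph with every :variable_v2 node rewritten as an equivalent :const op, the usual preparation for deploying a trained model. A minimal usage sketch (not part of the diff); the paths are hypothetical and assume a model saved with the directory-based Saver shown above:

  freezer = TensorStream::Freezer.new
  freezer.convert('/tmp/my_model/model.yaml',  # serialized graph definition
                  '/tmp/my_model',             # checkpoint directory read by Saver#restore
                  '/tmp/frozen_model.yaml')    # output graph with variables baked in as constants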