Module: TensorStream::OpHelper
- Included in:
- TensorStream, Debugging, Evaluator::RubyEvaluator, Freezer, Graph, GraphBuilder, Images, InferShape, MathGradients, Maths, NN, Operation, Pbtext, Tensor, Train::AdadeltaOptimizer, Train::AdagradOptimizer, Train::AdamOptimizer, Train::GradientDescentOptimizer, Train::LearningRateDecay, Train::MomentumOptimizer, Train::RMSPropOptimizer, Train::Saver, VariableScope, Yaml
- Defined in:
- lib/tensor_stream/helpers/op_helper.rb
Overview
Module that contains helper functions useful for building ops.
Instance Method Summary
- #_op(code, *args) ⇒ Object
- #cons(value, options = {}) ⇒ Object
- #format_source(trace) ⇒ Object
- #fp_type?(type) ⇒ Boolean
- #i_cons(value, options = {}) ⇒ Object
- #i_op(code, *args) ⇒ Object
  Same as _op but with a marker that it was internally generated.
- #i_var(data_type, rank, shape, variable_scope, options = {}) ⇒ Object
- #int_type?(type) ⇒ Boolean
- #reduced_shape(input_shape, axes) ⇒ Object
- #shape_eval(input, output_type = :int32) ⇒ Object
- #shape_full_specified(tensor) ⇒ Object
- #shapes_fully_specified_and_equal(x, y) ⇒ Object
Instance Method Details
#_op(code, *args) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 4

def _op(code, *args)
  default_graph = Graph.get_default_graph
  op = default_graph.add_op!(code.to_sym, *args)
  if !default_graph.get_dependency_scope.nil?
    i_op(:identity, op, default_graph.get_dependency_scope, name: [op.name, "tuple", "control_dependency"].join("/"))
  else
    op
  end
end
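Example: a minimal usage sketch from code that includes this module; the :add op type and the constant construction are assumptions, not part of this file.

require "tensor_stream"
include TensorStream::OpHelper

a = TensorStream.constant(1.0)
b = TensorStream.constant(2.0)
# Adds an :add op to the default graph; when a control-dependency
# scope is active the op is wrapped in an internal :identity op.
sum = _op(:add, a, b)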
#cons(value, options = {}) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 35

def cons(value, options = {})
  TensorStream.constant(value, options)
end
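Example, assuming the same require/include setup as the _op example above; the dtype option name is an assumption about TensorStream.constant.

c = cons(3.0, dtype: :float32)
# equivalent to TensorStream.constant(3.0, dtype: :float32)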
#format_source(trace) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 66

def format_source(trace)
  grad_source = trace.detect { |c| c.to_s.include?(File.join("lib", "tensor_stream", "math_gradients")) }
  source = trace.reject { |c| c.to_s.include?(File.join("lib", "tensor_stream")) }.first
  [grad_source, source].compact.join("\n")
end
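Example sketch, assuming the setup above; any backtrace (an array of strings such as Kernel#caller returns) can be passed.

puts format_source(caller)  # prints the gradient frame plus the first non-library frame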
#fp_type?(type) ⇒ Boolean
# File 'lib/tensor_stream/helpers/op_helper.rb', line 58

def fp_type?(type)
  TensorStream::Ops::FLOATING_POINT_TYPES.include?(type)
end
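Example, assuming :float32 is listed in TensorStream::Ops::FLOATING_POINT_TYPES and :int32 is not.

fp_type?(:float32)  # => true
fp_type?(:int32)    # => false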
#i_cons(value, options = {}) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 39

def i_cons(value, options = {})
  TensorStream.constant(value, options.merge(internal: true))
end
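Example, assuming the setup above; the constant is created the same way as with #cons but flagged internal: true.

eps = i_cons(1e-7, dtype: :float32)  # internally generated constant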
#i_op(code, *args) ⇒ Object
Same as _op but with a marker that it was internally generated.
# File 'lib/tensor_stream/helpers/op_helper.rb', line 16

def i_op(code, *args)
  options = if args.last.is_a?(Hash)
              args.pop
            else
              {}
            end

  args << options.merge(internal: true)
  Graph.get_default_graph.add_op!(code.to_sym, *args)
end
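Example sketch, assuming the setup above; the :shape op type is taken from its use elsewhere in this file.

t = TensorStream.constant([[1, 2], [3, 4]])
s = i_op(:shape, t, name: "internal_shape")  # trailing options are merged with internal: true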
#i_var(data_type, rank, shape, variable_scope, options = {}) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 27

def i_var(data_type, rank, shape, variable_scope, options = {})
  new_var = Variable.new(data_type)
  new_var.prepare(rank, shape, variable_scope, options)
  new_var.op = new_var.graph.add_variable!(new_var, options.merge(shape: @shape, data_type: data_type))
  new_var
end
#int_type?(type) ⇒ Boolean
# File 'lib/tensor_stream/helpers/op_helper.rb', line 62

def int_type?(type)
  TensorStream::Ops::INTEGER_TYPES.include?(type)
end
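Example, assuming :int32 is listed in TensorStream::Ops::INTEGER_TYPES and :float64 is not.

int_type?(:int32)    # => true
int_type?(:float64)  # => false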
#reduced_shape(input_shape, axes) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 87

def reduced_shape(input_shape, axes)
  input_shape = TensorStream.convert_to_tensor(input_shape)
  axes = TensorStream.convert_to_tensor(axes)
  input_rank = i_op(:size, input_shape)
  axes = TensorStream.range(0, input_rank) if axes.nil?
  axes = (axes + input_rank) % input_rank
  axes_shape = i_op(:shape, axes)

  TensorStream.dynamic_stitch([TensorStream.range(0, input_rank), axes],
                              [input_shape, i_op(:fill, axes_shape, 1)])
end
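Example sketch, assuming the setup above and that TensorStream.session can evaluate the resulting tensor; the reduced axes are replaced by 1, keepdims-style.

sess = TensorStream.session
sess.run(reduced_shape([2, 3, 4], [1]))  # expected: [2, 1, 4]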
#shape_eval(input, output_type = :int32) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 43

def shape_eval(input, output_type = :int32)
  return [] unless input.is_a?(Array)

  arr = []
  arr_ptr = input

  Kernel.loop do
    arr << (TensorStream::Ops::FLOATING_POINT_TYPES.include?(output_type) ? arr_ptr.size.to_f : arr_ptr.size)
    arr_ptr = arr_ptr[0]
    break unless arr_ptr.is_a?(Array)
  end

  arr
end
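Example; this operates on plain nested Ruby arrays, so no graph or session is involved (the :float32 line assumes :float32 is a floating-point type).

shape_eval([[1, 2, 3], [4, 5, 6]])            # => [2, 3]
shape_eval([[1, 2, 3], [4, 5, 6]], :float32)  # => [2.0, 3.0]
shape_eval(5)                                 # => [] (non-array input)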
#shape_full_specified(tensor) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 79

def shape_full_specified(tensor)
  return false if tensor.shape.nil?
  return false if tensor.shape.shape.nil?

  tensor.shape.shape.each { |s| return false if s.nil? || (s < 0) }
  true
end
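Example sketch, assuming the setup above; the placeholder call with a shape option is an assumption about the wider TensorStream API.

a = TensorStream.constant([[1, 2], [3, 4]])
shape_full_specified(a)   # => true, static shape [2, 2]

ph = TensorStream.placeholder(:float32, shape: [nil, 2])
shape_full_specified(ph)  # => false, first dimension unknown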
#shapes_fully_specified_and_equal(x, y) ⇒ Object
# File 'lib/tensor_stream/helpers/op_helper.rb', line 72

def shapes_fully_specified_and_equal(x, y)
  return false if !shape_full_specified(x) || !shape_full_specified(y)
  return false if x.shape.shape != y.shape.shape

  true
end
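Example, continuing the setup above.

x = TensorStream.constant([[1, 2], [3, 4]])
y = TensorStream.constant([[5, 6], [7, 8]])
shapes_fully_specified_and_equal(x, y)  # => true, both statically [2, 2]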