Class: DNN::Optimizers::Optimizer
- Inherits:
-
Object
- Object
- DNN::Optimizers::Optimizer
- Defined in:
- lib/dnn/core/optimizers.rb
Overview
Super class of all optimizer classes.
Instance Attribute Summary collapse
-
#clip_norm ⇒ Object
Returns the value of attribute clip_norm.
Class Method Summary collapse
Instance Method Summary collapse
-
#initialize(clip_norm: nil) ⇒ Optimizer
constructor
A new instance of Optimizer.
- #load_hash(hash) ⇒ Object
- #to_hash(merge_hash = nil) ⇒ Object
-
#update(layers) ⇒ Object
Updates the layers that have params.
Constructor Details
#initialize(clip_norm: nil) ⇒ Optimizer
Returns a new instance of Optimizer.
(source lines 18–20)
# File 'lib/dnn/core/optimizers.rb', line 18 def initialize(clip_norm: nil) @clip_norm = clip_norm end |
Instance Attribute Details
#clip_norm ⇒ Object
Returns the value of attribute clip_norm.
(source lines 6–8)
# File 'lib/dnn/core/optimizers.rb', line 6 def clip_norm @clip_norm end |
Class Method Details
.from_hash(hash) ⇒ Object
(source lines 8–15)
# File 'lib/dnn/core/optimizers.rb', line 8

# Rebuilds an optimizer instance from a serialized hash.
#
# @param hash [Hash, nil] serialized optimizer state; hash[:class] names the
#   concrete optimizer class.
# @return [Optimizer, nil] the deserialized optimizer, or nil when hash is falsy.
# @raise [DNN_Error] if the named class is not a kind of the receiver class.
def self.from_hash(hash)
  return nil unless hash
  klass = DNN.const_get(hash[:class])
  optimizer = klass.allocate
  unless optimizer.is_a?(self)
    raise DNN_Error, "#{optimizer.class} is not an instance of #{self} class."
  end
  optimizer.load_hash(hash)
  optimizer
end
Instance Method Details
#load_hash(hash) ⇒ Object
(source lines 55–57)
# File 'lib/dnn/core/optimizers.rb', line 55 def load_hash(hash) initialize(clip_norm: hash[:clip_norm]) end |
#to_hash(merge_hash = nil) ⇒ Object
(source lines 34–38)
# File 'lib/dnn/core/optimizers.rb', line 34

# Serializes this optimizer's configuration.
#
# @param merge_hash [Hash, nil] extra entries to merge into the result.
# @return [Hash] a hash with :class (the class name) and :clip_norm keys,
#   plus any entries from merge_hash.
def to_hash(merge_hash = nil)
  base = { class: self.class.name, clip_norm: @clip_norm }
  merge_hash ? base.merge(merge_hash) : base
end
#update(layers) ⇒ Object
Updates the layers that have params.
(source lines 23–32)
# File 'lib/dnn/core/optimizers.rb', line 23

# Updates the layers that have params: collects every gradient-bearing param
# from the trainable layers, clips gradients when clip_norm is set, runs the
# optimizer-specific update, then resets each param's gradient.
#
# @param layers [Array] the layers to update.
def update(layers)
  trainable_layers = layers.select do |layer|
    layer.is_a?(Layers::TrainableLayer) && layer.trainable
  end
  target_params = trainable_layers
                  .map { |layer| layer.get_params.values }
                  .flatten.compact
                  .select(&:grad)
  clip_grads(target_params) if @clip_norm
  update_params(target_params)
  # Zero out each gradient after the update has been applied.
  target_params.each { |param| param.grad = Xumo::SFloat[0] }
end