Class: Chainer::Optimizers::Adam

Inherits:
GradientMethod
Defined in:
lib/chainer/optimizers/adam.rb

Instance Attribute Summary

Attributes inherited from Chainer::Optimizer

#target

Instance Method Summary

#create_update_rule ⇒ Object

#initialize(alpha: nil, beta1: nil, beta2: nil, eps: nil) ⇒ Adam constructor

#lr ⇒ Object

Methods inherited from GradientMethod

#call_hooks, #reallocate_cleared_grads, #setup, #update

Methods inherited from Chainer::Optimizer

#_call_hook, #add_hook, #call_hooks, #serialize, #setup

Constructor Details

#initialize(alpha: nil, beta1: nil, beta2: nil, eps: nil) ⇒ Adam

Returns a new instance of Adam. Omitted hyperparameters fall back to the defaults from Kingma & Ba (2015): alpha = 0.001 is the step size, beta1 = 0.9 and beta2 = 0.999 are the exponential decay rates for the first- and second-moment estimates, and eps = 1e-8 guards against division by zero.



# File 'lib/chainer/optimizers/adam.rb', line 44

def initialize(alpha: nil, beta1: nil, beta2: nil, eps: nil)
  super()
  # Unset hyperparameters fall back to the Adam paper's defaults.
  @hyperparam.instance_variable_set('@alpha', alpha || 0.001) # step size
  @hyperparam.instance_variable_set('@beta1', beta1 || 0.9)   # first-moment decay rate
  @hyperparam.instance_variable_set('@beta2', beta2 || 0.999) # second-moment decay rate
  @hyperparam.instance_variable_set('@eps', eps || 1e-8)      # numerical-stability constant
end
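
A minimal usage sketch (model stands for any Chainer::Link defined elsewhere; #setup is inherited from GradientMethod):

# Omitted hyperparameters keep the defaults shown above.
optimizer = Chainer::Optimizers::Adam.new(alpha: 0.001)
# Bind the optimizer to the model's parameters.
optimizer.setup(model)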

Instance Method Details

#create_update_rule ⇒ Object



# File 'lib/chainer/optimizers/adam.rb', line 52

def create_update_rule
  # Factory used during setup: each parameter receives its own AdamRule,
  # which reads defaults from this optimizer's shared hyperparameters.
  AdamRule.new(parent_hyperparam: @hyperparam)
end
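
GradientMethod#setup calls this factory once per parameter, so per-parameter state (the first- and second-moment estimates) lives in each AdamRule while optimizer-wide settings stay in one place via parent_hyperparam. A hypothetical inspection sketch (the params and update_rule accessor names follow the upstream Chainer convention and are assumptions here):

# After optimizer.setup(model), each parameter carries its own rule:
model.params.each do |param|
  rule = param.update_rule # this parameter's own AdamRule (assumed accessor)
end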

#lr ⇒ Object



# File 'lib/chainer/optimizers/adam.rb', line 56

def lr
  # Bias-correction factors for the first- and second-moment estimates;
  # @t is the update count maintained by the base optimizer.
  fix1 = 1.0 - (@hyperparam.beta1 ** @t)
  fix2 = 1.0 - (@hyperparam.beta2 ** @t)
  # Bias-corrected step size: alpha * sqrt(1 - beta2^t) / (1 - beta1^t)
  @hyperparam.alpha * Math.sqrt(fix2) / fix1
end
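
The returned value is the bias-corrected step size from the Adam paper, alpha * sqrt(1 - beta2^t) / (1 - beta1^t); it compensates for the zero initialization of the moment estimates and approaches alpha as t grows. A standalone sketch of how it evolves under the default hyperparameters:

alpha, beta1, beta2 = 0.001, 0.9, 0.999
[1, 10, 1000, 10000].each do |t|
  lr = alpha * Math.sqrt(1.0 - beta2**t) / (1.0 - beta1**t)
  puts format('t = %5d  lr = %.6f', t, lr)
end
# t =     1  lr = 0.000316
# t =    10  lr = 0.000153
# t =  1000  lr = 0.000795
# t = 10000  lr = 0.001000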