Class: Chainer::Functions::Activation::TanhGrad

Inherits:
Chainer::FunctionNode
Defined in:
lib/chainer/functions/activation/tanh.rb

Instance Attribute Summary

Attributes inherited from Chainer::FunctionNode

#inputs, #outputs, #rank

Instance Method Summary

#backward(indexes, grad_outputs) ⇒ Object
#forward(inputs) ⇒ Object
#initialize(x) ⇒ TanhGrad constructor

Methods inherited from Chainer::FunctionNode

#apply, #backward_accumulate, #forward_cpu, #get_retained_inputs, #get_retained_outputs, #label, #output_data, #retain_inputs, #retain_outputs, #unchain

Constructor Details

#initialize(x) ⇒ TanhGrad

Returns a new instance of TanhGrad.



# File 'lib/chainer/functions/activation/tanh.rb', line 49

def initialize(x)
  super()

  # The original input `x` is only required for cuDNN.
  # If it is nil, this class does not use cuDNN.
  # Note that x must be C-contiguous; this is checked
  # in Tanh.forward_gpu.
  @x = x
end
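
TanhGrad is not normally constructed by user code; Tanh#backward applies it so that the gradient of tanh is itself differentiable (double backpropagation). A minimal sketch of that call site, assuming Tanh retains its output y during forward; it is illustrative, not the exact library source:

# Sketch of the call site in Tanh#backward (not part of TanhGrad itself).
def backward(indexes, grad_outputs)
  y  = get_retained_outputs.first   # y = tanh(x), retained during forward
  gy = grad_outputs.first           # upstream gradient
  TanhGrad.new(nil).apply([y, gy])  # nil: skip the cuDNN-only input
end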

Instance Method Details

#backward(indexes, grad_outputs) ⇒ Object



# File 'lib/chainer/functions/activation/tanh.rb', line 67

def backward(indexes, grad_outputs)
  # y and gy are the inputs retained in #forward; g is the gradient
  # arriving at TanhGrad's output.
  y, gy = get_retained_inputs
  g = grad_outputs[0]

  y_mul_g = y * g
  grad_y = -2 * gy * y_mul_g  # d(gy * (1 - y * y)) / dy, chained with g
  ggy = g - y * y_mul_g       # d(gy * (1 - y * y)) / dgy, chained with g
  [grad_y, ggy]
end
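
Both outputs follow from differentiating the forward computation shown below, z = gy * (1 - y * y): dz/dy = -2 * y * gy gives grad_y = -2 * gy * (y * g), and dz/dgy = 1 - y * y gives ggy = g - y * (y * g). A quick numerical check of those formulas, assuming only Numo is available (the values are illustrative):

require 'numo/narray'

y  = Numo::SFloat[0.5]      # retained Tanh output
gy = Numo::SFloat[2.0]      # retained upstream gradient
g  = Numo::SFloat[3.0]      # gradient w.r.t. TanhGrad's output

y_mul_g = y * g             # 1.5
grad_y  = -2 * gy * y_mul_g # -2 * 2.0 * 1.5 = -6.0
ggy     = g - y * y_mul_g   # 3.0 - 0.5 * 1.5 = 2.25
p grad_y.to_a, ggy.to_a     # => [-6.0], [2.25]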

#forward(inputs) ⇒ Object



# File 'lib/chainer/functions/activation/tanh.rb', line 59

def forward(inputs)
  retain_inputs([0, 1])
  y, gy = inputs

  # Gradient of tanh expressed via its output: gy * (1 - y**2).
  one = y.class.new.fill(1)
  [Utils::Array.force_array(gy * (one - y * y))]
end