Class: Newral::Functions::Line

Inherits:
Base
  • Object
show all
Defined in:
lib/newral/functions/line.rb

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Methods inherited from Base

#calculate_descent, #calculate_error, #calculate_for_center_distance, #error_gradient_approximation, #find_minimum, #move_random, #move_several

Constructor Details

#initialize(factor: 1, bias: 0, center: nil) ⇒ Line

Returns a new instance of Line.



8
9
10
11
12
# File 'lib/newral/functions/line.rb', line 8

# Builds a line y = factor * x + bias.
#
# @param factor [Numeric] slope of the line
# @param bias [Numeric] y-intercept
# @param center [Object, nil] optional center; duplicated so later
#   mutation of the caller's object does not leak into this line
def initialize( factor: 1, bias: 0, center: nil )
  @factor, @bias = factor, bias
  @center = center.dup if center
end

Instance Attribute Details

#center ⇒ Object

Returns the value of attribute center.



6
7
8
# File 'lib/newral/functions/line.rb', line 6

def center
  @center
end

Class Method Details

.create_random(low_range: -9, high_range: 9) ⇒ Object



18
19
20
21
22
# File 'lib/newral/functions/line.rb', line 18

# Builds a line with factor and bias drawn uniformly at random from
# [low_range, high_range) (upper bound exclusive, as with Kernel#rand).
#
# @param low_range [Integer] inclusive lower bound for both parameters
# @param high_range [Integer] exclusive upper bound for both parameters
# @return [Line] a freshly constructed random line
def self.create_random( low_range: -9, high_range: 9 )
  span = high_range - low_range
  random_factor = low_range + rand(span)
  random_bias   = low_range + rand(span)
  new( factor: random_factor, bias: random_bias )
end

Instance Method Details

#calculate(input) ⇒ Object



14
15
16
# File 'lib/newral/functions/line.rb', line 14

# Evaluates the line at the given point: y = factor * input + bias.
#
# @param input [Numeric] the x value
# @return [Numeric] the corresponding y value
def calculate( input )
  input.then { |x| @factor * x + @bias }
end

#move(direction: 0, step: 0.01, step_percentage: nil) ⇒ Object



28
29
30
31
32
33
34
35
36
# File 'lib/newral/functions/line.rb', line 28

# Nudges one parameter of the line, either by an absolute step or by a
# percentage of its current value.
#
# @param direction [Integer] 0 adjusts the bias, 1 adjusts the factor
# @param step [Numeric] absolute increment (used when step_percentage is nil)
# @param step_percentage [Numeric, nil] relative change in percent
# @raise [Errors::InvalidDirection] for any direction other than 0 or 1
# @return [self]
def move( direction: 0, step: 0.01, step_percentage: nil )
  if direction == 0
    @bias = step_percentage ? @bias * (1 + step_percentage.to_f / 100) : @bias + step
  elsif direction == 1
    @factor = step_percentage ? @factor * (1 + step_percentage.to_f / 100) : @factor + step
  else
    raise Errors::InvalidDirection
  end
  self
end

#move_with_gradient(input: [], output: [], learning_rate: 0.01) ⇒ Object



38
39
40
41
42
43
44
45
46
47
48
49
# File 'lib/newral/functions/line.rb', line 38

# Performs one gradient-descent step on factor and bias, minimising the
# mean squared error of this line against the given samples.
#
# Gradients (for N samples): d/db = -(2/N) * Σ (y - (m*x + b))
#                            d/dm = -(2/N) * Σ x * (y - (m*x + b))
#
# @param input [Array<Numeric>] sample x values
# @param output [Array<Numeric>] observed y values, index-aligned with input
# @param learning_rate [Numeric] step size applied to both gradients
def move_with_gradient( input: [], output: [], learning_rate: 0.01 )
  sample_count = input.size
  bias_grad = 0
  factor_grad = 0
  input.zip(output) do |x, y|
    residual = y - (@factor * x + @bias)
    bias_grad = bias_grad - 2.0 / sample_count * residual
    factor_grad = factor_grad - 2.0 / sample_count * x * residual
  end
  @bias = @bias - (learning_rate * bias_grad)
  @factor = @factor - (learning_rate * factor_grad)
end

#number_of_directions ⇒ Object



24
25
26
# File 'lib/newral/functions/line.rb', line 24

# The line has two tunable parameters (bias and factor), so #move
# accepts exactly two directions: 0 and 1.
#
# @return [Integer] the number of valid move directions
def number_of_directions
  2
end