Class: AprendizajeMaquina::ClasificacionLogistica

Inherits: Object
Defined in:
lib/aprendizaje_maquina/clasificacion_logistica.rb

Instance Method Summary collapse

Constructor Details

#initialize(x, y, theta) ⇒ ClasificacionLogistica

Returns a new instance of ClasificacionLogistica.

[View source]

3
4
5
6
7
8
9
10
11
12
13
14
# File 'lib/aprendizaje_maquina/clasificacion_logistica.rb', line 3

# Builds a logistic-regression classifier from a feature matrix, a target
# set and an initial parameter vector.
#
# @param x [Matrix] training features (one row per sample)
# @param y [Matrix, Vector, Array] training targets
# @param theta [Vector, Matrix] initial model parameters
#
# @m caches the number of training samples, derived from whichever
# container type the targets arrived in.
def initialize(x,y,theta)
  @x = x
  @y = y
  @theta = theta
  @m =
    case y
    when Matrix then y.row_count
    when Vector then y.size
    else y.length
    end
end

Instance Method Details

#predict(x) ⇒ Object

[View source]

70
71
72
73
74
75
76
77
78
79
# File 'lib/aprendizaje_maquina/clasificacion_logistica.rb', line 70

# Classifies a single sample.
#
# @param x [Matrix, Vector] feature row to classify (multiplied by the
#   trained @theta to form the hypothesis)
# @return [Integer] 1 when sigmoid(x * theta) >= 0.5, otherwise 0
def predict(x)
  hipo = x * @theta
  # The product is a one-element Matrix/Vector: extract its scalar.
  # (The original iterated with `map` purely for its side effect,
  # shadowing the parameter `x`; `to_a.flatten.last` takes the same
  # final element, and `|| 0` preserves the old default for an empty
  # product.)
  var = hipo.to_a.flatten.last || 0
  # Integers pass through unchanged; everything else is coerced to Float,
  # matching the original's Integer/Float handling.
  var = var.to_f unless var.is_a?(Integer)
  sigmoid(var) >= 0.5 ? 1 : 0
end

#train(iterations, alpha = nil, type_of_train) ⇒ Object

[View source]

16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
# File 'lib/aprendizaje_maquina/clasificacion_logistica.rb', line 16

# Fits the model parameters with the requested optimisation algorithm.
#
# @param iterations [Integer] number of optimisation steps
#   (note: the loop is inclusive, so it runs iterations + 1 times)
# @param alpha [Numeric, nil] learning rate; required by 'Grad' and 'SGD',
#   ignored by 'Newm'
# @param type_of_train [String] 'Grad' (batch gradient descent),
#   'Newm' (Newton's method) or 'SGD' (stochastic gradient descent)
# @return [String, nil] summary of the fitted theta and final cost, or nil
#   when type_of_train is not one of the recognised strings
def train(iterations, alpha = nil, type_of_train)
  case type_of_train
  when 'Grad'
    # Batch gradient descent: theta -= alpha/m * X' * (h(X*theta) - y)
    @cost_history = []
    costo = 0
    (0..iterations).each do |i|
      z = @x * @theta
      hx = z.map { |e| sigmoid(e) }
      @theta = @theta - alpha / @m * @x.transpose * (hx - @y)
      # `cost` is a helper defined elsewhere in this class; keep only the
      # last entry of its result, as the original side-effecting map did.
      cost.to_a.each { |e| costo = e }
      @cost_history << ["iteracion: #{i}", costo]
    end
    "theta values => #{@theta} | cost => #{costo}"
  when 'Newm'
    # Newton's method: theta -= H^-1 * gradient
    @cost_history = []
    costo = 0
    (0..iterations).each do |i|
      z = @x * @theta
      hx = z.map { |e| sigmoid(e) }
      uno_menos_hx = hx.map { |e| 1 - e }
      # Element-wise h(x) * (1 - h(x)) — the sigmoid derivative terms used
      # to build the Hessian via the external `sumatoria` helper.
      escalar = (0...hx.size).map { |u| hx[u] * uno_menos_hx[u] }
      gradiente = (1.0 / @m) * @x.transpose * (hx - @y)
      hessian = (1.0 / @m) * @x.transpose * sumatoria(escalar) * @x
      # FIX: use Matrix#inverse instead of hand-building the inverse from
      # the determinant and adjugate — more direct and numerically safer.
      @theta = @theta - hessian.inverse * gradiente
      cost.to_a.each { |e| costo = e }
      @cost_history << ["iteracion: #{i}", costo]
    end
    "theta values => #{@theta} | cost => #{costo}"
  when 'SGD'
    # Stochastic gradient descent over per-row shuffled features.
    @cost_history = []
    costo = 0
    (0..iterations).each do |i|
      # FIX: the inner counter previously shadowed the outer `i`
      # (`for i in 0..i`); it is now a distinct `j`, logged exactly as the
      # inner counter was before.
      (0..i).each do |j|
        # FIX: the original called lowercase `matrix(...)`, an undefined
        # method (NameError at runtime); Matrix.rows is the stdlib way to
        # build a matrix from row arrays.
        z = Matrix.rows(@x.to_a.map { |row| row.shuffle }) * @theta
        hx = z.map { |e| sigmoid(e) }
        @theta = @theta - alpha / @m * @x.transpose * (hx - @y)
        cost.to_a.each { |e| costo = e }
        @cost_history << ["iteracion: #{j}", costo]
      end
    end
    "theta values => #{@theta} | cost => #{costo}"
  end
end