forked from yusugomori/DeepLearning
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathLogisticRegression.rb
More file actions
executable file
·135 lines (106 loc) · 2.22 KB
/
LogisticRegression.rb
File metadata and controls
executable file
·135 lines (106 loc) · 2.22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
#!/usr/bin/env ruby
# -*- coding: utf-8 -*-
class LogisticRegression
  # Multiclass logistic-regression (softmax) classifier trained by
  # plain gradient ascent on the log-likelihood, one example at a time.
  #
  # n     - number of training examples; the gradient is scaled by 1/n
  # n_in  - input dimensionality
  # n_out - number of output classes
  def initialize(n, n_in, n_out)
    @N = n
    @n_in = n_in
    @n_out = n_out
    # Weight matrix (n_out rows x n_in cols) and bias vector, zero-initialized.
    @W = Array.new(@n_out) { Array.new(@n_in, 0) }
    @b = Array.new(@n_out, 0)
  end

  # One gradient step on a single example.
  #
  # x  - input vector of length n_in
  # y  - one-hot label vector of length n_out
  # lr - learning rate
  def train(x, y, lr)
    # Forward pass: p(y|x) = softmax(W x + b), computed in place.
    p_y_given_x = Array.new(@n_out) do |i|
      @n_in.times.sum { |j| @W[i][j].to_f * x[j] } + @b[i]
    end
    softmax(p_y_given_x)

    # Backward pass: gradient of the log-likelihood is (y - p) x,
    # averaged over the dataset size @N.
    @n_out.times do |i|
      dy = y[i].to_f - p_y_given_x[i]
      @n_in.times { |j| @W[i][j] += lr * dy * x[j] / @N }
      @b[i] += lr * dy / @N
    end
  end

  # Numerically stable softmax over the first n_out entries of x, in place.
  def softmax(x)
    # Subtract the maximum before exponentiating to avoid overflow.
    # FIX: the original seeded max at 0.0, effectively using max(0, x.max);
    # softmax is shift-invariant so results are identical, but the true max
    # is the stable choice when every activation is negative.
    max = x.take(@n_out).max
    sum = 0.0
    @n_out.times do |i|
      x[i] = Math.exp(x[i] - max)
      sum += x[i]
    end
    @n_out.times { |i| x[i] /= sum }
  end

  # Fill y in place with the class probabilities for input x.
  #
  # x - input vector of length n_in
  # y - output buffer of length n_out (mutated)
  def predict(x, y)
    @n_out.times do |i|
      y[i] = @n_in.times.sum { |j| @W[i][j] * x[j] } + @b[i]
    end
    softmax(y)
  end
end
# Smoke test: train on six hand-crafted binary vectors (two classes),
# then print the predicted class probabilities for three held-out inputs.
def test_lr
  learning_rate = 0.1
  n_epochs = 500
  train_N = 6
  test_N = 3
  n_in = 6
  n_out = 2

  # First three rows belong to class 0, last three to class 1.
  train_X = [
    [1, 1, 1, 0, 0, 0],
    [1, 0, 1, 0, 0, 0],
    [1, 1, 1, 0, 0, 0],
    [0, 0, 1, 1, 1, 0],
    [0, 0, 1, 1, 0, 0],
    [0, 0, 1, 1, 1, 0]
  ]
  train_Y = [
    [1, 0],
    [1, 0],
    [1, 0],
    [0, 1],
    [0, 1],
    [0, 1]
  ]

  # construct
  classifier = LogisticRegression.new(train_N, n_in, n_out)

  # train
  n_epochs.times do
    train_N.times do |i|
      # BUG FIX: the original passed train_N[i] — bit i of the Integer 6 —
      # as the label instead of the one-hot row train_Y[i], so the model
      # silently trained on garbage labels.
      classifier.train(train_X[i], train_Y[i], learning_rate)
    end
    # learning_rate *= 0.95
  end

  # test data
  test_X = [
    [1, 1, 1, 0, 0, 0],
    [0, 0, 0, 1, 1, 0],
    [1, 1, 1, 1, 1, 0]
  ]
  test_Y = Array.new(test_N) { Array.new(n_out, 0) }

  # test: print P(class | x) for each held-out example, one row per input
  test_N.times do |i|
    classifier.predict(test_X[i], test_Y[i])
    n_out.times { |j| printf "%.5f ", test_Y[i][j] }
    puts
  end
end
# Run the demo only when executed directly, not when required as a library.
test_lr if __FILE__ == $0