Dropout.java (forked from yusugomori/DeepLearning)
package DeepLearning;

import java.util.Random;
import java.util.List;
import java.util.ArrayList;
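/**
 * Dropout: a feed-forward network (a stack of HiddenLayers topped by a
 * LogisticRegression output layer) trained with dropout regularization.
 * During training, each hidden unit's output is zeroed with probability
 * p_dropout; at test time, pretest() rescales the trained weights by
 * 1 - p_dropout to compensate for the missing masks.
 */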
public class Dropout {
    public int N;                      // number of training examples
    public int n_in;                   // input dimensionality
    public int[] hidden_layer_sizes;   // units per hidden layer
    public int n_out;                  // number of output classes
    public int n_layers;               // number of hidden layers
    public HiddenLayer[] hiddenLayers;
    public LogisticRegression logisticLayer;
    public Random rng;
    public Dropout(int N, int n_in, int[] hidden_layer_sizes, int n_out, Random rng, String activation) {
        this.N = N;
        this.n_in = n_in;
        this.hidden_layer_sizes = hidden_layer_sizes;
        this.n_layers = hidden_layer_sizes.length;
        this.n_out = n_out;
        this.hiddenLayers = new HiddenLayer[n_layers];

        if (rng == null) rng = new Random(1234);
        this.rng = rng;

        if (activation == null) activation = "ReLU";

        // construct the multi-layer network
        int input_size;
        for(int i=0; i<this.n_layers; i++) {
            // each layer's input size is the previous layer's width
            if(i == 0) {
                input_size = n_in;
            } else {
                input_size = hidden_layer_sizes[i-1];
            }

            // construct hiddenLayer
            this.hiddenLayers[i] = new HiddenLayer(N, input_size, hidden_layer_sizes[i], null, null, rng, activation);
        }

        // construct logisticLayer on top of the last hidden layer
        this.logisticLayer = new LogisticRegression(N, hidden_layer_sizes[this.n_layers-1], n_out);
    }
    public void train(int epochs, double[][] train_X, int[][] train_Y, boolean dropout, double p_dropout, double lr) {
        List<int[]> dropout_masks;
        List<double[]> layer_inputs;
        double[] layer_input;
        double[] layer_output = new double[0];

        for(int epoch=0; epoch<epochs; epoch++) {
            for(int n=0; n<N; n++) {

                dropout_masks = new ArrayList<>(n_layers);
                layer_inputs = new ArrayList<>(n_layers+1);  // +1 for the logistic layer's input

                // forward hiddenLayers
                for(int i=0; i<n_layers; i++) {
                    if(i == 0) layer_input = train_X[n];
                    else layer_input = layer_output.clone();

                    layer_inputs.add(layer_input.clone());

                    layer_output = new double[hidden_layer_sizes[i]];
                    hiddenLayers[i].forward(layer_input, layer_output);

                    if(dropout) {
                        // sample a 0/1 keep-mask and zero out the dropped units
                        int[] mask = hiddenLayers[i].dropout(layer_output.length, p_dropout, rng);
                        for(int j=0; j<layer_output.length; j++) layer_output[j] *= mask[j];

                        dropout_masks.add(mask.clone());
                    }
                }

                // forward & backward logisticLayer
                double[] logistic_layer_dy = logisticLayer.train(layer_output, train_Y[n], lr);
                layer_inputs.add(layer_output.clone());

                // backward hiddenLayers
                double[] prev_dy = logistic_layer_dy;
                double[][] prev_W;
                double[] dy = new double[0];

                for(int i=n_layers-1; i>=0; i--) {
                    if(i == n_layers-1) {
                        prev_W = logisticLayer.W;
                    } else {
                        prev_dy = dy.clone();
                        prev_W = hiddenLayers[i+1].W;
                    }

                    if(dropout && i < n_layers-1) {
                        // prev_dy holds the gradient for layer i+1's output, so it must be
                        // masked with that layer's mask (dropped units receive no gradient);
                        // the original indexed the masks with i, which mismatches prev_dy's
                        // length whenever consecutive hidden layers differ in size
                        for(int j=0; j<prev_dy.length; j++) {
                            prev_dy[j] *= dropout_masks.get(i+1)[j];
                        }
                    }

                    dy = new double[hidden_layer_sizes[i]];
                    hiddenLayers[i].backward(layer_inputs.get(i), dy, layer_inputs.get(i+1), prev_dy, prev_W, lr);
                }
            }
        }
    }
    // scale the trained weights by the keep probability so that test-time
    // activations match their expected value under training-time dropout
    public void pretest(double p_dropout) {
        for(int i=0; i<n_layers; i++) {
            // W of layer i has hidden_layer_sizes[i] rows and one column per input
            // unit; the original derived these bounds from the neighboring layers,
            // which over- or under-runs W whenever the layer sizes differ
            int in = (i == 0) ? n_in : hidden_layer_sizes[i-1];
            int out = hidden_layer_sizes[i];

            for (int l = 0; l < out; l++) {
                for (int m = 0; m < in; m++) {
                    hiddenLayers[i].W[l][m] *= 1 - p_dropout;
                }
            }
        }
    }
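    // Note: an equivalent and now more common formulation is "inverted dropout",
    // which divides the surviving activations by (1 - p_dropout) during training
    // instead; test-time weights can then be used as-is and pretest() is not
    // needed. A sketch of how the training-time masking in train() would change
    // under that alternative:
    //
    //   if(dropout) {
    //       int[] mask = hiddenLayers[i].dropout(layer_output.length, p_dropout, rng);
    //       for(int j=0; j<layer_output.length; j++) {
    //           layer_output[j] *= mask[j] / (1. - p_dropout);  // rescale kept units
    //       }
    //   }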
    public void predict(double[] x, double[] y) {
        double[] layer_input;
        double[] layer_output = new double[0];

        for(int i=0; i<n_layers; i++) {
            if(i == 0) layer_input = x;
            else layer_input = layer_output.clone();

            layer_output = new double[hidden_layer_sizes[i]];
            hiddenLayers[i].forward(layer_input, layer_output);
        }

        logisticLayer.predict(layer_output, y);
    }
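    // predict() applies no masks, so when the network was trained with dropout,
    // pretest() should be called once before prediction (as test_dropout() does
    // below) to keep activations on the training-time scale.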
    private static void test_dropout() {
        Random rng = new Random(123);

        double learning_rate = 0.1;
        int n_epochs = 5000;

        int train_N = 4;
        int test_N = 4;
        int n_in = 2;
        int[] hidden_layer_sizes = {10, 10};
        int n_out = 2;

        boolean dropout = true;
        double p_dropout = 0.5;

        // XOR-style training data: class {1, 0} iff exactly one input is 1
        double[][] train_X = {
            {0., 0.},
            {0., 1.},
            {1., 0.},
            {1., 1.},
        };

        int[][] train_Y = {
            {0, 1},
            {1, 0},
            {1, 0},
            {0, 1},
        };

        // construct Dropout
        Dropout classifier = new Dropout(train_N, n_in, hidden_layer_sizes, n_out, rng, "ReLU");

        // train
        classifier.train(n_epochs, train_X, train_Y, dropout, p_dropout, learning_rate);

        // pretest: rescale the weights before evaluation
        if(dropout) classifier.pretest(p_dropout);

        // test data
        double[][] test_X = {
            {0., 0.},
            {0., 1.},
            {1., 0.},
            {1., 1.},
        };

        double[][] test_Y = new double[test_N][n_out];

        // test
        for(int i=0; i<test_N; i++) {
            classifier.predict(test_X[i], test_Y[i]);

            for(int j=0; j<n_out; j++) {
                System.out.print(test_Y[i][j] + " ");
            }
            System.out.println();
        }
    }

    public static void main(String[] args) {
        test_dropout();
    }
}