LogisticRegression.java
forked from yusugomori/DeepLearning
package DeepLearning;
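/**
 * Multiclass logistic regression (softmax) classifier, trained by
 * stochastic gradient descent on the cross-entropy loss.
 *
 * A minimal way to run the demo below, assuming this file sits in a
 * DeepLearning/ directory matching the package declaration:
 *
 *   javac DeepLearning/LogisticRegression.java
 *   java DeepLearning.LogisticRegression
 */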
public class LogisticRegression {
    public int N;          // number of training examples (used to scale the gradient)
    public int n_in;       // input dimensionality
    public int n_out;      // number of output classes
    public double[][] W;   // weight matrix, n_out x n_in
    public double[] b;     // bias vector, length n_out
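
    /** Initializes a classifier for N training examples with zeroed weights and biases. */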
    public LogisticRegression(int N, int n_in, int n_out) {
        this.N = N;
        this.n_in = n_in;
        this.n_out = n_out;

        W = new double[n_out][n_in];
        b = new double[n_out];
    }
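
    /**
     * Performs one SGD step on a single example.
     *
     * Computes p(y|x) = softmax(W x + b), then updates W and b along the
     * gradient of the log-likelihood: dy = y - p(y|x), W += lr * dy * x / N,
     * b += lr * dy / N. Returns dy so callers can monitor the error.
     *
     * @param x  input vector of length n_in
     * @param y  one-hot label vector of length n_out
     * @param lr learning rate
     */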
    public double[] train(double[] x, int[] y, double lr) {
        double[] p_y_given_x = new double[n_out];
        double[] dy = new double[n_out];

        for (int i = 0; i < n_out; i++) {
            p_y_given_x[i] = 0;
            for (int j = 0; j < n_in; j++) {
                p_y_given_x[i] += W[i][j] * x[j];
            }
            p_y_given_x[i] += b[i];
        }
        softmax(p_y_given_x);

        for (int i = 0; i < n_out; i++) {
            dy[i] = y[i] - p_y_given_x[i];

            for (int j = 0; j < n_in; j++) {
                W[i][j] += lr * dy[i] * x[j] / N;
            }
            b[i] += lr * dy[i] / N;
        }

        return dy;
    }
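
    /**
     * In-place softmax over the first n_out entries of x:
     * x[i] = exp(x[i] - max) / sum_j exp(x[j] - max),
     * where max is the largest entry (floored at 0 by the initialisation).
     * The shift keeps exp() from overflowing for large activations.
     */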
    public void softmax(double[] x) {
        double max = 0.0;
        double sum = 0.0;

        for (int i = 0; i < n_out; i++) if (max < x[i]) max = x[i];

        for (int i = 0; i < n_out; i++) {
            x[i] = Math.exp(x[i] - max);
            sum += x[i];
        }

        for (int i = 0; i < n_out; i++) x[i] /= sum;
    }
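
    /**
     * Computes class probabilities for input x and writes them into y:
     * y = softmax(W x + b). y must have length n_out.
     */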
    public void predict(double[] x, double[] y) {
        for (int i = 0; i < n_out; i++) {
            y[i] = 0.;
            for (int j = 0; j < n_in; j++) {
                y[i] += W[i][j] * x[j];
            }
            y[i] += b[i];
        }

        softmax(y);
    }
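
    /**
     * Toy demo: six 6-dimensional binary vectors split into two classes
     * (features concentrated on the left vs. the right half), trained for
     * 500 epochs; class probabilities are then printed for two test vectors.
     */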
    private static void test_lr() {
        double learning_rate = 0.1;
        int n_epochs = 500;

        int train_N = 6;
        int test_N = 2;
        int n_in = 6;
        int n_out = 2;

        double[][] train_X = {
            {1., 1., 1., 0., 0., 0.},
            {1., 0., 1., 0., 0., 0.},
            {1., 1., 1., 0., 0., 0.},
            {0., 0., 1., 1., 1., 0.},
            {0., 0., 1., 1., 0., 0.},
            {0., 0., 1., 1., 1., 0.}
        };

        int[][] train_Y = {
            {1, 0},
            {1, 0},
            {1, 0},
            {0, 1},
            {0, 1},
            {0, 1}
        };

        // construct
        LogisticRegression classifier = new LogisticRegression(train_N, n_in, n_out);

        // train
        for (int epoch = 0; epoch < n_epochs; epoch++) {
            for (int i = 0; i < train_N; i++) {
                classifier.train(train_X[i], train_Y[i], learning_rate);
            }
            //learning_rate *= 0.95;
        }

        // test data
        double[][] test_X = {
            {1., 0., 1., 0., 0., 0.},
            {0., 0., 1., 1., 1., 0.}
        };

        double[][] test_Y = new double[test_N][n_out];

        // test
        for (int i = 0; i < test_N; i++) {
            classifier.predict(test_X[i], test_Y[i]);
            for (int j = 0; j < n_out; j++) {
                System.out.print(test_Y[i][j] + " ");
            }
            System.out.println();
        }
    }
    public static void main(String[] args) {
        test_lr();
    }
}