@@ -31,7 +31,6 @@ class DenseLayer:
     """
     Layers of BP neural network
     """
-
     def __init__(
         self, units, activation=None, learning_rate=None, is_input_layer=False
     ):
@@ -58,6 +57,7 @@ def initializer(self, back_units):
             self.activation = sigmoid

     def cal_gradient(self):
+        # activation function may be sigmoid or linear
         if self.activation == sigmoid:
             gradient_mat = np.dot(self.output, (1 - self.output).T)
             gradient_activation = np.diag(np.diag(gradient_mat))
@@ -78,7 +78,6 @@ def forward_propagation(self, xdata):
         return self.output

     def back_propagation(self, gradient):
-
         gradient_activation = self.cal_gradient()  # an i * i matrix
         gradient = np.asmatrix(np.dot(gradient.T, gradient_activation))

@@ -89,19 +88,17 @@ def back_propagation(self, gradient):
         self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T)
         self.gradient_bias = gradient * self._gradient_bias
         self.gradient = np.dot(gradient, self._gradient_x).T
-        # ----------------------upgrade
-        # -----------the Negative gradient direction --------
+        # upgrade: step in the negative gradient direction
         self.weight = self.weight - self.learn_rate * self.gradient_weight
         self.bias = self.bias - self.learn_rate * self.gradient_bias.T
-
+        # update the weights and bias according to the learning rate (0.3 if undefined)
         return self.gradient


 class BPNN:
     """
     Back Propagation Neural Network model
     """
-
     def __init__(self):
         self.layers = []
         self.train_mse = []
@@ -144,8 +141,7 @@ def train(self, xdata, ydata, train_round, accuracy):
                 loss, gradient = self.cal_loss(_ydata, _xdata)
                 all_loss = all_loss + loss

-                # back propagation
-                # the input_layer does not upgrade
+                # back propagation: the input_layer does not upgrade
                 for layer in self.layers[:0:-1]:
                     gradient = layer.back_propagation(gradient)

@@ -176,7 +172,6 @@ def plot_loss(self):


 def example():
-
     x = np.random.randn(10, 10)
     y = np.asarray(
         [
@@ -192,17 +187,11 @@ def example():
             [0.1, 0.5],
         ]
     )
-
     model = BPNN()
-    model.add_layer(DenseLayer(10))
-    model.add_layer(DenseLayer(20))
-    model.add_layer(DenseLayer(30))
-    model.add_layer(DenseLayer(2))
-
+    for i in (10, 20, 30, 2):
+        model.add_layer(DenseLayer(i))
     model.build()
-
     model.summary()
-
     model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01)

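As a side note on the `cal_gradient` hunk above: the double `np.diag` call keeps only the diagonal of the outer product, which is exactly the elementwise sigmoid derivative o * (1 - o) laid out as an i * i diagonal matrix. A minimal standalone sketch (not part of this commit; plain ndarrays stand in for the class state, and the variable names are illustrative only):

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

rng = np.random.default_rng(0)
output = sigmoid(rng.standard_normal((3, 1)))  # stand-in for self.output, shape i x 1

gradient_mat = np.dot(output, (1 - output).T)         # i x i outer product o_i * (1 - o_j)
gradient_activation = np.diag(np.diag(gradient_mat))  # keep only the diagonal entries

# the diagonal is exactly sigmoid'(x) = o * (1 - o)
assert np.allclose(gradient_activation, np.diagflat(output * (1 - output)))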