Commit 1b39858

y4shb authored and cclauss committed

Update back_propagation_neural_network.py (TheAlgorithms#1342)

* Update back_propagation_neural_network.py

  Added comments below functions

* Update back_propagation_neural_network.py

Co-authored-by: Christian Clauss <[email protected]>
1 parent 725834b commit 1b39858

1 file changed (+6, -17 lines)

neural_network/back_propagation_neural_network.py (+6, -17)
@@ -31,7 +31,6 @@ class DenseLayer:
     """
     Layers of BP neural network
     """
-
     def __init__(
         self, units, activation=None, learning_rate=None, is_input_layer=False
     ):
@@ -58,6 +57,7 @@ def initializer(self, back_units):
             self.activation = sigmoid
 
     def cal_gradient(self):
+        # activation function may be sigmoid or linear
         if self.activation == sigmoid:
             gradient_mat = np.dot(self.output, (1 - self.output).T)
             gradient_activation = np.diag(np.diag(gradient_mat))
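
The comment added to cal_gradient flags that the activation may be sigmoid or linear. For the sigmoid branch shown above, a minimal standalone sketch of the computation (plain NumPy; the 3-unit layer size is illustrative, only the formula comes from the diff):

    import numpy as np

    def sigmoid(x):
        return 1 / (1 + np.exp(-x))

    # Hypothetical column of layer outputs for a 3-unit layer.
    output = sigmoid(np.random.randn(3, 1))

    # np.dot(output, (1 - output).T) is an i x i outer product;
    # np.diag(np.diag(...)) keeps only its diagonal, i.e. the
    # element-wise sigmoid derivative output * (1 - output).
    gradient_mat = np.dot(output, (1 - output).T)
    gradient_activation = np.diag(np.diag(gradient_mat))
    print(gradient_activation.shape)  # (3, 3)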
@@ -78,7 +78,6 @@ def forward_propagation(self, xdata):
         return self.output
 
     def back_propagation(self, gradient):
-
         gradient_activation = self.cal_gradient()  # i * i dimension
         gradient = np.asmatrix(np.dot(gradient.T, gradient_activation))
 
@@ -89,19 +88,17 @@ def back_propagation(self, gradient):
         self.gradient_weight = np.dot(gradient.T, self._gradient_weight.T)
         self.gradient_bias = gradient * self._gradient_bias
         self.gradient = np.dot(gradient, self._gradient_x).T
-        # ----------------------upgrade
-        # -----------the Negative gradient direction --------
+        # upgrade: the Negative gradient direction
         self.weight = self.weight - self.learn_rate * self.gradient_weight
         self.bias = self.bias - self.learn_rate * self.gradient_bias.T
-
+        # updates the weights and bias according to learning rate (0.3 if undefined)
         return self.gradient
 
 
 class BPNN:
     """
     Back Propagation Neural Network model
     """
-
     def __init__(self):
         self.layers = []
         self.train_mse = []
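
The two dashed comments collapse into one line describing a vanilla gradient-descent step: each parameter moves against its gradient (chained through the i x i activation Jacobian computed above), scaled by the learning rate, which the second new comment notes defaults to 0.3 when none is given. A self-contained sketch of that update with made-up shapes (variable names mirror the diff; the sizes and values are illustrative):

    import numpy as np

    learn_rate = 0.3  # the default the new comment refers to

    weight = np.asmatrix(np.random.randn(4, 3))
    gradient_weight = np.asmatrix(np.random.randn(4, 3))
    bias = np.asmatrix(np.random.randn(4, 1))
    gradient_bias = np.asmatrix(np.random.randn(1, 4))

    # Step in the negative gradient direction, as the merged comment says.
    weight = weight - learn_rate * gradient_weight
    bias = bias - learn_rate * gradient_bias.T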
@@ -144,8 +141,7 @@ def train(self, xdata, ydata, train_round, accuracy):
             loss, gradient = self.cal_loss(_ydata, _xdata)
             all_loss = all_loss + loss
 
-            # back propagation
-            # the input_layer does not upgrade
+            # back propagation: the input_layer does not upgrade
             for layer in self.layers[:0:-1]:
                 gradient = layer.back_propagation(gradient)
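
The merged comment explains the loop header rather than the loop body: the slice self.layers[:0:-1] walks the layers from last to first and stops before index 0, so the input layer is skipped and never updated. A quick list-based illustration (layer labels are hypothetical):

    layers = ["input", "hidden1", "hidden2", "output"]
    print(layers[:0:-1])  # ['output', 'hidden2', 'hidden1'] -- index 0 excluded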

@@ -176,7 +172,6 @@ def plot_loss(self):
 
 
 def example():
-
     x = np.random.randn(10, 10)
     y = np.asarray(
         [
@@ -192,17 +187,11 @@ def example():
             [0.1, 0.5],
         ]
     )
-
     model = BPNN()
-    model.add_layer(DenseLayer(10))
-    model.add_layer(DenseLayer(20))
-    model.add_layer(DenseLayer(30))
-    model.add_layer(DenseLayer(2))
-
+    for i in (10, 20, 30, 2):
+        model.add_layer(DenseLayer(i))
     model.build()
-
     model.summary()
-
     model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01)
 
 
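
The refactored example() replaces four repeated add_layer calls with a loop over the layer widths. A hedged end-to-end sketch of how the resulting model is assembled and trained, assuming the module is importable under its file name (BPNN, DenseLayer, and the train call come from the file above; the import path and the random targets are illustrative):

    import numpy as np
    from back_propagation_neural_network import BPNN, DenseLayer

    x = np.random.randn(10, 10)
    y = np.random.rand(10, 2)  # stand-in for the hand-written target list

    model = BPNN()
    for units in (10, 20, 30, 2):  # same widths as the refactored loop
        model.add_layer(DenseLayer(units))
    model.build()
    model.summary()
    model.train(xdata=x, ydata=y, train_round=100, accuracy=0.01)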