
Commit 8dfcc1b

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent ce32466 commit 8dfcc1b

File tree

1 file changed: +14 -5 lines changed


neural_network/artificial_neural_network.py

```diff
@@ -4,6 +4,7 @@
 - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE) as the loss function.
 - Example demonstrates solving the XOR problem.
 """
+
 import numpy as np
 
 
@@ -31,7 +32,9 @@ def feedforward(self, X):
         self.hidden_output = self.sigmoid(self.hidden_input)
 
         # Output layer
-        self.final_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
         self.final_output = self.sigmoid(self.final_input)
 
         return self.final_output
```
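For context, the wrapped lines implement a standard two-layer forward pass: sigmoid(X · W_ih + b_h) through the hidden layer, then sigmoid(hidden · W_ho + b_o) at the output. A minimal standalone sketch of the same computation (the shapes and initialization here are illustrative assumptions, not taken from the file):

```python
import numpy as np


def sigmoid(x):
    # Logistic activation, matching the class's self.sigmoid
    return 1.0 / (1.0 + np.exp(-x))


# Assumed shapes for illustration: 2 inputs -> 2 hidden units -> 1 output
rng = np.random.default_rng(0)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
weights_input_hidden = rng.normal(size=(2, 2))
bias_hidden = np.zeros((1, 2))
weights_hidden_output = rng.normal(size=(2, 1))
bias_output = np.zeros((1, 1))

# Hidden layer (mirrors self.hidden_input / self.hidden_output)
hidden_output = sigmoid(X @ weights_input_hidden + bias_hidden)
# Output layer: exactly the expression wrapped by this commit
final_output = sigmoid(hidden_output @ weights_hidden_output + bias_output)
print(final_output.shape)  # (4, 1)
```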
```diff
@@ -45,18 +48,24 @@ def backpropagation(self, X, y, output):
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
         # Update weights and biases
-        self.weights_hidden_output += self.hidden_output.T.dot(output_gradient) * self.learning_rate
-        self.bias_output += np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
         self.weights_input_hidden += X.T.dot(hidden_gradient) * self.learning_rate
-        self.bias_hidden += np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
 
     def train(self, X, y, epochs=10000):
         for epoch in range(epochs):
             output = self.feedforward(X)
             self.backpropagation(X, y, output)
             if epoch % 1000 == 0:
                 loss = np.mean(np.square(y - output))
-                print(f'Epoch {epoch}, Loss: {loss}')
+                print(f"Epoch {epoch}, Loss: {loss}")
 
     def predict(self, X):
         return self.feedforward(X)
```
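The wrapped `+=` lines are one batch gradient-descent step on the MSE loss. The diff does not show how `output_gradient` is computed; below is a sketch of the full update, assuming the convention usual for this kind of implementation, `output_gradient = (y - output) * sigmoid_derivative(output)`, which makes the `+=` updates move the weights down the MSE gradient:

```python
import numpy as np


def sigmoid_derivative(s):
    # Derivative of the logistic function, written in terms of its output s
    return s * (1.0 - s)


def gradient_step(X, y, output, hidden_output, weights_hidden_output, lr=0.1):
    # Assumed error signal at the output layer (not shown in this diff)
    output_gradient = (y - output) * sigmoid_derivative(output)
    # Propagate the error back through the hidden->output weights
    hidden_error = output_gradient.dot(weights_hidden_output.T)
    hidden_gradient = hidden_error * sigmoid_derivative(hidden_output)
    # The same four quantities the wrapped lines add to the parameters
    d_w_ho = hidden_output.T.dot(output_gradient) * lr
    d_b_o = np.sum(output_gradient, axis=0, keepdims=True) * lr
    d_w_ih = X.T.dot(hidden_gradient) * lr
    d_b_h = np.sum(hidden_gradient, axis=0, keepdims=True) * lr
    return d_w_ho, d_b_o, d_w_ih, d_b_h
```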

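As the file's docstring says, the example solves XOR. Here is a self-contained sketch of how the pieces in this diff fit together end to end; the class name, constructor, and initialization are assumptions, since only fragments of the real file appear above:

```python
import numpy as np


class TwoLayerANN:
    # Hypothetical stand-in for the class in this file; the constructor is
    # not part of the diff, so names, shapes, and initialization are assumptions.
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.5):
        rng = np.random.default_rng(42)
        self.weights_input_hidden = rng.normal(size=(input_size, hidden_size))
        self.bias_hidden = np.zeros((1, hidden_size))
        self.weights_hidden_output = rng.normal(size=(hidden_size, output_size))
        self.bias_output = np.zeros((1, output_size))
        self.learning_rate = learning_rate

    def sigmoid(self, x):
        return 1.0 / (1.0 + np.exp(-x))

    def sigmoid_derivative(self, s):
        return s * (1.0 - s)

    def feedforward(self, X):
        self.hidden_output = self.sigmoid(
            X @ self.weights_input_hidden + self.bias_hidden
        )
        self.final_output = self.sigmoid(
            self.hidden_output @ self.weights_hidden_output + self.bias_output
        )
        return self.final_output

    def backpropagation(self, X, y, output):
        # Assumed MSE error signal; the += updates then descend the loss
        output_gradient = (y - output) * self.sigmoid_derivative(output)
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
        self.weights_hidden_output += (
            self.hidden_output.T.dot(output_gradient) * self.learning_rate
        )
        self.bias_output += (
            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
        )
        self.weights_input_hidden += X.T.dot(hidden_gradient) * self.learning_rate
        self.bias_hidden += (
            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
        )

    def train(self, X, y, epochs=10000):
        for _ in range(epochs):
            output = self.feedforward(X)
            self.backpropagation(X, y, output)

    def predict(self, X):
        return self.feedforward(X)


# XOR truth table
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
y = np.array([[0], [1], [1], [0]], dtype=float)

nn = TwoLayerANN(input_size=2, hidden_size=4, output_size=1)
nn.train(X, y, epochs=10000)
print(nn.predict(X).round().ravel())  # typically converges to [0. 1. 1. 0.]
```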