
Commit b0ad505

committed
modified: neural_network/artificial_neural_network.py
1 parent 7f53cdc commit b0ad505

File tree

1 file changed (+22, -30 lines changed)

neural_network/artificial_neural_network.py

@@ -6,17 +6,13 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE) as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
+      as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(
-        self,
-        input_size: int,
-        hidden_size: int,
-        output_size: int,
-        learning_rate: float = 0.1,
-    ) -> None:
+    def __init__(self, input_size: int, hidden_size: int, output_size: int,
+                 learning_rate: float = 0.1) -> None:
         """
         Initialize the neural network with random weights and biases.
 
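For readers following the diff, the constructor body below this signature is untouched by the commit; a minimal sketch of what it plausibly contains, assuming NumPy random initialization and the attribute names referenced later in the diff (weights_input_hidden, bias_hidden, weights_hidden_output, bias_output, learning_rate):

    # Hypothetical __init__ body; the actual initialization lines are not shown in this diff.
    def __init__(self, input_size: int, hidden_size: int, output_size: int,
                 learning_rate: float = 0.1) -> None:
        rng = np.random.default_rng()  # assumed random source; the real code may differ
        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
        self.bias_hidden = np.zeros((1, hidden_size))
        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))
        self.bias_output = np.zeros((1, output_size))
        self.learning_rate = learning_rate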
@@ -89,17 +85,16 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
-        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
+        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
+                             self.bias_hidden)
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (
-            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
-        )
+        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
+                            self.bias_output)
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(
-        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
-    ) -> None:
+    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
+                        outputs: np.ndarray) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
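As a standalone restatement of what the reformatted feedforward lines compute, the same two-layer pass can be written as a free function; this is an illustrative sketch (the sigmoid helper mirrors self.sigmoid), not code from the commit:

    import numpy as np

    def sigmoid(x: np.ndarray) -> np.ndarray:
        # Logistic activation, matching SimpleANN.sigmoid.
        return 1 / (1 + np.exp(-x))

    def feedforward_pass(inputs, w_ih, b_h, w_ho, b_o):
        # Hidden layer: affine transform followed by sigmoid.
        hidden_output = sigmoid(np.dot(inputs, w_ih) + b_h)
        # Output layer: same pattern applied to the hidden activations.
        return sigmoid(np.dot(hidden_output, w_ho) + b_o)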
@@ -120,21 +115,18 @@ def backpropagation(
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += (
-            self.hidden_output.T.dot(output_gradient) * self.learning_rate
-        )
-        self.bias_output += (
-            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
-
-        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
-        self.bias_hidden += (
-            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
-
-    def train(
-        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
-    ) -> None:
+        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
+                                       self.learning_rate)
+        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
+                             self.learning_rate)
+
+        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
+                                      self.learning_rate)
+        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
+                             self.learning_rate)
+
+    def train(self, inputs: np.ndarray, targets: np.ndarray,
+              epochs: int = 10000) -> None:
         """
         Train the neural network on the given input and target data.
 
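The update lines in this last hunk still implement plain gradient descent for MSE: each weight matrix is nudged by the dot product of the layer's (transposed) input activations with the corresponding gradient, scaled by the learning rate, and each bias by the batch-summed gradient. The XOR example mentioned in the class docstring would be exercised roughly as follows; a usage sketch that assumes random weight initialization, so exact outputs vary from run to run:

    import numpy as np

    # XOR truth table: two binary inputs, one binary target per row.
    inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    targets = np.array([[0], [1], [1], [0]])

    ann = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
    ann.train(inputs, targets, epochs=10000)

    # After training, the outputs should move toward [0, 1, 1, 0];
    # convergence depends on the random starting weights.
    print(ann.feedforward(inputs).round(3))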