
Commit 79fed0c

Committed Oct 7, 2024
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent b0ad505 commit 79fed0c

File tree

1 file changed: +31 −22 lines


neural_network/artificial_neural_network.py

Lines changed: 31 additions & 22 deletions
@@ -6,13 +6,18 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)

     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
-    as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
+      as the loss function.
     - Example demonstrates solving the XOR problem.
     """

-    def __init__(self, input_size: int, hidden_size: int, output_size: int,
-                 learning_rate: float = 0.1) -> None:
+    def __init__(
+        self,
+        input_size: int,
+        hidden_size: int,
+        output_size: int,
+        learning_rate: float = 0.1,
+    ) -> None:
         """
         Initialize the neural network with random weights and biases.

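The docstring mentions an XOR example. For orientation, here is a minimal usage sketch based only on the signatures visible in this diff (`__init__`, `train`, `feedforward`); the import path and layer sizes are assumptions, and the file's actual example may differ:

```python
# Hypothetical usage sketch; the import path is an assumption based on the
# file location shown in this commit (neural_network/artificial_neural_network.py).
import numpy as np

from neural_network.artificial_neural_network import SimpleANN

# XOR truth table: two binary inputs, one binary output.
inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
targets = np.array([[0], [1], [1], [0]])

# Layer sizes are an assumption consistent with the XOR setup described above.
ann = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
ann.train(inputs, targets, epochs=10000)

predictions = ann.feedforward(inputs)
print(predictions.round())  # should approach [[0], [1], [1], [0]]
```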
@@ -85,16 +90,17 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
-        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
-                             self.bias_hidden)
+        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
-                            self.bias_output)
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output

-    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
-                        outputs: np.ndarray) -> None:
+    def backpropagation(
+        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
+    ) -> None:
         """
         Perform backpropagation to adjust the weights and biases.

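The rewrapping above does not change behavior: the forward pass is still two affine maps, each followed by a sigmoid. A self-contained NumPy sketch of the same computation (weight shapes and initialization are assumptions chosen to reproduce the `(2, 1)` doctest shape):

```python
import numpy as np


def sigmoid(x: np.ndarray) -> np.ndarray:
    return 1 / (1 + np.exp(-x))


rng = np.random.default_rng(42)

# Shapes match a 2-input, 2-hidden-unit, 1-output network (an assumption
# mirroring the XOR setup; the file's actual initialization may differ).
weights_input_hidden = rng.standard_normal((2, 2))
bias_hidden = np.zeros((1, 2))
weights_hidden_output = rng.standard_normal((2, 1))
bias_output = np.zeros((1, 1))

inputs = np.array([[0.0, 1.0], [1.0, 1.0]])

# Same two steps as the diff: affine map + sigmoid, twice.
hidden_output = sigmoid(inputs @ weights_input_hidden + bias_hidden)
final_output = sigmoid(hidden_output @ weights_hidden_output + bias_output)
print(final_output.shape)  # (2, 1)
```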
@@ -115,18 +121,21 @@ def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

-        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
-                                       self.learning_rate)
-        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
-                                      self.learning_rate)
-        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-    def train(self, inputs: np.ndarray, targets: np.ndarray,
-              epochs: int = 10000) -> None:
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+    def train(
+        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
+    ) -> None:
         """
         Train the neural network on the given input and target data.

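For readers checking the equivalence, the rewrapped statements are ordinary gradient-descent updates for MSE loss with sigmoid activations. A standalone sketch of the same step follows; note that `output_gradient` is computed before this hunk and is not shown, so its definition below is an assumption consistent with the `+=` update direction:

```python
import numpy as np


def sigmoid_derivative(activated: np.ndarray) -> np.ndarray:
    # Derivative of the sigmoid, expressed in terms of its output.
    return activated * (1 - activated)


def backprop_step(
    inputs: np.ndarray,
    targets: np.ndarray,
    hidden_output: np.ndarray,
    outputs: np.ndarray,
    weights_input_hidden: np.ndarray,
    bias_hidden: np.ndarray,
    weights_hidden_output: np.ndarray,
    bias_output: np.ndarray,
    learning_rate: float = 0.1,
) -> None:
    # Assumed definition: MSE error scaled by the sigmoid derivative, which
    # makes the `+=` updates below a descent step on the loss.
    output_gradient = (targets - outputs) * sigmoid_derivative(outputs)

    # These two lines appear as context in the hunk above.
    hidden_error = output_gradient.dot(weights_hidden_output.T)
    hidden_gradient = hidden_error * sigmoid_derivative(hidden_output)

    # The four updates pre-commit rewrapped, unchanged in behavior.
    # NumPy `+=` mutates the weight/bias arrays in place.
    weights_hidden_output += hidden_output.T.dot(output_gradient) * learning_rate
    bias_output += np.sum(output_gradient, axis=0, keepdims=True) * learning_rate
    weights_input_hidden += inputs.T.dot(hidden_gradient) * learning_rate
    bias_hidden += np.sum(hidden_gradient, axis=0, keepdims=True) * learning_rate
```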
