
Commit f3e3a82
Committed Oct 7, 2024
1 parent: b0ad505

Commit message:
    modified: neural_network/artificial_neural_network.py

File tree: 1 file changed (+34, -24 lines)


neural_network/artificial_neural_network.py
Lines changed: 34 additions & 24 deletions
@@ -6,12 +6,12 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
-      as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
+      as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(self, input_size: int, hidden_size: int, output_size: int,
+    def __init__(self, input_size: int, hidden_size: int, output_size: int,
                  learning_rate: float = 0.1) -> None:
         """
         Initialize the neural network with random weights and biases.
@@ -63,7 +63,7 @@ def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
 
         Example:
         >>> ann = SimpleANN(2, 2, 1)
-        >>> output = ann.sigmoid(np.array([0.5]))
+        >>> output = ann.sigmoid(np.array([0]))  # Use input 0 for testing
         >>> ann.sigmoid_derivative(output)
         array([0.25])
         """
@@ -85,16 +85,16 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
-        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
-                             self.bias_hidden)
+        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
-                            self.bias_output)
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
-                        outputs: np.ndarray) -> None:
+    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
+                        outputs: np.ndarray) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
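The feedforward reformat is behavior-preserving: both versions compute hidden = sigmoid(np.dot(inputs, W_ih) + b_h) followed by output = sigmoid(np.dot(hidden, W_ho) + b_o). A shape sketch in plain numpy, with the weight and bias shapes assumed from the dot products (the __init__ body is outside this diff):

    import numpy as np

    rng = np.random.default_rng(0)
    inputs = np.array([[0, 0], [1, 1]])                      # (2, 2): two samples, input_size=2
    w_ih, b_h = rng.normal(size=(2, 2)), np.zeros((1, 2))    # input -> hidden (assumed shapes)
    w_ho, b_o = rng.normal(size=(2, 1)), np.zeros((1, 1))    # hidden -> output (assumed shapes)

    hidden = 1 / (1 + np.exp(-(inputs @ w_ih + b_h)))        # (2, 2)
    final = 1 / (1 + np.exp(-(hidden @ w_ho + b_o)))         # (2, 1)
    print(final.shape)                                       # (2, 1), matching the doctest above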
@@ -115,36 +115,41 @@ def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
-                                       self.learning_rate)
-        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
-                                      self.learning_rate)
-        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-    def train(self, inputs: np.ndarray, targets: np.ndarray,
-              epochs: int = 10000) -> None:
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+        self.weights_input_hidden += (
+            inputs.T.dot(hidden_gradient) * self.learning_rate
+        )
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+    def train(self, inputs: np.ndarray, targets: np.ndarray,
+              epochs: int = 10000, verbose: bool = False) -> None:
         """
         Train the neural network on the given input and target data.
 
         Args:
             inputs (ndarray): Input features for training.
             targets (ndarray): True labels for training.
             epochs (int): Number of training iterations.
+            verbose (bool): Whether to print loss every 1000 epochs.
 
         Example:
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> targets = np.array([[0], [1]])
-        >>> ann.train(inputs, targets, epochs=1)
+        >>> ann.train(inputs, targets, epochs=1, verbose=False)
         """
         for epoch in range(epochs):
             outputs = self.feedforward(inputs)
             self.backpropagation(inputs, targets, outputs)
-            if epoch % 1000 == 0:
+            if verbose and epoch % 1000 == 0:
                 loss = np.mean(np.square(targets - outputs))
                 print(f"Epoch {epoch}, Loss: {loss}")
 

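Each reformatted update applies the same gradient-descent rule as before, e.g. weights += activations.T.dot(gradient) * learning_rate; only the line wrapping changed. The substantive change is the new verbose flag, which makes the loss printout opt-in. A usage sketch against this class, following the XOR example the class docstring mentions (the import path is assumed from the file location; with random initialization, convergence within 10000 epochs is typical but not guaranteed):

    import numpy as np
    from neural_network.artificial_neural_network import SimpleANN  # assumed import path

    ann = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
    inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    targets = np.array([[0], [1], [1], [0]])                 # XOR truth table
    ann.train(inputs, targets, epochs=10000, verbose=True)   # prints loss every 1000 epochs
    print(ann.predict(inputs).round(2))                      # should approach [[0], [1], [1], [0]]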
@@ -165,3 +170,8 @@ def predict(self, inputs: np.ndarray) -> np.ndarray:
         (2, 1)
         """
         return self.feedforward(inputs)
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
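The new guard makes the module self-testing: running the file directly executes every docstring example. Either invocation works; the second avoids triggering the __main__ block because the module is not imported under that name:

    python3 neural_network/artificial_neural_network.py                  # silent when all doctests pass
    python3 -m doctest -v neural_network/artificial_neural_network.py   # verbose, lists each example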
