
Commit 1d219b3

modified: neural_network/artificial_neural_network.py

1 parent 636718c, commit 1d219b3

1 file changed: +17 -47 lines changed

neural_network/artificial_neural_network.py (+17 -47)
@@ -1,23 +1,16 @@
 import numpy as np
 
-
-class ANN:
+class SimpleANN:
     """
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
     - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
-      as the loss function.
+    as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(
-        self,
-        input_size: int,
-        hidden_size: int,
-        output_size: int,
-        learning_rate: float = 0.1,
-    ) -> None:
+    def __init__(self, input_size: int, hidden_size: int, output_size: int, learning_rate: float = 0.1) -> None:
         """
         Initialize the neural network with random weights and biases.
 
@@ -47,6 +40,7 @@ def sigmoid(self, value: np.ndarray) -> np.ndarray:
             ndarray: Activated output using sigmoid function.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> ann.sigmoid(np.array([0]))
         array([0.5])
@@ -58,13 +52,13 @@ def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
         Derivative of the sigmoid function.
 
         Args:
-            sigmoid_output (ndarray): Output after applying
-                the sigmoid function.
+            sigmoid_output (ndarray): Output after applying the sigmoid function.
 
         Returns:
             ndarray: Derivative of the sigmoid function.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> output = ann.sigmoid(np.array([0.5]))
         >>> ann.sigmoid_derivative(output)
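
A side note on the sigmoid_derivative hunk above: the method takes the already-activated output rather than the raw pre-activation, which works because of the identity sigmoid'(x) = s * (1 - s) where s = sigmoid(x). A minimal standalone check of that identity (the free-standing sigmoid helper here is illustrative, not part of the diff):

import numpy as np

def sigmoid(x: np.ndarray) -> np.ndarray:
    return 1 / (1 + np.exp(-x))

# d/dx sigmoid(x) = e^-x / (1 + e^-x)^2, which equals s * (1 - s) for s = sigmoid(x)
s = sigmoid(np.array([0.5]))
assert np.allclose(s * (1 - s), np.exp(-0.5) / (1 + np.exp(-0.5)) ** 2)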
@@ -83,22 +77,19 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
             ndarray: Output from the network after feedforward pass.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
         self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (
-            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
-        )
+        self.final_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(
-        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
-    ) -> None:
+    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
@@ -108,6 +99,7 @@ def backpropagation(
             outputs (ndarray): Output predicted by the network.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> outputs = ann.feedforward(inputs)
@@ -119,21 +111,13 @@ def backpropagation(
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += (
-            self.hidden_output.T.dot(output_gradient) * self.learning_rate
-        )
-        self.bias_output += (
-            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
+        self.weights_hidden_output += self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        self.bias_output += np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
 
         self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
-        self.bias_hidden += (
-            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
+        self.bias_hidden += np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
 
-    def train(
-        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
-    ) -> None:
+    def train(self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000) -> None:
         """
         Train the neural network on the given input and target data.
 
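For context on the += update lines in the hunk above: each one is a plain gradient-descent step, accumulating the batch outer product of a layer's input with its delta term and scaling by the learning rate. The computation of output_gradient falls outside the hunk's context lines, so the conventional choice for MSE with sigmoid outputs, (targets - outputs) * sigmoid_derivative(outputs), is an assumption here. A minimal sketch of one such step:

import numpy as np

def sgd_step(weights: np.ndarray, layer_input: np.ndarray, delta: np.ndarray,
             lr: float = 0.1) -> np.ndarray:
    # Sum the per-sample outer products over the batch (layer_input.T @ delta)
    # and scale by the learning rate. Assuming the conventional
    # delta = (targets - outputs) * sigmoid_derivative(outputs),
    # adding this term descends the mean squared error.
    return weights + layer_input.T.dot(delta) * lr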
@@ -143,6 +127,7 @@ def train(
             epochs (int): Number of training iterations.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> targets = np.array([[0], [1]])
@@ -153,7 +138,7 @@ def train(
             self.backpropagation(inputs, targets, outputs)
             if epoch % 1000 == 0:
                 loss = np.mean(np.square(targets - outputs))
-                print(f"Epoch {epoch}, Loss: {loss}")
+                print(f'Epoch {epoch}, Loss: {loss}')
 
     def predict(self, inputs: np.ndarray) -> np.ndarray:
         """
@@ -166,25 +151,10 @@ def predict(self, inputs: np.ndarray) -> np.ndarray:
             ndarray: Predicted output from the network.
 
         Example:
+        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> ann.predict(inputs).shape
         (2, 1)
         """
         return self.feedforward(inputs)
-
-
-# Example usage
-if __name__ == "__main__":
-    # XOR dataset
-    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
-    y = np.array([[0], [1], [1], [0]])
-
-    # Initialize and train the neural network
-    nn = ANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
-    nn.train(X, y, epochs=10000)
-
-    # Predictions
-    predictions = nn.predict(X)
-    print("Predictions:")
-    print(predictions)
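
Since the commit removes the inline __main__ demo shown in the last hunk, a minimal usage sketch of the class as it now stands, reconstructed from the removed lines with ANN renamed to SimpleANN (the import path is an assumption based on the file's location):

import numpy as np

from artificial_neural_network import SimpleANN  # assumed module name, from the file path

# XOR dataset
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Initialize and train the neural network
nn = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
nn.train(X, y, epochs=10000)

# Predictions
print("Predictions:")
print(nn.predict(X))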
