import numpy as np


class ANN:
    """
    Simple Artificial Neural Network (ANN)

    - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
      as the loss function.
    - Example demonstrates solving the XOR problem.
    """

    def __init__(
        self,
        input_size: int,
        hidden_size: int,
        output_size: int,
        learning_rate: float = 0.1,
    ) -> None:
        """
        Initialize the neural network with random weights and biases.

        Args:
            input_size (int): Number of input features.
            hidden_size (int): Number of neurons in the hidden layer.
            output_size (int): Number of neurons in the output layer.
            learning_rate (float): Learning rate for gradient descent.
        """
        rng = np.random.default_rng()
        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))

        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))

        self.learning_rate = learning_rate

    def sigmoid(self, value: np.ndarray) -> np.ndarray:
        """
        Sigmoid activation function.

        Args:
            value (ndarray): Input value for activation.

        Returns:
            ndarray: Activated output using sigmoid function.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> ann.sigmoid(np.array([0]))
        array([0.5])
        """
        return 1 / (1 + np.exp(-value))

    def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
        """
        Derivative of the sigmoid function.

        Args:
            sigmoid_output (ndarray): Output after applying the sigmoid function.

        Returns:
            ndarray: Derivative of the sigmoid function.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> output = ann.sigmoid(np.array([0]))
        >>> ann.sigmoid_derivative(output)
        array([0.25])
        """
        return sigmoid_output * (1 - sigmoid_output)

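    # Note: sigmoid_derivative takes the sigmoid *output*, not the raw
    # pre-activation: with s = sigmoid(x), ds/dx = s * (1 - s), so the
    # activations cached during feedforward can be reused directly and no
    # extra call to np.exp is needed during backpropagation.
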
    def feedforward(self, inputs: np.ndarray) -> np.ndarray:
        """
        Perform forward propagation through the network.

        Args:
            inputs (ndarray): Input features for the network.

        Returns:
            ndarray: Output from the network after feedforward pass.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.feedforward(inputs).shape
        (2, 1)
        """
        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = self.sigmoid(self.hidden_input)
        self.final_input = (
            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        )
        self.final_output = self.sigmoid(self.final_input)
        return self.final_output

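    # Shape note: for a batch of n samples, inputs is (n, input_size),
    # hidden_output is (n, hidden_size) and the returned final_output is
    # (n, output_size); the (1, k)-shaped biases broadcast across the batch.
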
    def backpropagation(
        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
    ) -> None:
        """
        Perform backpropagation to adjust the weights and biases.

        Args:
            inputs (ndarray): Input features.
            targets (ndarray): True output labels.
            outputs (ndarray): Output predicted by the network.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> outputs = ann.feedforward(inputs)
        >>> targets = np.array([[0], [1]])
        >>> ann.backpropagation(inputs, targets, outputs)
        """
        error = targets - outputs
        output_gradient = error * self.sigmoid_derivative(outputs)
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

        # Update weights and biases using gradient descent
        self.weights_hidden_output += (
            self.hidden_output.T.dot(output_gradient) * self.learning_rate
        )
        self.bias_output += (
            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
        )

        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
        self.bias_hidden += (
            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
        )

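    # Gradient sketch: with MSE loss L = mean((targets - outputs)**2) and a
    # sigmoid output layer, dL/d(final_input) is proportional to
    # (targets - outputs) * sigmoid'(final_input), which is exactly
    # output_gradient above. The constant factor 2/n from the MSE derivative
    # is absorbed into the learning rate, and writing error as
    # targets - outputs keeps the "+=" updates moving downhill.
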
    def train(
        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
    ) -> None:
        """
        Train the neural network on the given input and target data.

        Args:
            inputs (ndarray): Input features for training.
            targets (ndarray): True labels for training.
            epochs (int): Number of training iterations.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> targets = np.array([[0], [1]])
        >>> ann.train(inputs, targets, epochs=1)  # doctest: +ELLIPSIS
        Epoch 0, Loss: ...
        """
        for epoch in range(epochs):
            outputs = self.feedforward(inputs)
            self.backpropagation(inputs, targets, outputs)
            if epoch % 1000 == 0:
                loss = np.mean(np.square(targets - outputs))
                print(f"Epoch {epoch}, Loss: {loss}")

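    # The loss printed every 1000 epochs is computed from the outputs of the
    # feedforward pass taken *before* that epoch's weight update, so it lags
    # the post-update loss by one step.
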
    def predict(self, inputs: np.ndarray) -> np.ndarray:
        """
        Predict the output for new input data.

        Args:
            inputs (ndarray): Input data for prediction.

        Returns:
            ndarray: Predicted output from the network.

        Example:
        >>> ann = ANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.predict(inputs).shape
        (2, 1)
        """
        return self.feedforward(inputs)


# Example usage
if __name__ == "__main__":
    # XOR dataset
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])

    # Initialize and train the neural network
    nn = ANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
    nn.train(X, y, epochs=10000)

    # Predictions
    predictions = nn.predict(X)
    print("Predictions:")
    print(predictions)
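
    # As an illustrative extra check (not part of the original example):
    # XOR targets are binary, so rounding the sigmoid outputs shows whether
    # the trained network classifies all four inputs correctly.
    print("Rounded predictions:")
    print(np.round(predictions).astype(int))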