"""
Simple Artificial Neural Network (ANN)

- Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
- Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
  as the loss function.
- Example demonstrates solving the XOR problem.
"""

import numpy as np


class ANN:
    """
    Artificial Neural Network (ANN)

    - Feedforward Neural Network with 1 hidden layer
      and Sigmoid activation.
    - Uses Gradient Descent for backpropagation.
    - Example demonstrates solving the XOR problem.
    """

    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.1):
        # Initialize weights using np.random.Generator
        rng = np.random.default_rng()
        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))

        # Initialize biases
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))

        self.learning_rate = learning_rate

    def sigmoid(self, x):
        """Sigmoid activation function."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Derivative of the sigmoid, where x is already the sigmoid output."""
        return x * (1 - x)

    def feedforward(self, x):
        """Forward pass."""
        self.hidden_input = np.dot(x, self.weights_input_hidden) + self.bias_hidden
        self.hidden_output = self.sigmoid(self.hidden_input)
        self.final_input = (
            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        )
        self.final_output = self.sigmoid(self.final_input)
        return self.final_output

    def backpropagation(self, x, y, output):
        """Backpropagation to adjust weights."""
        error = y - output
        output_gradient = error * self.sigmoid_derivative(output)
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

        self.weights_hidden_output += (
            self.hidden_output.T.dot(output_gradient) * self.learning_rate
        )
        self.bias_output += (
            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
        )

        self.weights_input_hidden += x.T.dot(hidden_gradient) * self.learning_rate
        self.bias_hidden += (
            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
        )

    def train(self, x, y, epochs=10000):
        """Train the network."""
        for epoch in range(epochs):
            output = self.feedforward(x)
            self.backpropagation(x, y, output)
            if epoch % 1000 == 0:
                loss = np.mean(np.square(y - output))
                print(f"Epoch {epoch}, Loss: {loss}")

    def predict(self, x):
        """Make predictions."""
        return self.feedforward(x)


if __name__ == "__main__":
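    # A minimal sketch of the XOR demo the module docstring describes; the
    # example body is not shown above, so hidden_size=4 and epochs=10000 are
    # illustrative assumptions rather than the author's exact values.
    x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])

    ann = ANN(input_size=2, hidden_size=4, output_size=1, learning_rate=0.1)
    ann.train(x, y, epochs=10000)

    print("Predictions after training:")
    print(ann.predict(x))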