@@ -6,13 +6,18 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
-      as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
+      as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(self, input_size: int, hidden_size: int, output_size: int,
-                 learning_rate: float = 0.1) -> None:
+    def __init__(
+        self,
+        input_size: int,
+        hidden_size: int,
+        output_size: int,
+        learning_rate: float = 0.1,
+    ) -> None:
         """
         Initialize the neural network with random weights and biases.
 
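For readers skimming the diff: the constructor body itself is unchanged and not shown in this hunk. Below is a minimal sketch of what it presumably sets up, inferred from the attribute names used later in this diff (`weights_input_hidden`, `bias_hidden`, `weights_hidden_output`, `bias_output`, `learning_rate`); the random-initialization details are an assumption, not taken from this commit.

```python
import numpy as np

class SimpleANN:
    # Hypothetical reconstruction of the unchanged constructor; the attribute
    # names come from this diff, the initialization scheme is assumed.
    def __init__(
        self,
        input_size: int,
        hidden_size: int,
        output_size: int,
        learning_rate: float = 0.1,
    ) -> None:
        # Shapes chosen to match the np.dot calls in feedforward():
        # (n, input_size) @ (input_size, hidden_size) -> (n, hidden_size)
        self.weights_input_hidden = np.random.randn(input_size, hidden_size)
        # (n, hidden_size) @ (hidden_size, output_size) -> (n, output_size)
        self.weights_hidden_output = np.random.randn(hidden_size, output_size)
        # Biases keep a leading 1 so they broadcast over the batch dimension.
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))
        self.learning_rate = learning_rate
```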
@@ -85,16 +90,17 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
-        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
-                             self.bias_hidden)
+        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
-                            self.bias_output)
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
-                        outputs: np.ndarray) -> None:
+    def backpropagation(
+        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
+    ) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
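The `sigmoid` and `sigmoid_derivative` helpers that `feedforward` and `backpropagation` call sit outside this hunk. A conventional definition consistent with the call sites (note that `sigmoid_derivative` is applied to already-activated values such as `self.hidden_output`) would look like the following sketch; it is an assumption, not part of the commit.

```python
import numpy as np

def sigmoid(value: np.ndarray) -> np.ndarray:
    # Logistic function: maps pre-activations into (0, 1).
    return 1 / (1 + np.exp(-value))

def sigmoid_derivative(value: np.ndarray) -> np.ndarray:
    # Expects an already-activated value y = sigmoid(x), since
    # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)) = y * (1 - y).
    return value * (1 - value)
```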
@@ -115,18 +121,21 @@ def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
-                                       self.learning_rate)
-        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
-                                      self.learning_rate)
-        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
-                             self.learning_rate)
-
-    def train(self, inputs: np.ndarray, targets: np.ndarray,
-              epochs: int = 10000) -> None:
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+    def train(
+        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
+    ) -> None:
         """
         Train the neural network on the given input and target data.
 
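Since the class docstring advertises the XOR problem, here is a usage sketch consistent with the signatures touched by this diff. The hyperparameters are illustrative, and `train` is assumed to call `feedforward` and `backpropagation` once per epoch, which this hunk does not show.

```python
import numpy as np

# XOR truth table: inputs of shape (4, 2), targets of shape (4, 1).
inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
targets = np.array([[0], [1], [1], [0]])

# Illustrative hyperparameters; only the signature is taken from the diff.
ann = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
ann.train(inputs, targets, epochs=10000)

# After training, the outputs should approach [0, 1, 1, 0].
print(ann.feedforward(inputs).round(3))
```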