@@ -6,17 +6,13 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE) as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
+      as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(
-        self,
-        input_size: int,
-        hidden_size: int,
-        output_size: int,
-        learning_rate: float = 0.1,
-    ) -> None:
+    def __init__(self, input_size: int, hidden_size: int, output_size: int,
+                 learning_rate: float = 0.1) -> None:
         """
         Initialize the neural network with random weights and biases.
 
@@ -89,17 +85,16 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         >>> ann.feedforward(inputs).shape
         (2, 1)
         """
-        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
+        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
+                             self.bias_hidden)
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = (
-            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
-        )
+        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
+                            self.bias_output)
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(
-        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
-    ) -> None:
+    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
+                        outputs: np.ndarray) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
@@ -120,21 +115,18 @@ def backpropagation(
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += (
-            self.hidden_output.T.dot(output_gradient) * self.learning_rate
-        )
-        self.bias_output += (
-            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
-
-        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
-        self.bias_hidden += (
-            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
-        )
-
-    def train(
-        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
-    ) -> None:
+        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
+                                       self.learning_rate)
+        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
+                             self.learning_rate)
+
+        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
+                                      self.learning_rate)
+        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
+                             self.learning_rate)
+
+    def train(self, inputs: np.ndarray, targets: np.ndarray,
+              epochs: int = 10000) -> None:
         """
         Train the neural network on the given input and target data.
 
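For reviewers who want to sanity-check the reformatted update rules, here is a minimal, self-contained sketch of SimpleANN as it reads after this change. Only the signatures and statements visible in the diff are taken from the source; the weight initialization, the sigmoid helpers, the error term at the top of backpropagation, the train loop body, and the XOR driver are assumptions filled in for illustration, not the repository's actual code.

import numpy as np


class SimpleANN:
    """Sketch of the class after this commit; parts not shown in the diff are assumed."""

    def __init__(self, input_size: int, hidden_size: int, output_size: int,
                 learning_rate: float = 0.1) -> None:
        # Assumed initialization -- the diff does not show how weights are drawn.
        rng = np.random.default_rng(42)
        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))
        self.learning_rate = learning_rate

    def sigmoid(self, value: np.ndarray) -> np.ndarray:
        # Standard logistic activation, as named in the diff.
        return 1 / (1 + np.exp(-value))

    def sigmoid_derivative(self, value: np.ndarray) -> np.ndarray:
        # Takes the sigmoid *output*, matching its use on self.hidden_output below.
        return value * (1 - value)

    def feedforward(self, inputs: np.ndarray) -> np.ndarray:
        # These statements appear (reformatted) in the diff.
        self.hidden_input = (np.dot(inputs, self.weights_input_hidden) +
                             self.bias_hidden)
        self.hidden_output = self.sigmoid(self.hidden_input)
        self.final_input = (np.dot(self.hidden_output, self.weights_hidden_output) +
                            self.bias_output)
        self.final_output = self.sigmoid(self.final_input)
        return self.final_output

    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray,
                        outputs: np.ndarray) -> None:
        # Assumed error term: MSE gradient pushed through the output sigmoid.
        error = targets - outputs
        output_gradient = error * self.sigmoid_derivative(outputs)
        # From here on, the updates match the diff.
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

        self.weights_hidden_output += (self.hidden_output.T.dot(output_gradient) *
                                       self.learning_rate)
        self.bias_output += (np.sum(output_gradient, axis=0, keepdims=True) *
                             self.learning_rate)
        self.weights_input_hidden += (inputs.T.dot(hidden_gradient) *
                                      self.learning_rate)
        self.bias_hidden += (np.sum(hidden_gradient, axis=0, keepdims=True) *
                             self.learning_rate)

    def train(self, inputs: np.ndarray, targets: np.ndarray,
              epochs: int = 10000) -> None:
        # Assumed loop body: one full-batch forward/backward pass per epoch.
        for _ in range(epochs):
            outputs = self.feedforward(inputs)
            self.backpropagation(inputs, targets, outputs)


if __name__ == "__main__":
    # XOR, the example the class docstring mentions.
    xor_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
    xor_targets = np.array([[0], [1], [1], [0]], dtype=float)
    ann = SimpleANN(input_size=2, hidden_size=4, output_size=1)
    ann.train(xor_inputs, xor_targets, epochs=10000)
    print(np.round(ann.feedforward(xor_inputs), 2))

Running the sketch trains on the XOR table; with enough epochs the rounded outputs should approach [[0], [1], [1], [0]], though exact convergence depends on the (assumed) random initialization.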