1
1
import numpy as np
2
2
3
-
4
- class ANN :
3
+ class SimpleANN :
5
4
"""
6
5
Simple Artificial Neural Network (ANN)
7
6
8
7
- Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
9
8
- Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
10
- as the loss function.
9
+ as the loss function.
11
10
- Example demonstrates solving the XOR problem.
12
11
"""
13
12
14
- def __init__ (
15
- self ,
16
- input_size : int ,
17
- hidden_size : int ,
18
- output_size : int ,
19
- learning_rate : float = 0.1 ,
20
- ) -> None :
13
+ def __init__ (self , input_size : int , hidden_size : int , output_size : int , learning_rate : float = 0.1 ) -> None :
21
14
"""
22
15
Initialize the neural network with random weights and biases.
23
16
@@ -47,6 +40,7 @@ def sigmoid(self, value: np.ndarray) -> np.ndarray:
47
40
ndarray: Activated output using sigmoid function.
48
41
49
42
Example:
43
+ >>> from __main__ import SimpleANN
50
44
>>> ann = SimpleANN(2, 2, 1)
51
45
>>> ann.sigmoid(np.array([0]))
52
46
array([0.5])
@@ -58,13 +52,13 @@ def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
58
52
Derivative of the sigmoid function.
59
53
60
54
Args:
61
- sigmoid_output (ndarray): Output after applying
62
- the sigmoid function.
55
+ sigmoid_output (ndarray): Output after applying the sigmoid function.
63
56
64
57
Returns:
65
58
ndarray: Derivative of the sigmoid function.
66
59
67
60
Example:
61
+ >>> from __main__ import SimpleANN
68
62
>>> ann = SimpleANN(2, 2, 1)
69
63
>>> output = ann.sigmoid(np.array([0.5]))
70
64
>>> ann.sigmoid_derivative(output)
def feedforward(self, inputs: np.ndarray) -> np.ndarray:
    """
    Run a forward pass through the network.

    Args:
        inputs (ndarray): Input batch of shape (n_samples, input_size).

    Returns:
        ndarray: Network output of shape (n_samples, output_size).

    Example:
        >>> from __main__ import SimpleANN
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.feedforward(inputs).shape
        (2, 1)
    """
    # Hidden layer: affine transform followed by sigmoid activation.
    pre_hidden = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
    self.hidden_input = pre_hidden
    self.hidden_output = self.sigmoid(pre_hidden)

    # Output layer: same pattern. Intermediates are cached on self so
    # backpropagation can reuse them.
    pre_output = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
    self.final_input = pre_output
    self.final_output = self.sigmoid(pre_output)
    return self.final_output
98
91
99
def backpropagation(self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray) -> None:
    """
    Perform one backpropagation step, updating weights and biases in place.

    Gradients follow from the MSE loss and sigmoid activations used by
    ``train``/``feedforward``.

    Args:
        inputs (ndarray): Input batch, shape (n_samples, input_size).
        targets (ndarray): Target values, shape (n_samples, output_size).
        outputs (ndarray): Output predicted by the network for ``inputs``.
    """
    # NOTE(review): the error/output-gradient lines were elided in the
    # damaged source; reconstructed as the standard MSE + sigmoid backprop
    # step, consistent with the loss computed in train(). Confirm against
    # upstream history.
    error = targets - outputs
    output_gradient = error * self.sigmoid_derivative(outputs)

    # Propagate the gradient back to the hidden layer (uses the weights
    # BEFORE this step's update, so order matters here).
    hidden_error = output_gradient.dot(self.weights_hidden_output.T)
    hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

    # `+=` because error = targets - outputs already carries the descent sign.
    self.weights_hidden_output += self.hidden_output.T.dot(output_gradient) * self.learning_rate
    self.bias_output += np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate

    self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
    self.bias_hidden += np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
133
119
134
def train(self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000) -> None:
    """
    Train the network with full-batch gradient descent.

    Args:
        inputs (ndarray): Input data, shape (n_samples, input_size).
        targets (ndarray): Target outputs, shape (n_samples, output_size).
        epochs (int): Number of training iterations.
    """
    # NOTE(review): the loop header and forward pass were elided in the
    # damaged source; reconstructed as the obvious epoch loop driving one
    # feedforward + backpropagation step per iteration.
    for epoch in range(epochs):
        outputs = self.feedforward(inputs)
        self.backpropagation(inputs, targets, outputs)
        if epoch % 1000 == 0:
            # MSE loss, reported every 1000 epochs for progress monitoring.
            loss = np.mean(np.square(targets - outputs))
            print(f"Epoch {epoch}, Loss: {loss}")
157
142
158
143
def predict(self, inputs: np.ndarray) -> np.ndarray:
    """
    Predict outputs for the given inputs.

    Args:
        inputs (ndarray): Input data, shape (n_samples, input_size).

    Returns:
        ndarray: Predicted output from the network.

    Example:
        >>> from __main__ import SimpleANN
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.predict(inputs).shape
        (2, 1)
    """
    # Prediction is plain inference: a single forward pass, no weight updates.
    network_output = self.feedforward(inputs)
    return network_output
175
-
176
-
177
# Example usage
if __name__ == "__main__":
    # XOR dataset: not linearly separable, so the hidden layer is required.
    X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([[0], [1], [1], [0]])

    # Initialize and train the neural network.
    # Fix: the class was renamed to SimpleANN (see class definition); the
    # old name `ANN` used here would raise NameError at runtime.
    nn = SimpleANN(input_size=2, hidden_size=2, output_size=1, learning_rate=0.1)
    nn.train(X, y, epochs=10000)

    # Predictions
    predictions = nn.predict(X)
    print("Predictions:")
    print(predictions)
0 commit comments