@@ -5,8 +5,7 @@ class SimpleANN:
     Simple Artificial Neural Network (ANN)
 
     - Feedforward Neural Network with 1 hidden layer and Sigmoid activation.
-    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE)
-      as the loss function.
+    - Uses Gradient Descent for backpropagation and Mean Squared Error (MSE) as the loss function.
     - Example demonstrates solving the XOR problem.
     """
 
@@ -19,14 +18,17 @@ def __init__(self, input_size: int, hidden_size: int, output_size: int, learning
             hidden_size (int): Number of neurons in the hidden layer.
             output_size (int): Number of neurons in the output layer.
             learning_rate (float): Learning rate for gradient descent.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> isinstance(ann, SimpleANN)
+        True
         """
         rng = np.random.default_rng()
         self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
         self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))
-
         self.bias_hidden = np.zeros((1, hidden_size))
         self.bias_output = np.zeros((1, output_size))
-
         self.learning_rate = learning_rate
 
     def sigmoid(self, value: np.ndarray) -> np.ndarray:
@@ -40,7 +42,6 @@ def sigmoid(self, value: np.ndarray) -> np.ndarray:
             ndarray: Activated output using sigmoid function.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> ann.sigmoid(np.array([0]))
         array([0.5])
@@ -58,7 +59,6 @@ def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
             ndarray: Derivative of the sigmoid function.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> output = ann.sigmoid(np.array([0.5]))
         >>> ann.sigmoid_derivative(output)
@@ -77,7 +77,6 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
             ndarray: Output from the network after feedforward pass.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> ann.feedforward(inputs).shape
@@ -99,7 +98,6 @@ def backpropagation(self, inputs: np.ndarray, targets: np.ndarray, outputs: np.n
             outputs (ndarray): Output predicted by the network.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> outputs = ann.feedforward(inputs)
@@ -127,7 +125,6 @@ def train(self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000) ->
             epochs (int): Number of training iterations.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> targets = np.array([[0], [1]])
@@ -151,7 +148,6 @@ def predict(self, inputs: np.ndarray) -> np.ndarray:
             ndarray: Predicted output from the network.
 
         Example:
-        >>> from __main__ import SimpleANN
         >>> ann = SimpleANN(2, 2, 1)
         >>> inputs = np.array([[0, 0], [1, 1]])
         >>> ann.predict(inputs).shape
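For context, a minimal usage sketch of the XOR training flow the class docstring refers to. It assumes SimpleANN is in scope as defined in this file and uses the train/predict signatures visible in the hunk headers above; outputs depend on the random weight initialization, so none are asserted.

import numpy as np

# Illustrative sketch, not part of this PR: train the network on the XOR truth table.
ann = SimpleANN(input_size=2, hidden_size=2, output_size=1)

xor_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
xor_targets = np.array([[0], [1], [1], [0]])

# Gradient descent on the MSE loss, as described in the class docstring.
ann.train(xor_inputs, xor_targets, epochs=10000)

# Sigmoid outputs lie in (0, 1); print them rounded for readability.
predictions = ann.predict(xor_inputs)
print(predictions.round(3))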