
Commit d32f5f8

Committed Oct 7, 2024 · 2 parents: a88e165 + a159cb7

File tree: 1 file changed, +27 −8 lines

1 file changed

+27
-8
lines changed
 

‎neural_network/artificial_neural_network.py

Lines changed: 27 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
 import numpy as np
 
+
 class SimpleANN:
     """
     Simple Artificial Neural Network (ANN)
@@ -9,7 +10,13 @@ class SimpleANN:
     - Example demonstrates solving the XOR problem.
     """
 
-    def __init__(self, input_size: int, hidden_size: int, output_size: int, learning_rate: float = 0.1) -> None:
+    def __init__(
+        self,
+        input_size: int,
+        hidden_size: int,
+        output_size: int,
+        learning_rate: float = 0.1,
+    ) -> None:
         """
         Initialize the neural network with random weights and biases.
 
@@ -84,11 +91,15 @@ def feedforward(self, inputs: np.ndarray) -> np.ndarray:
         """
         self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
         self.hidden_output = self.sigmoid(self.hidden_input)
-        self.final_input = np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
         self.final_output = self.sigmoid(self.final_input)
         return self.final_output
 
-    def backpropagation(self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray) -> None:
+    def backpropagation(
+        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
+    ) -> None:
         """
         Perform backpropagation to adjust the weights and biases.
 
@@ -109,13 +120,21 @@ def backpropagation(self, inputs: np.ndarray, targets: np.ndarray, outputs: np.n
         hidden_error = output_gradient.dot(self.weights_hidden_output.T)
         hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
 
-        self.weights_hidden_output += self.hidden_output.T.dot(output_gradient) * self.learning_rate
-        self.bias_output += np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
 
         self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
-        self.bias_hidden += np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
 
-    def train(self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000) -> None:
+    def train(
+        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
+    ) -> None:
         """
         Train the neural network on the given input and target data.
 
@@ -135,7 +154,7 @@ def train(self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000) ->
             self.backpropagation(inputs, targets, outputs)
             if epoch % 1000 == 0:
                 loss = np.mean(np.square(targets - outputs))
-                print(f'Epoch {epoch}, Loss: {loss}')
+                print(f"Epoch {epoch}, Loss: {loss}")
 
     def predict(self, inputs: np.ndarray) -> np.ndarray:
         """

0 commit comments
