Create artificial_neural_network.py #11873

Closed · wants to merge 14 commits into from
6 changes: 6 additions & 0 deletions DIRECTORY.md
@@ -22,6 +22,7 @@
* [Rat In Maze](backtracking/rat_in_maze.py)
* [Sudoku](backtracking/sudoku.py)
* [Sum Of Subsets](backtracking/sum_of_subsets.py)
* [Word Break](backtracking/word_break.py)
* [Word Ladder](backtracking/word_ladder.py)
* [Word Search](backtracking/word_search.py)

@@ -99,6 +100,7 @@
* [Elgamal Key Generator](ciphers/elgamal_key_generator.py)
* [Enigma Machine2](ciphers/enigma_machine2.py)
* [Fractionated Morse Cipher](ciphers/fractionated_morse_cipher.py)
* [Gronsfeld Cipher](ciphers/gronsfeld_cipher.py)
* [Hill Cipher](ciphers/hill_cipher.py)
* [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py)
* [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py)
@@ -211,6 +213,7 @@
* [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py)
* [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py)
* [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py)
* [Maximum Sum Bst](data_structures/binary_tree/maximum_sum_bst.py)
* [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py)
* [Mirror Binary Tree](data_structures/binary_tree/mirror_binary_tree.py)
* [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py)
@@ -284,6 +287,7 @@
* [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py)
* [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py)
* [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py)
* [Lexicographical Numbers](data_structures/stacks/lexicographical_numbers.py)
* [Next Greater Element](data_structures/stacks/next_greater_element.py)
* [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py)
* [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py)
@@ -820,6 +824,7 @@
* [Softplus](neural_network/activation_functions/softplus.py)
* [Squareplus](neural_network/activation_functions/squareplus.py)
* [Swish](neural_network/activation_functions/swish.py)
* [Artificial Neural Network](neural_network/artificial_neural_network.py)
* [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
* [Convolution Neural Network](neural_network/convolution_neural_network.py)
* [Input Data](neural_network/input_data.py)
@@ -1201,6 +1206,7 @@
* [Binary Tree Traversal](searches/binary_tree_traversal.py)
* [Double Linear Search](searches/double_linear_search.py)
* [Double Linear Search Recursion](searches/double_linear_search_recursion.py)
* [Exponential Search](searches/exponential_search.py)
* [Fibonacci Search](searches/fibonacci_search.py)
* [Hill Climbing](searches/hill_climbing.py)
* [Interpolation Search](searches/interpolation_search.py)
180 changes: 180 additions & 0 deletions neural_network/artificial_neural_network.py
@@ -0,0 +1,180 @@
import numpy as np


class SimpleANN:
    """
    Simple Artificial Neural Network (ANN)

    - Feedforward neural network with one hidden layer and sigmoid activation.
    - Trained with gradient descent via backpropagation, using Mean Squared
      Error (MSE) as the loss function.
    - The demo at the bottom of this file solves the XOR problem.
    """

    def __init__(
        self,
        input_size: int,
        hidden_size: int,
        output_size: int,
        learning_rate: float = 0.1,
    ) -> None:
        """
        Initialize the neural network with random weights and biases.

        Args:
            input_size (int): Number of input features.
            hidden_size (int): Number of neurons in the hidden layer.
            output_size (int): Number of neurons in the output layer.
            learning_rate (float): Learning rate for gradient descent.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> isinstance(ann, SimpleANN)
        True
        """
        rng = np.random.default_rng()
        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))
        self.bias_hidden = np.zeros((1, hidden_size))
        self.bias_output = np.zeros((1, output_size))
        self.learning_rate = learning_rate

    def sigmoid(self, value: np.ndarray) -> np.ndarray:
        """
        Sigmoid activation function.

        Args:
            value (ndarray): Input value for activation.

        Returns:
            ndarray: Activated output using the sigmoid function.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> ann.sigmoid(np.array([0]))
        array([0.5])
        """
        return 1 / (1 + np.exp(-value))

    def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
        """
        Derivative of the sigmoid function.

        Args:
            sigmoid_output (ndarray): Output after applying the sigmoid function.

        Returns:
            ndarray: Derivative of the sigmoid function.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> output = ann.sigmoid(np.array([0]))  # Use input 0 for testing
        >>> ann.sigmoid_derivative(output)
        array([0.25])
        """
        return sigmoid_output * (1 - sigmoid_output)

    def feedforward(self, inputs: np.ndarray) -> np.ndarray:
        """
        Perform forward propagation through the network.

        Args:
            inputs (ndarray): Input features for the network.

        Returns:
            ndarray: Output from the network after the feedforward pass.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.feedforward(inputs).shape
        (2, 1)
        """
        self.hidden_input = (
            np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
        )
        self.hidden_output = self.sigmoid(self.hidden_input)
        self.final_input = (
            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
        )
        self.final_output = self.sigmoid(self.final_input)
        return self.final_output

    def backpropagation(
        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
    ) -> None:
        """
        Perform backpropagation to adjust the weights and biases.

        Args:
            inputs (ndarray): Input features.
            targets (ndarray): True output labels.
            outputs (ndarray): Output predicted by the network.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> outputs = ann.feedforward(inputs)
        >>> targets = np.array([[0], [1]])
        >>> ann.backpropagation(inputs, targets, outputs)
        """
        error = targets - outputs
        output_gradient = error * self.sigmoid_derivative(outputs)
        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)

        self.weights_hidden_output += (
            self.hidden_output.T.dot(output_gradient) * self.learning_rate
        )
        self.bias_output += (
            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
        )

        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
        self.bias_hidden += (
            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
        )

    def train(
        self,
        inputs: np.ndarray,
        targets: np.ndarray,
        epochs: int = 10000,
        verbose: bool = False,
    ) -> None:
        """
        Train the neural network on the given input and target data.

        Args:
            inputs (ndarray): Input features for training.
            targets (ndarray): True labels for training.
            epochs (int): Number of training iterations.
            verbose (bool): Whether to print the loss every 1000 epochs.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> targets = np.array([[0], [1]])
        >>> ann.train(inputs, targets, epochs=1, verbose=False)
        """
        for epoch in range(epochs):
            outputs = self.feedforward(inputs)
            self.backpropagation(inputs, targets, outputs)
            if verbose and epoch % 1000 == 0:
                loss = np.mean(np.square(targets - outputs))
                print(f"Epoch {epoch}, Loss: {loss}")

    def predict(self, inputs: np.ndarray) -> np.ndarray:
        """
        Predict the output for new input data.

        Args:
            inputs (ndarray): Input data for prediction.

        Returns:
            ndarray: Predicted output from the network.

        Example:
        >>> ann = SimpleANN(2, 2, 1)
        >>> inputs = np.array([[0, 0], [1, 1]])
        >>> ann.predict(inputs).shape
        (2, 1)
        """
        return self.feedforward(inputs)


if __name__ == "__main__":
Loading