diff --git a/DIRECTORY.md b/DIRECTORY.md
index cdbbac684fd2..ee36bc0b1e12 100644
--- a/DIRECTORY.md
+++ b/DIRECTORY.md
@@ -22,6 +22,7 @@
   * [Rat In Maze](backtracking/rat_in_maze.py)
   * [Sudoku](backtracking/sudoku.py)
   * [Sum Of Subsets](backtracking/sum_of_subsets.py)
+  * [Word Break](backtracking/word_break.py)
   * [Word Ladder](backtracking/word_ladder.py)
   * [Word Search](backtracking/word_search.py)
 
@@ -99,6 +100,7 @@
   * [Elgamal Key Generator](ciphers/elgamal_key_generator.py)
   * [Enigma Machine2](ciphers/enigma_machine2.py)
   * [Fractionated Morse Cipher](ciphers/fractionated_morse_cipher.py)
+  * [Gronsfeld Cipher](ciphers/gronsfeld_cipher.py)
   * [Hill Cipher](ciphers/hill_cipher.py)
   * [Mixed Keyword Cypher](ciphers/mixed_keyword_cypher.py)
   * [Mono Alphabetic Ciphers](ciphers/mono_alphabetic_ciphers.py)
@@ -211,6 +213,7 @@
     * [Lazy Segment Tree](data_structures/binary_tree/lazy_segment_tree.py)
     * [Lowest Common Ancestor](data_structures/binary_tree/lowest_common_ancestor.py)
     * [Maximum Fenwick Tree](data_structures/binary_tree/maximum_fenwick_tree.py)
+    * [Maximum Sum Bst](data_structures/binary_tree/maximum_sum_bst.py)
     * [Merge Two Binary Trees](data_structures/binary_tree/merge_two_binary_trees.py)
     * [Mirror Binary Tree](data_structures/binary_tree/mirror_binary_tree.py)
     * [Non Recursive Segment Tree](data_structures/binary_tree/non_recursive_segment_tree.py)
@@ -284,6 +287,7 @@
     * [Dijkstras Two Stack Algorithm](data_structures/stacks/dijkstras_two_stack_algorithm.py)
     * [Infix To Postfix Conversion](data_structures/stacks/infix_to_postfix_conversion.py)
     * [Infix To Prefix Conversion](data_structures/stacks/infix_to_prefix_conversion.py)
+    * [Lexicographical Numbers](data_structures/stacks/lexicographical_numbers.py)
     * [Next Greater Element](data_structures/stacks/next_greater_element.py)
     * [Postfix Evaluation](data_structures/stacks/postfix_evaluation.py)
     * [Prefix Evaluation](data_structures/stacks/prefix_evaluation.py)
@@ -820,6 +824,7 @@
     * [Softplus](neural_network/activation_functions/softplus.py)
     * [Squareplus](neural_network/activation_functions/squareplus.py)
     * [Swish](neural_network/activation_functions/swish.py)
+  * [Artificial Neural Network](neural_network/artificial_neural_network.py)
   * [Back Propagation Neural Network](neural_network/back_propagation_neural_network.py)
   * [Convolution Neural Network](neural_network/convolution_neural_network.py)
   * [Input Data](neural_network/input_data.py)
@@ -1201,6 +1206,7 @@
   * [Binary Tree Traversal](searches/binary_tree_traversal.py)
   * [Double Linear Search](searches/double_linear_search.py)
   * [Double Linear Search Recursion](searches/double_linear_search_recursion.py)
+  * [Exponential Search](searches/exponential_search.py)
   * [Fibonacci Search](searches/fibonacci_search.py)
   * [Hill Climbing](searches/hill_climbing.py)
   * [Interpolation Search](searches/interpolation_search.py)
diff --git a/neural_network/artificial_neural_network.py b/neural_network/artificial_neural_network.py
new file mode 100644
index 000000000000..d086efb4eca2
--- /dev/null
+++ b/neural_network/artificial_neural_network.py
@@ -0,0 +1,193 @@
+import numpy as np
+
+
+class SimpleANN:
+    """
+    Simple Artificial Neural Network (ANN)
+
+    - Feedforward neural network with one hidden layer and sigmoid activation.
+    - Uses gradient descent for backpropagation and mean squared error (MSE)
+      as the loss function.
+    - The demo in the ``__main__`` block below solves the XOR problem.
+    """
+
+    def __init__(
+        self,
+        input_size: int,
+        hidden_size: int,
+        output_size: int,
+        learning_rate: float = 0.1,
+    ) -> None:
+        """
+        Initialize the neural network with random weights and biases.
+
+        Args:
+            input_size (int): Number of input features.
+            hidden_size (int): Number of neurons in the hidden layer.
+            output_size (int): Number of neurons in the output layer.
+            learning_rate (float): Learning rate for gradient descent.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> isinstance(ann, SimpleANN)
+        True
+        """
+        rng = np.random.default_rng()
+        self.weights_input_hidden = rng.standard_normal((input_size, hidden_size))
+        self.weights_hidden_output = rng.standard_normal((hidden_size, output_size))
+        self.bias_hidden = np.zeros((1, hidden_size))
+        self.bias_output = np.zeros((1, output_size))
+        self.learning_rate = learning_rate
+
+    def sigmoid(self, value: np.ndarray) -> np.ndarray:
+        """
+        Sigmoid activation function.
+
+        Args:
+            value (ndarray): Input value for activation.
+
+        Returns:
+            ndarray: Activated output using the sigmoid function.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> ann.sigmoid(np.array([0]))
+        array([0.5])
+        """
+        return 1 / (1 + np.exp(-value))
+
+    def sigmoid_derivative(self, sigmoid_output: np.ndarray) -> np.ndarray:
+        """
+        Derivative of the sigmoid function.
+
+        Args:
+            sigmoid_output (ndarray): Output after applying the sigmoid function.
+
+        Returns:
+            ndarray: Derivative of the sigmoid function.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> output = ann.sigmoid(np.array([0]))
+        >>> ann.sigmoid_derivative(output)
+        array([0.25])
+        """
+        return sigmoid_output * (1 - sigmoid_output)
+
+    def feedforward(self, inputs: np.ndarray) -> np.ndarray:
+        """
+        Perform forward propagation through the network.
+
+        Args:
+            inputs (ndarray): Input features for the network.
+
+        Returns:
+            ndarray: Output from the network after the feedforward pass.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> inputs = np.array([[0, 0], [1, 1]])
+        >>> ann.feedforward(inputs).shape
+        (2, 1)
+        """
+        self.hidden_input = np.dot(inputs, self.weights_input_hidden) + self.bias_hidden
+        self.hidden_output = self.sigmoid(self.hidden_input)
+        self.final_input = (
+            np.dot(self.hidden_output, self.weights_hidden_output) + self.bias_output
+        )
+        self.final_output = self.sigmoid(self.final_input)
+        return self.final_output
+
+    def backpropagation(
+        self, inputs: np.ndarray, targets: np.ndarray, outputs: np.ndarray
+    ) -> None:
+        """
+        Perform backpropagation to adjust the weights and biases.
+
+        Args:
+            inputs (ndarray): Input features.
+            targets (ndarray): True output labels.
+            outputs (ndarray): Output predicted by the network.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> inputs = np.array([[0, 0], [1, 1]])
+        >>> outputs = ann.feedforward(inputs)
+        >>> targets = np.array([[0], [1]])
+        >>> ann.backpropagation(inputs, targets, outputs)
+        """
+        error = targets - outputs
+        output_gradient = error * self.sigmoid_derivative(outputs)
+        hidden_error = output_gradient.dot(self.weights_hidden_output.T)
+        hidden_gradient = hidden_error * self.sigmoid_derivative(self.hidden_output)
+
+        self.weights_hidden_output += (
+            self.hidden_output.T.dot(output_gradient) * self.learning_rate
+        )
+        self.bias_output += (
+            np.sum(output_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+        self.weights_input_hidden += inputs.T.dot(hidden_gradient) * self.learning_rate
+        self.bias_hidden += (
+            np.sum(hidden_gradient, axis=0, keepdims=True) * self.learning_rate
+        )
+
+    def train(
+        self, inputs: np.ndarray, targets: np.ndarray, epochs: int = 10000
+    ) -> None:
+        """
+        Train the neural network on the given input and target data.
+
+        Args:
+            inputs (ndarray): Input features for training.
+            targets (ndarray): True labels for training.
+            epochs (int): Number of training iterations.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> inputs = np.array([[0, 0], [1, 1]])
+        >>> targets = np.array([[0], [1]])
+        >>> ann.train(inputs, targets, epochs=1)  # doctest: +ELLIPSIS
+        Epoch 0, Loss: ...
+        """
+        for epoch in range(epochs):
+            outputs = self.feedforward(inputs)
+            self.backpropagation(inputs, targets, outputs)
+            if epoch % 1000 == 0:
+                loss = np.mean(np.square(targets - outputs))
+                print(f"Epoch {epoch}, Loss: {loss}")
+
+    def predict(self, inputs: np.ndarray) -> np.ndarray:
+        """
+        Predict the output for new input data.
+
+        Args:
+            inputs (ndarray): Input data for prediction.
+
+        Returns:
+            ndarray: Predicted output from the network.
+
+        Example:
+        >>> ann = SimpleANN(2, 2, 1)
+        >>> inputs = np.array([[0, 0], [1, 1]])
+        >>> ann.predict(inputs).shape
+        (2, 1)
+        """
+        return self.feedforward(inputs)
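+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()
+
+    # Minimal demo of the XOR task described in the class docstring: a sketch,
+    # not a tuned setup. hidden_size=4 and epochs=10000 are illustrative
+    # choices; a two-neuron hidden layer can also learn XOR but converges less
+    # reliably from random initial weights.
+    xor_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
+    xor_targets = np.array([[0], [1], [1], [0]])
+    network = SimpleANN(input_size=2, hidden_size=4, output_size=1)
+    network.train(xor_inputs, xor_targets, epochs=10000)
+    print(network.predict(xor_inputs))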