From 771ca1d7990fe0f0f24b725612a3aef807929041 Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Mon, 14 Aug 2023 23:16:19 +0530
Subject: [PATCH 1/4] Added Leaky ReLU activation function

---
 .../leaky_rectified_linear_unit.py | 45 +++++++++++++++++++
 1 file changed, 45 insertions(+)
 create mode 100644 neural_network/activation_functions/leaky_rectified_linear_unit.py

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
new file mode 100644
index 000000000000..1e99fed4e405
--- /dev/null
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -0,0 +1,45 @@
+"""
+Leaky Rectified Linear Unit (LeakyReLU)
+
+Input: vector (type: np.ndarray) , alpha (type: float)
+Output: vector (type: np.ndarray)
+
+UseCase: LeakyReLU solves the issue of dead neurons or vanishing gradient problem.
+Refer the below link for more information:
+https://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLU
+
+Applications:
+Generative Adversarial Networks (GANs)
+Object Detection and Image Segmentation
+"""
+
+import numpy as np
+
+
+def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
+    """
+    Implements the LeakyReLU activation function.
+    Parameters:
+        vector: the array containing input of elu activation
+        alpha: hyperparameter
+    return:
+    leaky_relu (np.array): The input numpy array after applying leaky-relu.
+
+    Formula : f(x) = x if x > 0 else f(x) = alpha * x
+
+    Examples:
+    >>> leaky_rectified_linear_unit(vector=np.array([2.3,0.6,-2,-3.8]), alpha=0.3)
+    array([ 2.3 ,  0.6 , -0.6 , -1.14])
+
+    >>> leaky_rectified_linear_unit(vector=np.array([-9.2,-0.3,0.45,-4.56]), \
+    alpha=0.067)
+    array([-0.6164 , -0.0201 ,  0.45   , -0.30552])
+
+    """
+    return np.where(vector > 0, vector, alpha * vector)
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()

From 9b80d1f943a1835d075f5ddae69c169bab4e503e Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Mon, 14 Aug 2023 23:23:13 +0530
Subject: [PATCH 2/4] Added Leaky ReLU activation function

---
 .../activation_functions/leaky_rectified_linear_unit.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
index 1e99fed4e405..2ccfc20ee0be 100644
--- a/neural_network/activation_functions/leaky_rectified_linear_unit.py
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -20,10 +20,10 @@ def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
     """
     Implements the LeakyReLU activation function.
     Parameters:
-        vector: the array containing input of elu activation
+        vector: the array containing input of leakyReLu activation
         alpha: hyperparameter
     return:
-    leaky_relu (np.array): The input numpy array after applying leaky-relu.
+    leaky_relu (np.array): The input numpy array after applying leakyReLu.
 
     Formula : f(x) = x if x > 0 else f(x) = alpha * x
 

From 037b05d4311fbf1d8eb6372ad8dc4a0f34db9717 Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Mon, 14 Aug 2023 23:24:36 +0530
Subject: [PATCH 3/4] Added Leaky ReLU activation function

---
 .../leaky_rectified_linear_unit.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
index 2ccfc20ee0be..5226c86eaa1b 100644
--- a/neural_network/activation_functions/leaky_rectified_linear_unit.py
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -18,14 +18,16 @@
 
 def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
     """
-    Implements the LeakyReLU activation function.
-    Parameters:
-        vector: the array containing input of leakyReLu activation
-        alpha: hyperparameter
-    return:
-    leaky_relu (np.array): The input numpy array after applying leakyReLu.
-
-    Formula : f(x) = x if x > 0 else f(x) = alpha * x
+    Implements the LeakyReLU activation function.
+
+    Parameters:
+        vector (np.ndarray): The input array for LeakyReLU activation.
+        alpha (float): The slope for negative values.
+
+    Returns:
+        np.ndarray: The input array after applying the LeakyReLU activation.
+
+    Formula: f(x) = x if x > 0 else f(x) = alpha * x
 
     Examples:
     >>> leaky_rectified_linear_unit(vector=np.array([2.3,0.6,-2,-3.8]), alpha=0.3)
     array([ 2.3 ,  0.6 , -0.6 , -1.14])

From 1f84b1c07ece37a841e852779362e6d89bd40151 Mon Sep 17 00:00:00 2001
From: kausthub-kannan
Date: Thu, 17 Aug 2023 00:48:49 +0530
Subject: [PATCH 4/4] Formatting and spelling fixes done

---
 .../leaky_rectified_linear_unit.py | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/neural_network/activation_functions/leaky_rectified_linear_unit.py b/neural_network/activation_functions/leaky_rectified_linear_unit.py
index 5226c86eaa1b..019086fd9821 100644
--- a/neural_network/activation_functions/leaky_rectified_linear_unit.py
+++ b/neural_network/activation_functions/leaky_rectified_linear_unit.py
@@ -1,16 +1,9 @@
 """
-Leaky Rectified Linear Unit (LeakyReLU)
+Leaky Rectified Linear Unit (Leaky ReLU)
 
-Input: vector (type: np.ndarray) , alpha (type: float)
-Output: vector (type: np.ndarray)
-
-UseCase: LeakyReLU solves the issue of dead neurons or vanishing gradient problem.
-Refer the below link for more information:
+Use Case: Leaky ReLU addresses the problem of the vanishing gradient.
+For more detailed information, you can refer to the following link:
 https://en.wikipedia.org/wiki/Rectifier_(neural_networks)#Leaky_ReLU
-
-Applications:
-Generative Adversarial Networks (GANs)
-Object Detection and Image Segmentation
 """
 
 import numpy as np
@@ -33,8 +26,7 @@ def leaky_rectified_linear_unit(vector: np.ndarray, alpha: float) -> np.ndarray:
     >>> leaky_rectified_linear_unit(vector=np.array([2.3,0.6,-2,-3.8]), alpha=0.3)
     array([ 2.3 ,  0.6 , -0.6 , -1.14])
 
-    >>> leaky_rectified_linear_unit(vector=np.array([-9.2,-0.3,0.45,-4.56]), \
-    alpha=0.067)
+    >>> leaky_rectified_linear_unit(np.array([-9.2, -0.3, 0.45, -4.56]), alpha=0.067)
    array([-0.6164 , -0.0201 ,  0.45   , -0.30552])
 
     """
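
For reference, a minimal usage sketch of the function as it stands after PATCH 4/4. The import path simply mirrors the file location added in this series and is an assumption about how the repository sits on the PYTHONPATH; the function can equally be copied into a standalone script.

import numpy as np

# Assumed import path, taken from the file added in this patch series.
from neural_network.activation_functions.leaky_rectified_linear_unit import (
    leaky_rectified_linear_unit,
)

# Positive inputs pass through unchanged; negative inputs are scaled by alpha,
# so the unit keeps a small non-zero gradient instead of "dying" at zero.
out = leaky_rectified_linear_unit(np.array([2.3, 0.6, -2.0, -3.8]), alpha=0.3)
print(out)  # values 2.3, 0.6, -0.6, -1.14, matching the first doctest

out = leaky_rectified_linear_unit(np.array([-9.2, -0.3, 0.45, -4.56]), alpha=0.067)
print(out)  # values -0.6164, -0.0201, 0.45, -0.30552, matching the second doctest

Typical alpha values are small (roughly 0.01 to 0.3); with alpha = 0 the function reduces to the standard ReLU, and np.where keeps the whole computation vectorised over the input array.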