From 9ceb099b0ee19ec183f43a296c14db4cc09f68c0 Mon Sep 17 00:00:00 2001 From: Suyashd999 Date: Mon, 23 Oct 2023 12:57:48 +0530 Subject: [PATCH 1/7] Added doctest for sigmoid_function & cost_function --- machine_learning/logistic_regression.py | 57 +++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index f9da0104ab4b..bc5340f46a8a 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -42,11 +42,64 @@ def sigmoid_function(z): @param z: input to the function @returns: returns value in the range 0 to 1 + + Examples: + >>> sigmoid_function(4) + 0.9820137900379085 + >>> sigmoid_function(np.array([-3,3])) + array([0.04742587, 0.95257413]) + >>> sigmoid_function(np.array([-3,3,1])) + array([0.04742587, 0.95257413, 0.73105858]) """ return 1 / (1 + np.exp(-z)) def cost_function(h, y): + """ + Cost function quantifies the error between predicted and expected values. + The cost function used in Logistic Regression is called Log Loss + or Cross Entropy Function. + + J(θ) = (1/m) * Σ [ -y * log(hθ(x)) - (1 - y) * log(1 - hθ(x)) ] + + Where: + - J(θ) is the cost that we want to minimize during training + - m is the number of training examples + - Σ represents the summation over all training examples + - y is the actual binary label (0 or 1) for a given example + - hθ(x) is the predicted probability that x belongs to the positive class + + @param h: the output of sigmoid function. It is the estimated probability + that the input example 'x' belongs to the positive class + + @param y: the actual binary label associated with input example 'x' + + Examples: + >>> h1 = sigmoid_function(0.3) + >>> h2 = sigmoid_function(-4.3) + >>> h3 = sigmoid_function(8.1) + >>> h = np.array([h1,h2,h3]) + >>> y = np.array([1,0,1]) + >>> cost_function(h,y) + 0.18937868932131605 + >>> h1 = sigmoid_function(4) + >>> h2 = sigmoid_function(3) + >>> h3 = sigmoid_function(1) + >>> h = np.array([h1,h2,h3]) + >>> y = np.array([1,0,0]) + >>> cost_function(h,y) + 1.459999655669926 + >>> h1 = sigmoid_function(4) + >>> h2 = sigmoid_function(-3) + >>> h3 = sigmoid_function(-1) + >>> h = np.array([h1,h2,h3]) + >>> y = np.array([1,0,0]) + >>> cost_function(h,y) + 0.1266663223365915 + + References: + - https://en.wikipedia.org/wiki/Logistic_regression + """ return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean() @@ -75,6 +128,10 @@ def logistic_reg(alpha, x, y, max_iterations=70000): # In[68]: if __name__ == "__main__": + import doctest + + doctest.testmod() + iris = datasets.load_iris() x = iris.data[:, :2] y = (iris.target != 0) * 1 From d1bc2e860c950e55cf29e72d33f24f4b712ddc73 Mon Sep 17 00:00:00 2001 From: Suyash Dongre <109069262+Suyashd999@users.noreply.github.com> Date: Wed, 25 Oct 2023 22:26:48 +0530 Subject: [PATCH 2/7] Update logistic_regression.py --- machine_learning/logistic_regression.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index bc5340f46a8a..af6ad8572c1d 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -14,6 +14,8 @@ Coursera ML course https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac """ +from typing import Union + import numpy as np from matplotlib import pyplot as plt from sklearn import datasets @@ -27,7 +29,7 @@ # classification problems -def 
sigmoid_function(z): +def sigmoid_function(z: Union[int, np.ndarray]) -> Union[int, np.ndarray]: """ Also known as Logistic Function. @@ -50,11 +52,19 @@ def sigmoid_function(z): array([0.04742587, 0.95257413]) >>> sigmoid_function(np.array([-3,3,1])) array([0.04742587, 0.95257413, 0.73105858]) + >>> sigmoid_function(np.array([-0.01,-2,-1.9])) + array([0.49750002, 0.11920292, 0.13010847]) + >>> sigmoid_function(np.array([-1.3,5.3,12])) + array([0.21416502, 0.9950332 , 0.99999386]) + >>> sigmoid_function(np.array([0.01,0.02,4.1])) + array([0.50249998, 0.50499983, 0.9836975 ]) + >>> sigmoid_function(np.array([0.8])) + array([0.68997448]) """ return 1 / (1 + np.exp(-z)) -def cost_function(h, y): +def cost_function(h: Union[int, np.ndarray], y: Union[int, np.ndarray]) -> int: """ Cost function quantifies the error between predicted and expected values. The cost function used in Logistic Regression is called Log Loss @@ -96,6 +106,10 @@ def cost_function(h, y): >>> y = np.array([1,0,0]) >>> cost_function(h,y) 0.1266663223365915 + >>> h = sigmoid_function(0) + >>> y = 1 + >>> cost_function(h,y) + 0.6931471805599453 References: - https://en.wikipedia.org/wiki/Logistic_regression From 79d3d5485b4595f0bf029e0505e35c0834cb7fa6 Mon Sep 17 00:00:00 2001 From: Suyash Dongre <109069262+Suyashd999@users.noreply.github.com> Date: Wed, 25 Oct 2023 22:31:42 +0530 Subject: [PATCH 3/7] Update logistic_regression.py --- machine_learning/logistic_regression.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index af6ad8572c1d..04e35a7d68bb 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -14,8 +14,6 @@ Coursera ML course https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac """ -from typing import Union - import numpy as np from matplotlib import pyplot as plt from sklearn import datasets @@ -29,7 +27,7 @@ # classification problems -def sigmoid_function(z: Union[int, np.ndarray]) -> Union[int, np.ndarray]: +def sigmoid_function(z: np.ndarray) -> np.ndarray: """ Also known as Logistic Function. @@ -64,7 +62,7 @@ def sigmoid_function(z: Union[int, np.ndarray]) -> Union[int, np.ndarray]: return 1 / (1 + np.exp(-z)) -def cost_function(h: Union[int, np.ndarray], y: Union[int, np.ndarray]) -> int: +def cost_function(h: np.ndarray, y: np.ndarray) -> int: """ Cost function quantifies the error between predicted and expected values. 
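Editorial note (not part of the patch series): the doctest values introduced above can be sanity-checked outside the test harness with a short NumPy-only sketch. The two function bodies below simply mirror what the patched machine_learning/logistic_regression.py defines; the variable names `probabilities` and `labels` are illustrative and do not appear in the PR.

import numpy as np

def sigmoid_function(z):
    # logistic function: squashes any real input into the open interval (0, 1)
    return 1 / (1 + np.exp(-z))

def cost_function(h, y):
    # binary cross-entropy / log loss, averaged over all examples:
    # J = (1/m) * sum(-y * log(h) - (1 - y) * log(1 - h))
    return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()

print(sigmoid_function(4))                        # 0.9820137900379085
print(sigmoid_function(np.array([-3, 3])))        # [0.04742587 0.95257413]

probabilities = sigmoid_function(np.array([0.3, -4.3, 8.1]))
labels = np.array([1, 0, 1])
print(cost_function(probabilities, labels))       # 0.18937868932131605

Because patch 1/7 also wires doctest.testmod() into the __main__ block, running the module itself executes every example above before the iris demo starts.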
The cost function used in Logistic Regression is called Log Loss From 694bb0396413dd2ec4a1f4644f81cdc17ac4ebad Mon Sep 17 00:00:00 2001 From: Tianyi Zheng Date: Thu, 26 Oct 2023 01:35:46 -0400 Subject: [PATCH 4/7] Minor formatting changes in doctests --- machine_learning/logistic_regression.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index 04e35a7d68bb..2b8aab367f29 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -46,15 +46,15 @@ def sigmoid_function(z: np.ndarray) -> np.ndarray: Examples: >>> sigmoid_function(4) 0.9820137900379085 - >>> sigmoid_function(np.array([-3,3])) + >>> sigmoid_function(np.array([-3, 3])) array([0.04742587, 0.95257413]) - >>> sigmoid_function(np.array([-3,3,1])) + >>> sigmoid_function(np.array([-3, 3, 1])) array([0.04742587, 0.95257413, 0.73105858]) - >>> sigmoid_function(np.array([-0.01,-2,-1.9])) + >>> sigmoid_function(np.array([-0.01, -2, -1.9])) array([0.49750002, 0.11920292, 0.13010847]) - >>> sigmoid_function(np.array([-1.3,5.3,12])) + >>> sigmoid_function(np.array([-1.3, 5.3, 12])) array([0.21416502, 0.9950332 , 0.99999386]) - >>> sigmoid_function(np.array([0.01,0.02,4.1])) + >>> sigmoid_function(np.array([0.01, 0.02, 4.1])) array([0.50249998, 0.50499983, 0.9836975 ]) >>> sigmoid_function(np.array([0.8])) array([0.68997448]) @@ -86,27 +86,27 @@ def cost_function(h: np.ndarray, y: np.ndarray) -> int: >>> h1 = sigmoid_function(0.3) >>> h2 = sigmoid_function(-4.3) >>> h3 = sigmoid_function(8.1) - >>> h = np.array([h1,h2,h3]) - >>> y = np.array([1,0,1]) + >>> h = np.array([h1, h2, h3]) + >>> y = np.array([1, 0, 1]) >>> cost_function(h,y) 0.18937868932131605 >>> h1 = sigmoid_function(4) >>> h2 = sigmoid_function(3) >>> h3 = sigmoid_function(1) - >>> h = np.array([h1,h2,h3]) - >>> y = np.array([1,0,0]) + >>> h = np.array([h1, h2, h3]) + >>> y = np.array([1, 0, 0]) >>> cost_function(h,y) 1.459999655669926 >>> h1 = sigmoid_function(4) >>> h2 = sigmoid_function(-3) >>> h3 = sigmoid_function(-1) - >>> h = np.array([h1,h2,h3]) - >>> y = np.array([1,0,0]) + >>> h = np.array([h1, h2, h3]) + >>> y = np.array([1, 0, 0]) >>> cost_function(h,y) 0.1266663223365915 >>> h = sigmoid_function(0) >>> y = 1 - >>> cost_function(h,y) + >>> cost_function(h, y) 0.6931471805599453 References: From 59dcc441de176f19623fda8c1c4aa1856032e946 Mon Sep 17 00:00:00 2001 From: Tianyi Zheng Date: Thu, 26 Oct 2023 02:03:00 -0400 Subject: [PATCH 5/7] Apply suggestions from code review --- machine_learning/logistic_regression.py | 34 ++++++++++--------------- 1 file changed, 13 insertions(+), 21 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index 2b8aab367f29..5ed902ba9fa9 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -83,30 +83,22 @@ def cost_function(h: np.ndarray, y: np.ndarray) -> int: @param y: the actual binary label associated with input example 'x' Examples: - >>> h1 = sigmoid_function(0.3) - >>> h2 = sigmoid_function(-4.3) - >>> h3 = sigmoid_function(8.1) - >>> h = np.array([h1, h2, h3]) - >>> y = np.array([1, 0, 1]) - >>> cost_function(h,y) + >>> estimations = np.array([ + ... sigmoid_function(0.3), sigmoid_function(-4.3), sigmoid_function(8.1) + ... 
]) + >>> cost_function(h=estimations, y=np.array([1, 0, 1])) 0.18937868932131605 - >>> h1 = sigmoid_function(4) - >>> h2 = sigmoid_function(3) - >>> h3 = sigmoid_function(1) - >>> h = np.array([h1, h2, h3]) - >>> y = np.array([1, 0, 0]) - >>> cost_function(h,y) + >>> estimations = np.array([ + ... sigmoid_function(4), sigmoid_function(3), sigmoid_function(1) + ... ]) + >>> cost_function(h=estimations, y=np.array([1, 0, 0])) 1.459999655669926 - >>> h1 = sigmoid_function(4) - >>> h2 = sigmoid_function(-3) - >>> h3 = sigmoid_function(-1) - >>> h = np.array([h1, h2, h3]) - >>> y = np.array([1, 0, 0]) - >>> cost_function(h,y) + >>> estimations = np.array([ + ... sigmoid_function(4), sigmoid_function(-3), sigmoid_function(-1) + ... ]) + >>> cost_function(h=estimations, y=np.array([1, 0, 0])) 0.1266663223365915 - >>> h = sigmoid_function(0) - >>> y = 1 - >>> cost_function(h, y) + >>> cost_function(h=np.array([sigmoid_function(0)]), y=np.array([1])) 0.6931471805599453 References: From 4ef1ec81e76a912104af8600fed03b668ce27a35 Mon Sep 17 00:00:00 2001 From: Suyash Dongre <109069262+Suyashd999@users.noreply.github.com> Date: Thu, 26 Oct 2023 12:06:06 +0530 Subject: [PATCH 6/7] Made requested changes in logistic_regression.py --- machine_learning/logistic_regression.py | 35 +++++++++++-------------- 1 file changed, 15 insertions(+), 20 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index 5ed902ba9fa9..d714d2b6e10f 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -27,7 +27,7 @@ # classification problems -def sigmoid_function(z: np.ndarray) -> np.ndarray: +def sigmoid_function(z: float | np.ndarray) -> float | np.ndarray: """ Also known as Logistic Function. @@ -46,15 +46,15 @@ def sigmoid_function(z: np.ndarray) -> np.ndarray: Examples: >>> sigmoid_function(4) 0.9820137900379085 - >>> sigmoid_function(np.array([-3, 3])) + >>> sigmoid_function(np.array([-3,3])) array([0.04742587, 0.95257413]) - >>> sigmoid_function(np.array([-3, 3, 1])) + >>> sigmoid_function(np.array([-3,3,1])) array([0.04742587, 0.95257413, 0.73105858]) - >>> sigmoid_function(np.array([-0.01, -2, -1.9])) + >>> sigmoid_function(np.array([-0.01,-2,-1.9])) array([0.49750002, 0.11920292, 0.13010847]) - >>> sigmoid_function(np.array([-1.3, 5.3, 12])) + >>> sigmoid_function(np.array([-1.3,5.3,12])) array([0.21416502, 0.9950332 , 0.99999386]) - >>> sigmoid_function(np.array([0.01, 0.02, 4.1])) + >>> sigmoid_function(np.array([0.01,0.02,4.1])) array([0.50249998, 0.50499983, 0.9836975 ]) >>> sigmoid_function(np.array([0.8])) array([0.68997448]) @@ -62,7 +62,7 @@ def sigmoid_function(z: np.ndarray) -> np.ndarray: return 1 / (1 + np.exp(-z)) -def cost_function(h: np.ndarray, y: np.ndarray) -> int: +def cost_function(h: np.ndarray, y: np.ndarray) -> float: """ Cost function quantifies the error between predicted and expected values. The cost function used in Logistic Regression is called Log Loss @@ -83,22 +83,17 @@ def cost_function(h: np.ndarray, y: np.ndarray) -> int: @param y: the actual binary label associated with input example 'x' Examples: - >>> estimations = np.array([ - ... sigmoid_function(0.3), sigmoid_function(-4.3), sigmoid_function(8.1) - ... ]) - >>> cost_function(h=estimations, y=np.array([1, 0, 1])) + >>> estimations = sigmoid_function(np.array([0.3,-4.3,8.1])) + >>> cost_function(h=estimations,y=np.array([1,0,1])) 0.18937868932131605 - >>> estimations = np.array([ - ... 
sigmoid_function(4), sigmoid_function(3), sigmoid_function(1) - ... ]) - >>> cost_function(h=estimations, y=np.array([1, 0, 0])) + >>> estimations = sigmoid_function(np.array([4,3,1])) + >>> cost_function(h=estimations,y=np.array([1,0,0])) 1.459999655669926 - >>> estimations = np.array([ - ... sigmoid_function(4), sigmoid_function(-3), sigmoid_function(-1) - ... ]) - >>> cost_function(h=estimations, y=np.array([1, 0, 0])) + >>> estimations = sigmoid_function(np.array([4,-3,-1])) + >>> cost_function(h=estimations,y=np.array([1,0,0])) 0.1266663223365915 - >>> cost_function(h=np.array([sigmoid_function(0)]), y=np.array([1])) + >>> estimations = sigmoid_function(0) + >>> cost_function(h=estimations,y=np.array([1])) 0.6931471805599453 References: From c9e403c70916c1bc20297faac96780108cdb06ea Mon Sep 17 00:00:00 2001 From: Tianyi Zheng Date: Thu, 26 Oct 2023 03:49:40 -0400 Subject: [PATCH 7/7] Apply suggestions from code review --- machine_learning/logistic_regression.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py index d714d2b6e10f..59a70fd65cf9 100644 --- a/machine_learning/logistic_regression.py +++ b/machine_learning/logistic_regression.py @@ -46,15 +46,15 @@ def sigmoid_function(z: float | np.ndarray) -> float | np.ndarray: Examples: >>> sigmoid_function(4) 0.9820137900379085 - >>> sigmoid_function(np.array([-3,3])) + >>> sigmoid_function(np.array([-3, 3])) array([0.04742587, 0.95257413]) - >>> sigmoid_function(np.array([-3,3,1])) + >>> sigmoid_function(np.array([-3, 3, 1])) array([0.04742587, 0.95257413, 0.73105858]) - >>> sigmoid_function(np.array([-0.01,-2,-1.9])) + >>> sigmoid_function(np.array([-0.01, -2, -1.9])) array([0.49750002, 0.11920292, 0.13010847]) - >>> sigmoid_function(np.array([-1.3,5.3,12])) + >>> sigmoid_function(np.array([-1.3, 5.3, 12])) array([0.21416502, 0.9950332 , 0.99999386]) - >>> sigmoid_function(np.array([0.01,0.02,4.1])) + >>> sigmoid_function(np.array([0.01, 0.02, 4.1])) array([0.50249998, 0.50499983, 0.9836975 ]) >>> sigmoid_function(np.array([0.8])) array([0.68997448]) @@ -83,13 +83,13 @@ def cost_function(h: np.ndarray, y: np.ndarray) -> float: @param y: the actual binary label associated with input example 'x' Examples: - >>> estimations = sigmoid_function(np.array([0.3,-4.3,8.1])) - >>> cost_function(h=estimations,y=np.array([1,0,1])) + >>> estimations = sigmoid_function(np.array([0.3, -4.3, 8.1])) + >>> cost_function(h=estimations,y=np.array([1, 0, 1])) 0.18937868932131605 - >>> estimations = sigmoid_function(np.array([4,3,1])) - >>> cost_function(h=estimations,y=np.array([1,0,0])) + >>> estimations = sigmoid_function(np.array([4, 3, 1])) + >>> cost_function(h=estimations,y=np.array([1, 0, 0])) 1.459999655669926 - >>> estimations = sigmoid_function(np.array([4,-3,-1])) + >>> estimations = sigmoid_function(np.array([4, -3, -1])) >>> cost_function(h=estimations,y=np.array([1,0,0])) 0.1266663223365915 >>> estimations = sigmoid_function(0)
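Editorial note (not part of the patch series): to show how the two documented helpers typically work together, here is a minimal gradient-descent sketch. The body of the repository's logistic_reg(alpha, x, y, max_iterations=70000) is not visible in these hunks, so the fit_logistic loop below is an assumption about its general shape rather than a copy, and the toy data is made up purely for illustration.

import numpy as np

def sigmoid_function(z):
    return 1 / (1 + np.exp(-z))

def cost_function(h, y):
    return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()

def fit_logistic(x, y, alpha=0.1, max_iterations=10_000):
    # hypothetical helper, named here for illustration only
    theta = np.zeros(x.shape[1])             # one weight per feature, no separate bias term
    for _ in range(max_iterations):
        h = sigmoid_function(x @ theta)      # predicted probabilities h_theta(x)
        gradient = x.T @ (h - y) / y.size    # gradient of the averaged log loss
        theta -= alpha * gradient            # gradient-descent step
    return theta

x = np.array([[1.0, 0.5], [1.0, 1.5], [1.0, -1.0], [1.0, -2.0]])  # first column acts as a bias feature
y = np.array([1, 1, 0, 0])
theta = fit_logistic(x, y)
print(cost_function(sigmoid_function(x @ theta), y))  # approaches 0, since the toy data is separable

Keeping the cost averaged with .mean() rather than summed matches the (1/m) factor in the docstring formula and makes the effective step size of alpha independent of the number of training examples.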