From 67ddf3af133e7795e9195cfc2665a9f0f1bb05b5 Mon Sep 17 00:00:00 2001
From: Amrit Khera
Date: Tue, 6 Aug 2019 21:35:37 +0530
Subject: [PATCH 1/4] Infinite loop was fixed. Removed issue of unused
 variables.

---
 machine_learning/logistic_regression.py | 27 +++++++------------------
 1 file changed, 7 insertions(+), 20 deletions(-)

diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py
index 853de7896af1..eec73d73be8e 100644
--- a/machine_learning/logistic_regression.py
+++ b/machine_learning/logistic_regression.py
@@ -40,34 +40,20 @@ def logistic_reg(
     alpha,
     X,
     y,
-    num_steps,
     max_iterations=70000,
     ):
-    converged = False
-    iterations = 0
     theta = np.zeros(X.shape[1])

-    while not converged:
+    for iterations in range(max_iterations):
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         gradient = np.dot(X.T, h - y) / y.size
-        theta = theta - alpha * gradient
+        theta = theta - alpha * gradient #updating the weights
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         J = cost_function(h, y)
-        iterations += 1 # update iterations
-        weights = np.zeros(X.shape[1])
-        for step in range(num_steps):
-            scores = np.dot(X, weights)
-            predictions = sigmoid_function(scores)
-            if step % 10000 == 0:
-                print(log_likelihood(X,y,weights)) # Print log-likelihood every so often
-        return weights
-
-        if iterations == max_iterations:
-            print('Maximum iterations exceeded!')
-            print('Minimal cost function J=', J)
-            converged = True
+        if iterations % 100 == 0:
+            print(f'loss: {J} \t') #printing the loss after every 100 iterations
     return theta

 # In[68]:
@@ -78,8 +64,8 @@ def logistic_reg(
     y = (iris.target != 0) * 1

     alpha = 0.1
-    theta = logistic_reg(alpha,X,y,max_iterations=70000,num_steps=30000)
-    print(theta)
+    theta = logistic_reg(alpha,X,y,max_iterations=70000)
+    print("theta: ",theta) #printing the theta i.e our weights vecto


     def predict_prob(X):
@@ -105,3 +91,4 @@ def predict_prob(X):
         )

     plt.legend()
+    plt.show()
\ No newline at end of file

From 94e585c60075123c38f47a55071ad439c3c77b70 Mon Sep 17 00:00:00 2001
From: John Law
Date: Wed, 7 Aug 2019 23:55:44 +0800
Subject: [PATCH 2/4] Update logistic_regression.py

---
 machine_learning/logistic_regression.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py
index eec73d73be8e..7a2dea0d40fe 100644
--- a/machine_learning/logistic_regression.py
+++ b/machine_learning/logistic_regression.py
@@ -48,12 +48,12 @@ def logistic_reg(
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         gradient = np.dot(X.T, h - y) / y.size
-        theta = theta - alpha * gradient #updating the weights
+        theta = theta - alpha * gradient # updating the weights
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         J = cost_function(h, y)
         if iterations % 100 == 0:
-            print(f'loss: {J} \t') #printing the loss after every 100 iterations
+            print(f'loss: {J} \t') # printing the loss after every 100 iterations
     return theta

 # In[68]:
@@ -65,11 +65,11 @@ def logistic_reg(

     alpha = 0.1
     theta = logistic_reg(alpha,X,y,max_iterations=70000)
-    print("theta: ",theta) #printing the theta i.e our weights vecto
+    print("theta: ",theta) # printing the theta i.e our weights vecto


     def predict_prob(X):
-        return sigmoid_function(np.dot(X, theta)) # predicting the value of probability from the logistic regression algorithm
+        return sigmoid_function(np.dot(X, theta)) # predicting the value of probability from the logistic regression algorithm


     plt.figure(figsize=(10, 6))
@@ -91,4 +91,4 @@ def predict_prob(X):
         )

     plt.legend()
-    plt.show()
\ No newline at end of file
+    plt.show()

From 2093ea6193fc1c216cee070a840d1b2da31b066f Mon Sep 17 00:00:00 2001
From: John Law
Date: Wed, 7 Aug 2019 23:56:25 +0800
Subject: [PATCH 3/4] Update logistic_regression.py

---
 machine_learning/logistic_regression.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py
index 7a2dea0d40fe..a9b8efb47a1e 100644
--- a/machine_learning/logistic_regression.py
+++ b/machine_learning/logistic_regression.py
@@ -65,7 +65,7 @@ def logistic_reg(

     alpha = 0.1
     theta = logistic_reg(alpha,X,y,max_iterations=70000)
-    print("theta: ",theta) # printing the theta i.e our weights vecto
+    print("theta: ",theta) # printing the theta i.e our weights vector


     def predict_prob(X):

From a9acf07eba7700db8be651e6d941bcca72d5bc83 Mon Sep 17 00:00:00 2001
From: Amrit Khera
Date: Thu, 8 Aug 2019 00:31:32 +0530
Subject: [PATCH 4/4] correct spacing according to PEP8

---
 machine_learning/logistic_regression.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/machine_learning/logistic_regression.py b/machine_learning/logistic_regression.py
index a9b8efb47a1e..b2749f1be260 100644
--- a/machine_learning/logistic_regression.py
+++ b/machine_learning/logistic_regression.py
@@ -48,12 +48,12 @@ def logistic_reg(
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         gradient = np.dot(X.T, h - y) / y.size
-        theta = theta - alpha * gradient # updating the weights
+        theta = theta - alpha * gradient  # updating the weights
         z = np.dot(X, theta)
         h = sigmoid_function(z)
         J = cost_function(h, y)
         if iterations % 100 == 0:
-            print(f'loss: {J} \t') # printing the loss after every 100 iterations
+            print(f'loss: {J} \t')  # printing the loss after every 100 iterations
     return theta

 # In[68]:
@@ -65,11 +65,11 @@ def logistic_reg(

     alpha = 0.1
     theta = logistic_reg(alpha,X,y,max_iterations=70000)
-    print("theta: ",theta) # printing the theta i.e our weights vector
+    print("theta: ",theta)  # printing the theta i.e our weights vector


     def predict_prob(X):
-        return sigmoid_function(np.dot(X, theta)) # predicting the value of probability from the logistic regression algorithm
+        return sigmoid_function(np.dot(X, theta))  # predicting the value of probability from the logistic regression algorithm


     plt.figure(figsize=(10, 6))
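
Note for readers of this series: the hunks above call sigmoid_function and cost_function, which are defined earlier in machine_learning/logistic_regression.py and are not shown in any hunk. The sketch below is illustrative only and not the repository's exact code: it pairs standard (assumed) implementations of those helpers with the training loop as it stands after these patches, and substitutes a small synthetic dataset for the iris example so it is self-contained.

# Illustrative sketch only: assumed helper implementations, not the exact
# repository code. Shows the patched training loop end to end.
import numpy as np


def sigmoid_function(z):
    # logistic (sigmoid) hypothesis: squashes any real value into (0, 1)
    return 1 / (1 + np.exp(-z))


def cost_function(h, y):
    # binary cross-entropy loss averaged over all samples
    return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()


def logistic_reg(alpha, X, y, max_iterations=70000):
    # bounded gradient-descent loop, replacing the old 'while not converged:'
    theta = np.zeros(X.shape[1])
    for iterations in range(max_iterations):
        z = np.dot(X, theta)
        h = sigmoid_function(z)
        gradient = np.dot(X.T, h - y) / y.size
        theta = theta - alpha * gradient  # updating the weights
        J = cost_function(sigmoid_function(np.dot(X, theta)), y)
        if iterations % 100 == 0:
            print(f"loss: {J} \t")  # printing the loss after every 100 iterations
    return theta


if __name__ == "__main__":
    # synthetic stand-in for the iris data used in the file
    rng = np.random.default_rng(0)
    X = rng.normal(size=(200, 2))
    y = (X[:, 0] + X[:, 1] > 0).astype(float)
    theta = logistic_reg(0.1, X, y, max_iterations=500)
    print("theta: ", theta)

The behavioural point of PATCH 1/4 is visible here: because the loop is a for over range(max_iterations) rather than 'while not converged:', training always terminates after at most max_iterations gradient-descent updates, even if the loss has not fully converged, and the dead code that depended on the removed num_steps parameter is gone.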