Skip to content

Commit e3b1e6e

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 07db6e0 commit e3b1e6e

File tree

1 file changed

+16
-6
lines changed

1 file changed

+16
-6
lines changed

machine_learning/Ridge_Regression.py

+16-6
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import numpy as np
22
import requests
33

4+
45
def collect_dataset():
56
"""Collect dataset of CSGO
67
The dataset contains ADR vs Rating of a Player
@@ -20,6 +21,7 @@ def collect_dataset():
2021
dataset = np.matrix(data)
2122
return dataset
2223

24+
2325
def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_reg):
2426
"""Run steep gradient descent and updates the Feature vector accordingly
2527
:param data_x : contains the dataset
@@ -36,7 +38,7 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_re
3638
prod = np.dot(theta, data_x.transpose())
3739
prod -= data_y.transpose()
3840
sum_grad = np.dot(prod, data_x)
39-
41+
4042
# Add regularization to the gradient
4143
theta_regularized = np.copy(theta)
4244
theta_regularized[0, 0] = 0 # Don't regularize the bias term
@@ -45,6 +47,7 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_re
4547
theta = theta - (alpha / n) * sum_grad
4648
return theta
4749

50+
4851
def sum_of_square_error(data_x, data_y, len_data, theta, lambda_reg):
    """Return the regularized (ridge) sum-of-squares cost.

    :param data_x : contains our dataset
    :param data_y : contains the output (result vector)
    :param len_data : length of the dataset
    :param theta : feature vector (weights of the model), shape (1, n_features)
    :param lambda_reg : regularization strength (lambda)
    :return : ridge cost of `theta` over the dataset
    """
    # Residuals between the model's predictions and the observed targets.
    residuals = np.dot(theta, data_x.transpose()) - data_y.transpose()
    squared_sum = np.sum(np.square(residuals))

    # L2 penalty on the weights; theta[:, 1:] deliberately excludes the
    # bias term (column 0) from regularization.
    penalty = lambda_reg * np.sum(np.square(theta[:, 1:]))

    # Same two-term form as the data-fit average: each piece scaled by 1/(2m).
    return (squared_sum / (2 * len_data)) + (penalty / (2 * len_data))
6570

71+
6672
def run_ridge_regression(data_x, data_y, lambda_reg=1.0):
6773
"""Implement Ridge Regression over the dataset
6874
:param data_x : contains our dataset
@@ -79,12 +85,15 @@ def run_ridge_regression(data_x, data_y, lambda_reg=1.0):
7985
theta = np.zeros((1, no_features))
8086

8187
for i in range(iterations):
82-
theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_reg)
88+
theta = run_steep_gradient_descent(
89+
data_x, data_y, len_data, alpha, theta, lambda_reg
90+
)
8391
error = sum_of_square_error(data_x, data_y, len_data, theta, lambda_reg)
8492
print(f"At Iteration {i + 1} - Error is {error:.5f}")
8593

8694
return theta
8795

96+
8897
def mean_absolute_error(predicted_y, original_y):
    """Return mean absolute error for error calculation

    :param predicted_y : contains the output of prediction (result vector)
    :param original_y : contains values of expected outcome
    :return : mean of the absolute differences between the two vectors

    >>> mean_absolute_error([3.0, 2.0], [3.0, 4.0])
    1.0
    """
    # Pair predictions with observations via zip instead of indexing
    # predicted_y by position (clearer, and avoids the enumerate/index
    # anti-pattern). The vectors are expected to be the same length.
    total = sum(abs(expected - predicted) for predicted, expected in zip(predicted_y, original_y))
    return total / len(original_y)
96105

106+
97107
def main():
98108
"""Driver function"""
99109
data = collect_dataset()
@@ -104,12 +114,12 @@ def main():
104114

105115
lambda_reg = 1.0 # Set your desired regularization parameter
106116
theta = run_ridge_regression(data_x, data_y, lambda_reg)
107-
117+
108118
len_result = theta.shape[1]
109119
print("Resultant Feature vector : ")
110120
for i in range(len_result):
111121
print(f"{theta[0, i]:.5f}")
112122

123+
113124
if __name__ == "__main__":
114125
main()
115-

0 commit comments

Comments
 (0)