@@ -1,6 +1,7 @@
 import numpy as np
 import requests
 
+
 def collect_dataset():
     """Collect dataset of CSGO
     The dataset contains ADR vs Rating of a Player
@@ -20,6 +21,7 @@ def collect_dataset():
     dataset = np.matrix(data)
     return dataset
 
+
 def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_reg):
     """Run steep gradient descent and updates the Feature vector accordingly
     :param data_x : contains the dataset
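
A note on the hunk above: np.matrix is discouraged by NumPy and may be removed in a future release. This formatting pass leaves it alone, but a plain 2-D array is a drop-in replacement here; a minimal sketch, assuming the CSV fields parse as floats (the dtype choice is an assumption, not part of this diff):

    import numpy as np

    # A plain ndarray instead of the legacy np.matrix class; the np.dot
    # calls used throughout this file accept either type.
    dataset = np.asarray(data, dtype=float)  # "data" as built in collect_dataset()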
@@ -36,7 +38,7 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_re
     prod = np.dot(theta, data_x.transpose())
     prod -= data_y.transpose()
     sum_grad = np.dot(prod, data_x)
-
+
     # Add regularization to the gradient
     theta_regularized = np.copy(theta)
     theta_regularized[0, 0] = 0  # Don't regularize the bias term
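
The statement that folds theta_regularized into sum_grad sits between the hunks shown, so it is not visible here. For reference, a minimal sketch of the complete regularized gradient step implied by the surrounding lines; the "sum_grad += ..." line and the toy arrays are assumptions, not part of this diff:

    import numpy as np

    # Toy data: 3 samples, 2 features; column 0 is the bias/intercept column.
    data_x = np.array([[1.0, 2.0], [1.0, 3.0], [1.0, 4.0]])
    data_y = np.array([[5.0], [7.0], [9.0]])
    theta = np.zeros((1, 2))
    alpha, lambda_reg, n = 0.01, 1.0, data_x.shape[0]

    prod = np.dot(theta, data_x.transpose())    # predictions, shape (1, n)
    prod -= data_y.transpose()                  # residuals
    sum_grad = np.dot(prod, data_x)             # data term of the gradient

    theta_regularized = np.copy(theta)
    theta_regularized[0, 0] = 0                 # don't regularize the bias term
    sum_grad += lambda_reg * theta_regularized  # assumed elided line

    theta = theta - (alpha / n) * sum_grad      # the update shown in the next hunk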
@@ -45,6 +47,7 @@ def run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_re
     theta = theta - (alpha / n) * sum_grad
     return theta
 
+
 def sum_of_square_error(data_x, data_y, len_data, theta, lambda_reg):
     """Return sum of square error for error calculation
     :param data_x : contains our dataset
@@ -57,12 +60,15 @@ def sum_of_square_error(data_x, data_y, len_data, theta, lambda_reg):
     prod = np.dot(theta, data_x.transpose())
     prod -= data_y.transpose()
     sum_elem = np.sum(np.square(prod))
-
+
     # Add regularization to the cost function
-    regularization_term = lambda_reg * np.sum(np.square(theta[:, 1:]))  # Don't regularize the bias term
+    regularization_term = lambda_reg * np.sum(
+        np.square(theta[:, 1:])
+    )  # Don't regularize the bias term
     error = (sum_elem / (2 * len_data)) + (regularization_term / (2 * len_data))
     return error
 
+
 def run_ridge_regression(data_x, data_y, lambda_reg=1.0):
     """Implement Ridge Regression over the dataset
     :param data_x : contains our dataset
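
The error returned above is the standard ridge objective, (||X.theta^T - y||^2 + lambda * ||theta[:, 1:]||^2) / (2 * len_data), with the bias excluded from the penalty. A quick numeric check of that formula; the values below are illustrative only:

    import numpy as np

    data_x = np.array([[1.0, 2.0], [1.0, 3.0]])
    data_y = np.array([[5.0], [7.0]])
    theta = np.array([[1.0, 2.0]])
    lambda_reg, len_data = 1.0, 2

    prod = np.dot(theta, data_x.transpose()) - data_y.transpose()  # [[0., 0.]]
    sum_elem = np.sum(np.square(prod))                             # 0.0
    reg = lambda_reg * np.sum(np.square(theta[:, 1:]))             # 4.0, bias skipped
    print((sum_elem + reg) / (2 * len_data))                       # 1.0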
@@ -79,12 +85,15 @@ def run_ridge_regression(data_x, data_y, lambda_reg=1.0):
     theta = np.zeros((1, no_features))
 
     for i in range(iterations):
-        theta = run_steep_gradient_descent(data_x, data_y, len_data, alpha, theta, lambda_reg)
+        theta = run_steep_gradient_descent(
+            data_x, data_y, len_data, alpha, theta, lambda_reg
+        )
         error = sum_of_square_error(data_x, data_y, len_data, theta, lambda_reg)
         print(f"At Iteration {i + 1} - Error is {error:.5f}")
 
     return theta
 
+
 def mean_absolute_error(predicted_y, original_y):
     """Return mean absolute error for error calculation
     :param predicted_y : contains the output of prediction (result vector)
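
Assuming iterations, alpha, len_data, and no_features are initialized in the elided body of run_ridge_regression (those lines fall outside the hunks shown), training on toy data would look like this; the arrays are illustrative:

    import numpy as np

    # Column 0 is the all-ones bias column the functions above expect.
    data_x = np.array([[1.0, 2.0], [1.0, 3.0], [1.0, 4.0]])
    data_y = np.array([[5.0], [7.0], [9.0]])

    theta = run_ridge_regression(data_x, data_y, lambda_reg=0.1)
    print(theta)  # one row: [bias, slope]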
@@ -94,6 +103,7 @@ def mean_absolute_error(predicted_y, original_y):
     total = sum(abs(y - predicted_y[i]) for i, y in enumerate(original_y))
     return total / len(original_y)
 
+
 def main():
     """Driver function"""
     data = collect_dataset()
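
A one-line sanity check for mean_absolute_error as defined above (values are illustrative):

    print(mean_absolute_error([3.0, 5.0], [2.0, 7.0]))  # (|2 - 3| + |7 - 5|) / 2 = 1.5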
@@ -104,12 +114,12 @@ def main():
 
     lambda_reg = 1.0  # Set your desired regularization parameter
     theta = run_ridge_regression(data_x, data_y, lambda_reg)
-
+
     len_result = theta.shape[1]
     print("Resultant Feature vector : ")
     for i in range(len_result):
         print(f"{theta[0, i]:.5f}")
 
+
 if __name__ == "__main__":
     main()
-
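
To sanity-check the gradient-descent result, ridge regression also admits a closed-form solution, theta = (X^T X + lambda * I)^(-1) X^T y, where the bias entry of I is zeroed so the intercept stays unpenalized, matching the code above. A minimal sketch on toy data; this is a reviewer cross-check, not part of the PR:

    import numpy as np

    data_x = np.array([[1.0, 2.0], [1.0, 3.0], [1.0, 4.0]])  # column 0 = bias
    data_y = np.array([[5.0], [7.0], [9.0]])
    lambda_reg = 0.1

    identity = np.eye(data_x.shape[1])
    identity[0, 0] = 0  # don't regularize the bias term
    theta_closed = np.linalg.solve(
        data_x.T @ data_x + lambda_reg * identity, data_x.T @ data_y
    )
    print(theta_closed.ravel())  # should be close to the gradient-descent theta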