2
2
from matplotlib import pyplot as plt
3
3
from sklearn import datasets
4
4
5
+
5
6
# Ridge Regression function
6
7
# reference : https://en.wikipedia.org/wiki/Ridge_regression
7
def ridge_cost_function(
    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
) -> float:
    """
    Compute the Ridge regression cost function with L2 regularization.

    The cost is the mean-squared-error term plus an L2 penalty on the
    weights. The intercept term ``theta[0]`` is conventionally excluded
    from the penalty, which is why the regularization sums ``theta[1:]``.

    Reference: https://en.wikipedia.org/wiki/Ridge_regression

    :param x: Feature matrix of shape (m, n); column 0 is assumed to be
        the intercept column.
    :param y: Target vector of shape (m,).
    :param theta: Parameter vector of shape (n,).
    :param alpha: Regularization strength (lambda); 0 disables the penalty.
    :return: The scalar Ridge cost.

    >>> import numpy as np
    >>> x = np.array([[1.0, 1.0], [1.0, 2.0]])
    >>> y = np.array([1.0, 2.0])
    >>> float(ridge_cost_function(x, y, np.zeros(2), alpha=0.0))
    1.25
    """
    # Number of training examples drives the 1/(2m) MSE normalization.
    m = len(y)
    predictions = np.dot(x, theta)
    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
        theta[1:] ** 2
    )

    return cost
33
37
34
- def ridge_gradient_descent (x : np .ndarray , y : np .ndarray , theta : np .ndarray , alpha : float , learning_rate : float , max_iterations : int ) -> np .ndarray :
38
+
39
+ def ridge_gradient_descent (
40
+ x : np .ndarray ,
41
+ y : np .ndarray ,
42
+ theta : np .ndarray ,
43
+ alpha : float ,
44
+ learning_rate : float ,
45
+ max_iterations : int ,
46
+ ) -> np .ndarray :
35
47
"""
36
48
Perform gradient descent to minimize the cost function and fit the Ridge regression model.
37
49
@@ -62,9 +74,9 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
62
74
return theta
63
75
64
76
65
-
66
77
if __name__ == "__main__" :
67
78
import doctest
79
+
68
80
doctest .testmod ()
69
81
70
82
# Load California Housing dataset
@@ -84,18 +96,21 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
84
96
learning_rate = 0.01
85
97
max_iterations = 1000
86
98
87
- optimized_theta = ridge_gradient_descent (x , y , theta_initial , alpha , learning_rate , max_iterations )
99
+ optimized_theta = ridge_gradient_descent (
100
+ x , y , theta_initial , alpha , learning_rate , max_iterations
101
+ )
88
102
print (f"Optimized theta: { optimized_theta } " )
89
103
90
104
# Prediction
91
105
def predict (x , theta ):
92
106
return np .dot (x , theta )
107
+
93
108
y_pred = predict (x , optimized_theta )
94
109
95
110
# Plotting the results (here we visualize predicted vs actual values)
96
111
plt .figure (figsize = (10 , 6 ))
97
- plt .scatter (y , y_pred , color = 'b' , label = ' Predictions vs Actual' )
98
- plt .plot ([min (y ), max (y )], [min (y ), max (y )], color = 'r' , label = ' Perfect Fit' )
112
+ plt .scatter (y , y_pred , color = "b" , label = " Predictions vs Actual" )
113
+ plt .plot ([min (y ), max (y )], [min (y ), max (y )], color = "r" , label = " Perfect Fit" )
99
114
plt .xlabel ("Actual values" )
100
115
plt .ylabel ("Predicted values" )
101
116
plt .title ("Ridge Regression: Actual vs Predicted Values" )
0 commit comments