@@ -1,11 +1,13 @@
-
 import numpy as np
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
+
 # Ridge Regression function
 # reference: https://en.wikipedia.org/wiki/Ridge_regression
-def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
+def ridge_cost_function(
+    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
+) -> float:
     """
     Compute the Ridge regression cost function with L2 regularization.
 
@@ -27,12 +29,21 @@ def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha:
     """
     m = len(y)
     predictions = np.dot(x, theta)
-    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + \
-        (alpha / 2) * np.sum(theta[1:] ** 2)
+    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
+        theta[1:] ** 2
+    )
 
     return cost
 
-def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
+
+def ridge_gradient_descent(
+    x: np.ndarray,
+    y: np.ndarray,
+    theta: np.ndarray,
+    alpha: float,
+    learning_rate: float,
+    max_iterations: int,
+) -> np.ndarray:
     """
     Perform gradient descent to minimize the cost function and fit the Ridge regression model.
 
@@ -62,8 +73,10 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
 
     return theta
 
+
 if __name__ == "__main__":
     import doctest
+
     doctest.testmod()
 
     # Load California Housing dataset
@@ -83,7 +96,9 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
     learning_rate = 0.01
     max_iterations = 1000
 
-    optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
+    optimized_theta = ridge_gradient_descent(
+        x, y, theta_initial, alpha, learning_rate, max_iterations
+    )
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
@@ -94,11 +109,10 @@ def predict(x, theta):
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
-    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
+    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
+    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
     plt.legend()
     plt.show()
-
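The hunks above are formatting-only, and the diff context elides the body of ridge_gradient_descent and the predict helper. For readers who want to run the idea end to end, here is a minimal, self-contained sketch of the L2-regularized cost and a standard batch gradient-descent update, consistent with the signatures and the cost expression shown above. The names ridge_cost and ridge_gd, the synthetic data, and the loop details are illustrative assumptions, not the contents of the actual file.

import numpy as np


def ridge_cost(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    # Mean squared error plus an L2 penalty that skips the intercept theta[0],
    # mirroring the cost expression in the hunk above.
    m = len(y)
    residuals = np.dot(x, theta) - y
    return (1 / (2 * m)) * np.sum(residuals**2) + (alpha / 2) * np.sum(theta[1:] ** 2)


def ridge_gd(
    x: np.ndarray,
    y: np.ndarray,
    theta: np.ndarray,
    alpha: float,
    learning_rate: float,
    max_iterations: int,
) -> np.ndarray:
    # Assumed batch gradient descent: the penalty contributes alpha * theta to the
    # gradient of every coefficient except the intercept.
    m = len(y)
    theta = theta.astype(float)
    for _ in range(max_iterations):
        gradient = np.dot(x.T, np.dot(x, theta) - y) / m
        gradient[1:] += alpha * theta[1:]
        theta -= learning_rate * gradient
    return theta


if __name__ == "__main__":
    # Tiny synthetic check (illustrative data, not the California Housing dataset).
    rng = np.random.default_rng(0)
    x = np.c_[np.ones(200), rng.normal(size=(200, 2))]  # bias column + 2 features
    y = np.dot(x, np.array([1.0, 2.0, -3.0])) + rng.normal(scale=0.1, size=200)
    theta = ridge_gd(x, y, np.zeros(3), alpha=0.1, learning_rate=0.1, max_iterations=2000)
    print("cost:", ridge_cost(x, y, theta, alpha=0.1))
    print("theta:", theta)

Excluding theta[0] from the penalty matches the theta[1:] slice in the diff, so the intercept is never shrunk toward zero.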