@@ -1,13 +1,11 @@
+
 import numpy as np
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
-
 # Ridge Regression function
 # reference: https://en.wikipedia.org/wiki/Ridge_regression
-def ridge_cost_function(
-    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
-) -> float:
+def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
     """
     Compute the Ridge regression cost function with L2 regularization.
 
@@ -29,21 +27,12 @@ def ridge_cost_function(
     """
     m = len(y)
     predictions = np.dot(x, theta)
-    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
-        theta[1:] ** 2
-    )
+    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + \
+           (alpha / 2) * np.sum(theta[1:] ** 2)
 
     return cost
 
-
-def ridge_gradient_descent(
-    x: np.ndarray,
-    y: np.ndarray,
-    theta: np.ndarray,
-    alpha: float,
-    learning_rate: float,
-    max_iterations: int,
-) -> np.ndarray:
+def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
     """
     Perform gradient descent to minimize the
     cost function and fit the Ridge regression model.
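
Note, not part of the patch: the `cost` expression in the hunk above is the standard L2-regularized least-squares objective, with the bias term `theta[0]` left out of the penalty (hence the `theta[1:]` slice):

    J(\theta) = \frac{1}{2m} \sum_{i=1}^{m} \left( x_i^{\top}\theta - y_i \right)^2 + \frac{\alpha}{2} \sum_{j=1}^{n} \theta_j^2
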
@@ -74,10 +63,8 @@ def ridge_gradient_descent(
 
     return theta
 
-
 if __name__ == "__main__":
     import doctest
-
     doctest.testmod()
 
     # Load California Housing dataset
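
Note, not part of the patch: the loop body of `ridge_gradient_descent` is untouched by this change and therefore elided from the diff. As a rough sketch only, a batch update consistent with the cost above could look like the following (the function name and body here are illustrative, not the file's actual code):

    import numpy as np

    def ridge_gradient_descent_sketch(x, y, theta, alpha, learning_rate, max_iterations):
        # illustrative only; mirrors the signature added in this diff
        m = len(y)
        for _ in range(max_iterations):
            predictions = np.dot(x, theta)
            gradient = (1 / m) * np.dot(x.T, predictions - y)  # gradient of the squared-error term
            gradient[1:] += alpha * theta[1:]  # L2 penalty; the bias theta[0] is left unpenalized
            theta = theta - learning_rate * gradient
        return theta
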
@@ -97,9 +84,7 @@ def ridge_gradient_descent(
     learning_rate = 0.01
     max_iterations = 1000
 
-    optimized_theta = ridge_gradient_descent(
-        x, y, theta_initial, alpha, learning_rate, max_iterations
-    )
+    optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
@@ -110,10 +95,11 @@ def predict(x, theta):
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
-    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
+    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
+    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
     plt.legend()
     plt.show()
+
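
Note, not part of the patch: a quick way to sanity-check the fitted parameters produced by this script, assuming `x` and `y` are the feature matrix and target prepared in the elided dataset-loading lines:

    # hypothetical check, run inside the __main__ block after fitting
    final_cost = ridge_cost_function(x, y, optimized_theta, alpha)
    print(f"Ridge cost at optimized theta: {final_cost:.4f}")
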