from matplotlib import pyplot as plt
from sklearn import datasets
-
# Ridge Regression function
# reference: https://en.wikipedia.org/wiki/Ridge_regression
- def ridge_cost_function(
-     x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
- ) -> float:
+ def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    """
    Compute the Ridge regression cost function with L2 regularization.
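The hunk below elides the body of ridge_cost_function. For context, a minimal sketch of the standard L2-regularized least-squares cost that the docstring and the linked Wikipedia page describe; the helper name and the 1/(2m) scaling are illustrative assumptions, not the file's elided code:

import numpy as np

def ridge_cost_sketch(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    # Squared-error term averaged over the m samples
    m = len(y)
    residuals = x @ theta - y
    cost = float(np.dot(residuals, residuals)) / (2 * m)
    # L2 penalty on the coefficients, weighted by alpha
    cost += alpha * float(np.dot(theta, theta)) / (2 * m)
    return cost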
@@ -34,15 +31,7 @@ def ridge_cost_function(

    return cost

-
- def ridge_gradient_descent(
-     x: np.ndarray,
-     y: np.ndarray,
-     theta: np.ndarray,
-     alpha: float,
-     learning_rate: float,
-     max_iterations: int,
- ) -> np.ndarray:
+ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
    """
    Perform gradient descent to minimize the cost function and fit the Ridge regression model.
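The body of ridge_gradient_descent is likewise elided by the hunk above. A minimal sketch of the usual batch gradient-descent update for that cost, with the same hedge that the names and 1/m scaling are assumptions rather than the elided implementation:

import numpy as np

def ridge_gradient_descent_sketch(
    x: np.ndarray,
    y: np.ndarray,
    theta: np.ndarray,
    alpha: float,
    learning_rate: float,
    max_iterations: int,
) -> np.ndarray:
    m = len(y)
    for _ in range(max_iterations):
        # Gradient of the ridge cost: data-fit term plus the L2 penalty term
        gradient = (x.T @ (x @ theta - y) + alpha * theta) / m
        theta = theta - learning_rate * gradient
    return theta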
@@ -73,9 +62,9 @@ def ridge_gradient_descent(
    return theta


+
if __name__ == "__main__":
    import doctest
-
    doctest.testmod()

    # Load California Housing dataset
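The dataset-loading lines under this comment are elided by the next hunk. One plausible reading, given the sklearn import at the top of the file; fetch_california_housing, the bias column, and theta_initial are assumptions about the elided code, not quotes from it:

import numpy as np
from sklearn import datasets

housing = datasets.fetch_california_housing()
x = np.c_[np.ones(housing.data.shape[0]), housing.data]  # features with a leading bias column (assumed)
y = housing.target
theta_initial = np.zeros(x.shape[1])  # starting point for gradient descent (assumed)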
@@ -95,31 +84,18 @@ def ridge_gradient_descent(
    learning_rate = 0.01
    max_iterations = 1000

- <<<<<<< HEAD
    optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
    print(f"Optimized theta: {optimized_theta}")

    # Prediction
    def predict(x, theta):
        return np.dot(x, theta)
    y_pred = predict(x, optimized_theta)
- =======
-     optimized_theta = ridge_gradient_descent(
-         X, y, theta_initial, alpha, learning_rate, max_iterations
-     )
-     print(f"Optimized theta: {optimized_theta}")
-
-     # Prediction
-     def predict(X, theta):
-         return np.dot(X, theta)
-
-     y_pred = predict(X, optimized_theta)
- >>>>>>> 2b4bf7dba7715b721dc9597852100a44acf47566

    # Plotting the results (here we visualize predicted vs actual values)
    plt.figure(figsize=(10, 6))
-     plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
-     plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
+     plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
+     plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
    plt.xlabel("Actual values")
    plt.ylabel("Predicted values")
    plt.title("Ridge Regression: Actual vs Predicted Values")