
Commit c110a12

Updated code as per PR feedback 4
2 parents: 106b6c7 + 2b4bf7d

File tree

1 file changed: machine_learning/ridge_regression.py (+20/-6 lines changed)
@@ -2,9 +2,12 @@
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
+
 # Ridge Regression function
 # reference : https://en.wikipedia.org/wiki/Ridge_regression
-def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
+def ridge_cost_function(
+    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
+) -> float:
     """
     Compute the Ridge regression cost function with L2 regularization.
 
@@ -31,7 +34,15 @@ def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
 
     return cost
 
-def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
+
+def ridge_gradient_descent(
+    x: np.ndarray,
+    y: np.ndarray,
+    theta: np.ndarray,
+    alpha: float,
+    learning_rate: float,
+    max_iterations: int,
+) -> np.ndarray:
     """
     Perform gradient descent to minimize the cost function and fit the Ridge regression model.
 
@@ -62,9 +73,9 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
     return theta
 
 
-
 if __name__ == "__main__":
     import doctest
+
     doctest.testmod()
 
     # Load California Housing dataset
@@ -84,18 +95,21 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
     learning_rate = 0.01
     max_iterations = 1000
 
-    optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
+    optimized_theta = ridge_gradient_descent(
+        x, y, theta_initial, alpha, learning_rate, max_iterations
+    )
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
     def predict(x, theta):
         return np.dot(x, theta)
+
     y_pred = predict(x, optimized_theta)
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
-    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
+    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
+    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
