Skip to content

Commit 5f56976

Browse files
committed
Updated code as per PR feedback 8
1 parent 7e0bb71 commit 5f56976

File tree

1 file changed

+9
-23
lines changed

1 file changed

+9
-23
lines changed

machine_learning/ridge_regression.py

+9-23
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,11 @@
1+
12
import numpy as np
23
from matplotlib import pyplot as plt
34
from sklearn import datasets
45

5-
66
# Ridge Regression function
77
# reference : https://en.wikipedia.org/wiki/Ridge_regression
8-
def ridge_cost_function(
9-
x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
10-
) -> float:
8+
def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
119
"""
1210
Compute the Ridge regression cost function with L2 regularization.
1311
@@ -29,21 +27,12 @@ def ridge_cost_function(
2927
"""
3028
m = len(y)
3129
predictions = np.dot(x, theta)
32-
cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
33-
theta[1:] ** 2
34-
)
30+
cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + \
31+
(alpha / 2) * np.sum(theta[1:] ** 2)
3532

3633
return cost
3734

38-
39-
def ridge_gradient_descent(
40-
x: np.ndarray,
41-
y: np.ndarray,
42-
theta: np.ndarray,
43-
alpha: float,
44-
learning_rate: float,
45-
max_iterations: int,
46-
) -> np.ndarray:
35+
def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
4736
"""
4837
Perform gradient descent to minimize the
4938
cost function and fit the Ridge regression model.
@@ -74,10 +63,8 @@ def ridge_gradient_descent(
7463

7564
return theta
7665

77-
7866
if __name__ == "__main__":
7967
import doctest
80-
8168
doctest.testmod()
8269

8370
# Load California Housing dataset
@@ -97,9 +84,7 @@ def ridge_gradient_descent(
9784
learning_rate = 0.01
9885
max_iterations = 1000
9986

100-
optimized_theta = ridge_gradient_descent(
101-
x, y, theta_initial, alpha, learning_rate, max_iterations
102-
)
87+
optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
10388
print(f"Optimized theta: {optimized_theta}")
10489

10590
# Prediction
@@ -110,10 +95,11 @@ def predict(x, theta):
11095

11196
# Plotting the results (here we visualize predicted vs actual values)
11297
plt.figure(figsize=(10, 6))
113-
plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
114-
plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
98+
plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
99+
plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
115100
plt.xlabel("Actual values")
116101
plt.ylabel("Predicted values")
117102
plt.title("Ridge Regression: Actual vs Predicted Values")
118103
plt.legend()
119104
plt.show()
105+

0 commit comments

Comments (0)