Skip to content

Commit 2b4bf7d

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent 2722754 commit 2b4bf7d

File tree

1 file changed

+20
-6
lines changed

1 file changed

+20
-6
lines changed

machine_learning/ridge_regression.py

+20-6
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,12 @@
22
from matplotlib import pyplot as plt
33
from sklearn import datasets
44

5+
56
# Ridge Regression function
67
# reference : https://en.wikipedia.org/wiki/Ridge_regression
7-
def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
8+
def ridge_cost_function(
9+
x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
10+
) -> float:
811
"""
912
Compute the Ridge regression cost function with L2 regularization.
1013
@@ -30,7 +33,15 @@ def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha:
3033
cost += (alpha / 2) * np.sum(theta[1:] ** 2)
3134
return cost
3235

33-
def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
36+
37+
def ridge_gradient_descent(
38+
x: np.ndarray,
39+
y: np.ndarray,
40+
theta: np.ndarray,
41+
alpha: float,
42+
learning_rate: float,
43+
max_iterations: int,
44+
) -> np.ndarray:
3445
"""
3546
Perform gradient descent to minimize the cost function and fit the Ridge regression model.
3647
@@ -61,9 +72,9 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
6172
return theta
6273

6374

64-
6575
if __name__ == "__main__":
6676
import doctest
77+
6778
doctest.testmod()
6879

6980
# Load California Housing dataset
@@ -83,18 +94,21 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
8394
learning_rate = 0.01
8495
max_iterations = 1000
8596

86-
optimized_theta = ridge_gradient_descent(X, y, theta_initial, alpha, learning_rate, max_iterations)
97+
optimized_theta = ridge_gradient_descent(
98+
X, y, theta_initial, alpha, learning_rate, max_iterations
99+
)
87100
print(f"Optimized theta: {optimized_theta}")
88101

89102
# Prediction
90103
def predict(X, theta):
91104
return np.dot(X, theta)
105+
92106
y_pred = predict(X, optimized_theta)
93107

94108
# Plotting the results (here we visualize predicted vs actual values)
95109
plt.figure(figsize=(10, 6))
96-
plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
97-
plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
110+
plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
111+
plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
98112
plt.xlabel("Actual values")
99113
plt.ylabel("Predicted values")
100114
plt.title("Ridge Regression: Actual vs Predicted Values")

0 commit comments

Comments (0)