Skip to content

Commit 3547b71

Browse files
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent a99a729 commit 3547b71

File tree

1 file changed

+23
-9
lines changed

1 file changed

+23
-9
lines changed

machine_learning/ridge_regression.py

+23-9
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,13 @@
1-
21
import numpy as np
32
from matplotlib import pyplot as plt
43
from sklearn import datasets
54

5+
66
# Ridge Regression function
77
# reference : https://en.wikipedia.org/wiki/Ridge_regression
8-
def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
8+
def ridge_cost_function(
9+
x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
10+
) -> float:
911
"""
1012
Compute the Ridge regression cost function with L2 regularization.
1113
@@ -27,12 +29,21 @@ def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha:
2729
"""
2830
m = len(y)
2931
predictions = np.dot(x, theta)
30-
cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + \
31-
(alpha / 2) * np.sum(theta[1:] ** 2)
32+
cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
33+
theta[1:] ** 2
34+
)
3235

3336
return cost
3437

35-
def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
38+
39+
def ridge_gradient_descent(
40+
x: np.ndarray,
41+
y: np.ndarray,
42+
theta: np.ndarray,
43+
alpha: float,
44+
learning_rate: float,
45+
max_iterations: int,
46+
) -> np.ndarray:
3647
"""
3748
Perform gradient descent to minimize the cost function and fit the Ridge regression model.
3849
@@ -62,8 +73,10 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
6273

6374
return theta
6475

76+
6577
if __name__ == "__main__":
6678
import doctest
79+
6780
doctest.testmod()
6881

6982
# Load California Housing dataset
@@ -83,7 +96,9 @@ def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alph
8396
learning_rate = 0.01
8497
max_iterations = 1000
8598

86-
optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
99+
optimized_theta = ridge_gradient_descent(
100+
x, y, theta_initial, alpha, learning_rate, max_iterations
101+
)
87102
print(f"Optimized theta: {optimized_theta}")
88103

89104
# Prediction
@@ -94,11 +109,10 @@ def predict(x, theta):
94109

95110
# Plotting the results (here we visualize predicted vs actual values)
96111
plt.figure(figsize=(10, 6))
97-
plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
98-
plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
112+
plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
113+
plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
99114
plt.xlabel("Actual values")
100115
plt.ylabel("Predicted values")
101116
plt.title("Ridge Regression: Actual vs Predicted Values")
102117
plt.legend()
103118
plt.show()
104-

0 commit comments

Comments (0)