Commit a99a729

Updated code as per PR feedback 6
1 parent 351e83b commit a99a729

File tree

1 file changed: +5 -29 lines changed


machine_learning/ridge_regression.py (+5 -29)
@@ -3,12 +3,9 @@
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
-
 # Ridge Regression function
 # reference : https://en.wikipedia.org/wiki/Ridge_regression
-def ridge_cost_function(
-    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
-) -> float:
+def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
     """
     Compute the Ridge regression cost function with L2 regularization.
 
@@ -30,26 +27,12 @@ def ridge_cost_function(
     """
     m = len(y)
     predictions = np.dot(x, theta)
-<<<<<<< HEAD
     cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + \
            (alpha / 2) * np.sum(theta[1:] ** 2)
-=======
-    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
-        theta[1:] ** 2
-    )
->>>>>>> c8c1d9a5896ed6f64a71a2f9392eb4ecc7ffff12
 
     return cost
 
-
-def ridge_gradient_descent(
-    x: np.ndarray,
-    y: np.ndarray,
-    theta: np.ndarray,
-    alpha: float,
-    learning_rate: float,
-    max_iterations: int,
-) -> np.ndarray:
+def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
     """
     Perform gradient descent to minimize the cost function and fit the Ridge regression model.
 
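For context, a minimal self-contained sketch of the cost the kept branch computes; the tiny input data is hypothetical, and the backslash in the diff is only a line continuation. Note the L2 penalty deliberately skips the bias term theta[0].

import numpy as np

def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    # Mean squared error term plus an L2 penalty on every weight except the bias.
    m = len(y)
    predictions = np.dot(x, theta)
    return (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(theta[1:] ** 2)

# Sanity check on hypothetical data: with theta = 0 the penalty vanishes and the
# cost reduces to the plain squared-error term, (1 / 4) * (2**2 + 3**2) = 3.25.
x = np.array([[1.0, 2.0], [1.0, 3.0]])
y = np.array([2.0, 3.0])
print(ridge_cost_function(x, y, np.zeros(2), alpha=0.1))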
@@ -79,13 +62,8 @@ def ridge_gradient_descent(
 
     return theta
 
-<<<<<<< HEAD
-=======
-
->>>>>>> c8c1d9a5896ed6f64a71a2f9392eb4ecc7ffff12
 if __name__ == "__main__":
     import doctest
-
     doctest.testmod()
 
     # Load California Housing dataset
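The loop inside ridge_gradient_descent is not part of this diff, which only shows the function's tail. As an assumption consistent with the cost above, each iteration would apply the standard ridge gradient step, again leaving the bias theta[0] unpenalized:

def ridge_gradient_descent(x, y, theta, alpha, learning_rate, max_iterations):
    # Hypothetical reconstruction of the update loop this hunk only shows the end of.
    m = len(y)
    for _ in range(max_iterations):
        predictions = np.dot(x, theta)
        gradient = (1 / m) * np.dot(x.T, predictions - y)  # gradient of the MSE term
        gradient[1:] += alpha * theta[1:]  # L2 term; bias excluded, matching the cost
        theta = theta - learning_rate * gradient
    return theta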
@@ -105,9 +83,7 @@ def ridge_gradient_descent(
     learning_rate = 0.01
     max_iterations = 1000
 
-    optimized_theta = ridge_gradient_descent(
-        x, y, theta_initial, alpha, learning_rate, max_iterations
-    )
+    optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
@@ -118,8 +94,8 @@ def predict(x, theta):
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
-    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
+    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
+    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
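The predict helper named in the last hunk header does not appear in the diff either; given how y_pred is plotted against y, it is presumably a single matrix-vector product over the fitted coefficients:

def predict(x, theta):
    # Assumed implementation: linear predictions from the optimized parameters.
    return np.dot(x, theta)

y_pred = predict(x, optimized_theta)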
