
Commit d564698

Updated code as per PR feedback 5
1 parent: c110a12

1 file changed: +5 −29 lines


machine_learning/ridge_regression.py

@@ -2,12 +2,9 @@
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
-
 # Ridge Regression function
 # reference : https://en.wikipedia.org/wiki/Ridge_regression
-def ridge_cost_function(
-    x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
-) -> float:
+def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
     """
     Compute the Ridge regression cost function with L2 regularization.
 
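The docstring above describes an L2-regularized least-squares cost. As a point of reference only, here is a minimal sketch of what a function with this signature typically computes; the real body sits outside this hunk, and details such as excluding the bias weight theta[0] from the penalty are assumptions, not taken from the file.

import numpy as np

def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    # Mean squared error plus an L2 penalty on the weights.
    # Skipping theta[0] (the bias) in the penalty is an assumption of this sketch.
    m = len(y)
    residuals = x @ theta - y
    return float(np.sum(residuals**2) / (2 * m) + (alpha / 2) * np.sum(theta[1:] ** 2))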
@@ -34,15 +31,7 @@ def ridge_cost_function(
 
     return cost
 
-
-def ridge_gradient_descent(
-    x: np.ndarray,
-    y: np.ndarray,
-    theta: np.ndarray,
-    alpha: float,
-    learning_rate: float,
-    max_iterations: int,
-) -> np.ndarray:
+def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
     """
     Perform gradient descent to minimize the cost function and fit the Ridge regression model.
 
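The collapsed signature above lists all six parameters on one line. For orientation, a sketch of a gradient-descent loop matching this signature follows; the batch update with an alpha * theta penalty term is an assumption for illustration, since the function body is not part of this hunk.

import numpy as np

def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
    # Plain batch gradient descent on the ridge cost (sketch only).
    m = len(y)
    for _ in range(max_iterations):
        gradient = (x.T @ (x @ theta - y)) / m + alpha * theta
        theta = theta - learning_rate * gradient
    return theta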
@@ -73,9 +62,9 @@ def ridge_gradient_descent(
     return theta
 
 
+
 if __name__ == "__main__":
     import doctest
-
     doctest.testmod()
 
     # Load California Housing dataset
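The context comment above marks where the script loads the California Housing data. The exact preprocessing is not visible in this hunk; a plausible version, assuming sklearn's fetch_california_housing and a prepended bias column, would be:

import numpy as np
from sklearn import datasets

# Assumed data preparation; the file's actual column handling and alpha value
# are not shown in this hunk.
data = datasets.fetch_california_housing()
x = np.c_[np.ones(data.data.shape[0]), data.data]  # prepend a bias column
y = data.target
theta_initial = np.zeros(x.shape[1])
alpha = 0.1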
@@ -95,31 +84,18 @@ def ridge_gradient_descent(
     learning_rate = 0.01
     max_iterations = 1000
 
-<<<<<<< HEAD
     optimized_theta = ridge_gradient_descent(x, y, theta_initial, alpha, learning_rate, max_iterations)
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
     def predict(x, theta):
         return np.dot(x, theta)
     y_pred = predict(x, optimized_theta)
-=======
-    optimized_theta = ridge_gradient_descent(
-        X, y, theta_initial, alpha, learning_rate, max_iterations
-    )
-    print(f"Optimized theta: {optimized_theta}")
-
-    # Prediction
-    def predict(X, theta):
-        return np.dot(X, theta)
-
-    y_pred = predict(X, optimized_theta)
->>>>>>> 2b4bf7dba7715b721dc9597852100a44acf47566
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
-    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
+    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
+    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
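This last hunk drops leftover merge-conflict markers, keeping the HEAD side that uses lowercase x (consistent with the function signatures earlier in the diff), and switches the two plot calls to single-quoted strings. A hypothetical follow-up check, not part of the commit, would be to print a numeric error summary next to the predicted-vs-actual scatter plot, using the y and y_pred arrays from the script:

import numpy as np

# Hypothetical addition (not in the commit): numeric fit summary.
mse = np.mean((y - y_pred) ** 2)
print(f"Mean squared error: {mse:.4f}")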
