
Commit 2722754

Updated code as per PR feedback 2
1 parent b7f49ae commit 2722754

File tree

1 file changed (+4, -31 lines)


machine_learning/ridge_regression.py (+4, -31)
@@ -2,16 +2,9 @@
 from matplotlib import pyplot as plt
 from sklearn import datasets
 
-
 # Ridge Regression function
 # reference : https://en.wikipedia.org/wiki/Ridge_regression
-<<<<<<< HEAD
 def ridge_cost_function(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
-=======
-def ridge_cost_function(
-    X: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float
-) -> float:
->>>>>>> a4f585c89d4426f2ddace3ead610ff1742922713
     """
     Compute the Ridge regression cost function with L2 regularization.
 
@@ -33,28 +26,11 @@ def ridge_cost_function(
     """
     m = len(y)
     predictions = np.dot(X, theta)
-<<<<<<< HEAD
     cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2)
     cost += (alpha / 2) * np.sum(theta[1:] ** 2)
     return cost
 
 def ridge_gradient_descent(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float, learning_rate: float, max_iterations: int) -> np.ndarray:
-=======
-    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2) + (alpha / 2) * np.sum(
-        theta[1:] ** 2
-    )
-    return cost
-
-
-def ridge_gradient_descent(
-    X: np.ndarray,
-    y: np.ndarray,
-    theta: np.ndarray,
-    alpha: float,
-    learning_rate: float,
-    max_iterations: int,
-) -> np.ndarray:
->>>>>>> a4f585c89d4426f2ddace3ead610ff1742922713
     """
     Perform gradient descent to minimize the cost function and fit the Ridge regression model.
 
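The retained cost lines implement the standard ridge objective: a mean squared error term plus an L2 penalty that leaves the bias weight theta[0] unregularized. A minimal runnable sketch of that computation, with the parameter named consistently (the committed signature spells the parameter lowercase x while the body references uppercase X):

import numpy as np

def ridge_cost(x: np.ndarray, y: np.ndarray, theta: np.ndarray, alpha: float) -> float:
    # Squared-error term averaged over the m samples: (1 / 2m) * sum((x.theta - y)^2)
    m = len(y)
    predictions = np.dot(x, theta)
    cost = (1 / (2 * m)) * np.sum((predictions - y) ** 2)
    # L2 penalty on the weights, excluding the bias theta[0]
    cost += (alpha / 2) * np.sum(theta[1:] ** 2)
    return cost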
@@ -85,6 +61,7 @@ def ridge_gradient_descent(
     return theta
 
 
+
 if __name__ == "__main__":
     import doctest
     doctest.testmod()
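The body of ridge_gradient_descent sits outside these hunks, so only its signature and final return theta are visible here. A sketch of the batch update such a function typically performs, under the assumption that the bias is excluded from the penalty (an assumption, not the file's verbatim code):

import numpy as np

def ridge_gradient_step(x: np.ndarray, y: np.ndarray, theta: np.ndarray,
                        alpha: float, learning_rate: float) -> np.ndarray:
    # Gradient of the squared-error term, averaged over the m samples
    m = len(y)
    gradient = np.dot(x.T, np.dot(x, theta) - y) / m
    # Gradient of the L2 penalty; theta[0] left unregularized (assumption)
    penalty = alpha * theta
    penalty[0] = 0.0
    # One descent step against the combined gradient
    return theta - learning_rate * (gradient + penalty)

Calling this step max_iterations times from an initial theta reproduces the loop the committed function presumably runs.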
@@ -106,24 +83,20 @@ def ridge_gradient_descent
     learning_rate = 0.01
     max_iterations = 1000
 
-    optimized_theta = ridge_gradient_descent(
-        X, y, theta_initial, alpha, learning_rate, max_iterations
-    )
+    optimized_theta = ridge_gradient_descent(X, y, theta_initial, alpha, learning_rate, max_iterations)
     print(f"Optimized theta: {optimized_theta}")
 
     # Prediction
     def predict(X, theta):
         return np.dot(X, theta)
-
     y_pred = predict(X, optimized_theta)
 
     # Plotting the results (here we visualize predicted vs actual values)
     plt.figure(figsize=(10, 6))
-    plt.scatter(y, y_pred, color="b", label="Predictions vs Actual")
-    plt.plot([min(y), max(y)], [min(y), max(y)], color="r", label="Perfect Fit")
+    plt.scatter(y, y_pred, color='b', label='Predictions vs Actual')
+    plt.plot([min(y), max(y)], [min(y), max(y)], color='r', label='Perfect Fit')
     plt.xlabel("Actual values")
     plt.ylabel("Predicted values")
     plt.title("Ridge Regression: Actual vs Predicted Values")
     plt.legend()
-    #plots on a graph
     plt.show()
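The __main__ block references X, y, theta_initial, and alpha from lines outside this diff. A self-contained sketch of how the demo plausibly fits together; the dataset choice, the prepended bias column, and alpha = 0.1 are assumptions for illustration, not values taken from the file:

import numpy as np
from sklearn import datasets

# Hypothetical setup: the committed file's dataset choice is not visible in this diff
data = datasets.load_diabetes()
X = np.c_[np.ones(len(data.target)), data.data]  # prepend a bias column (assumption)
y = data.target
theta = np.zeros(X.shape[1])
alpha, learning_rate, max_iterations = 0.1, 0.01, 1000

# Batch gradient descent on the ridge objective
m = len(y)
for _ in range(max_iterations):
    gradient = np.dot(X.T, np.dot(X, theta) - y) / m  # squared-error gradient
    penalty = alpha * theta
    penalty[0] = 0.0  # bias left unpenalized
    theta -= learning_rate * (gradient + penalty)

print(f"Optimized theta: {theta}")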
