Implementation of gradient descent algorithm using momentum for minimizing the cost of a linear hypothesis
function.
"""
+
import numpy as np

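For reference while reading the hunks below: the update rule this module implements is the standard heavy-ball momentum form, with μ the `MOMENTUM` coefficient and α the `LEARNING_RATE`:

```math
v \leftarrow \mu v + \nabla J(\theta), \qquad \theta \leftarrow \theta - \alpha v
```

This matches the `velocity[i]` and `temp_parameter_vector[i]` lines in the training loop further down.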
# List of input, output pairs
⋮
# Initialize velocity (for momentum)
velocity = [0] * len(parameter_vector)

+
def _error(example_no, data_set="train"):
    """
    Calculate the error (difference between predicted and actual output) for a given example.

    Args:
        example_no (int): Index of the example in the dataset.
-        data_set (str): The dataset to use, either "train" or "test".
+        data_set (str): The dataset to use, either "train" or "test".

    Returns:
        float: The difference between the predicted output and the actual output.
    """
-    return calculate_hypothesis_value(example_no, data_set) - output(example_no, data_set)
+    return calculate_hypothesis_value(example_no, data_set) - output(
+        example_no, data_set
+    )


def _hypothesis_value(data_input_tuple):
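`calculate_hypothesis_value` and `output` are defined outside this hunk, but for a linear hypothesis the error reduces to a dot product minus the label. A minimal sketch of what `_error` computes, assuming a bias-as-first-parameter layout and made-up data (neither is shown in this diff):

```python
# Hypothetical layout: parameter_vector[0] is the bias, the rest are weights.
parameter_vector = [2.0, 4.0, 1.0, 5.0]  # illustrative values
example = ((5, 2, 3), 15)  # (input tuple, actual output) -- made-up pair

features, actual = example
predicted = parameter_vector[0] + sum(
    w * x for w, x in zip(parameter_vector[1:], features)
)
print(predicted - actual)  # 39 - 15 = 24, the value _error would return
```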
@@ -125,8 +129,13 @@ def run_gradient_descent_with_momentum():
            cost_derivative = get_cost_derivative(i - 1)
            velocity[i] = MOMENTUM * velocity[i] + cost_derivative
            temp_parameter_vector[i] = parameter_vector[i] - LEARNING_RATE * velocity[i]
-
-        if np.allclose(parameter_vector, temp_parameter_vector, atol=absolute_error_limit, rtol=relative_error_limit):
+
+        if np.allclose(
+            parameter_vector,
+            temp_parameter_vector,
+            atol=absolute_error_limit,
+            rtol=relative_error_limit,
+        ):
            break
        parameter_vector = temp_parameter_vector
    print(f"Number of iterations: {iteration}")
@@ -140,7 +149,10 @@ def test_gradient_descent():
        print(f"Actual output value: {output(i, 'test')}")
        print(f"Hypothesis output: {calculate_hypothesis_value(i, 'test')}")

+
if __name__ == "__main__":
    run_gradient_descent_with_momentum()
-    print("\nTesting gradient descent with momentum for a linear hypothesis function.\n")
+    print(
+        "\nTesting gradient descent with momentum for a linear hypothesis function.\n"
+    )
    test_gradient_descent()
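One detail worth knowing about the stopping rule used in the loop: `np.allclose(a, b, rtol=..., atol=...)` passes when `|a - b| <= atol + rtol * |b|` holds elementwise, so with a zero relative limit the absolute limit alone decides convergence. A quick illustration (the thresholds here are illustrative; the module's limits are defined above these hunks):

```python
import numpy as np

old = np.array([2.0, 4.0])
new = np.array([2.0000015, 4.0000015])  # every component moved by 1.5e-6

print(np.allclose(old, new, atol=2e-6, rtol=0))  # True  -> loop would break
print(np.allclose(old, new, atol=1e-6, rtol=0))  # False -> keep iterating
```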