
Commit 785622c

Added return type
1 parent e7939b2 commit 785622c

File tree

1 file changed: +8 -8 lines

machine_learning/gradient_descent_momentum.py

+8 -8
@@ -23,7 +23,7 @@
 velocity = [0] * len(parameter_vector)
 
 
-def _error(example_no, data_set="train"):
+def _error(example_no, data_set="train") -> float:
     """
     Calculate the error for a given example.
     Args:
@@ -37,7 +37,7 @@ def _error(example_no, data_set="train"):
     return hypo_value - output_value
 
 
-def _hypothesis_value(data_input_tuple):
+def _hypothesis_value(data_input_tuple) -> float:
     """
     Compute the hypothesis value (predicted output) for a given input tuple.
     Args:
@@ -52,7 +52,7 @@ def _hypothesis_value(data_input_tuple):
     return hyp_val
 
 
-def output(example_no, data_set):
+def output(example_no, data_set) -> int:
     """
     Retrieve the actual output (label) for a given example
     from the specified dataset.
@@ -69,7 +69,7 @@ def output(example_no, data_set):
     return None
 
 
-def calculate_hypothesis_value(example_no, data_set):
+def calculate_hypothesis_value(example_no, data_set) -> float:
     """
     Calculate the hypothesis value (predicted output) for a given example.
     Args:
@@ -85,7 +85,7 @@ def calculate_hypothesis_value(example_no, data_set):
     return None
 
 
-def summation_of_cost_derivative(index, end=m):
+def summation_of_cost_derivative(index, end=m) -> float:
     """
     Calculate the summation of the cost derivative for a given index.
     Args:
@@ -104,7 +104,7 @@ def summation_of_cost_derivative(index, end=m):
     return summation_value
 
 
-def get_cost_derivative(index):
+def get_cost_derivative(index) -> float:
     """
     Compute the cost derivative with respect to a parameter.
     Args:
@@ -115,7 +115,7 @@ def get_cost_derivative(index):
     return summation_of_cost_derivative(index, m) / m
 
 
-def run_gradient_descent_with_momentum():
+def run_gradient_descent_with_momentum() -> None:
     """
     Run gradient descent with momentum to minimize the cost function.
     This function updates the parameter vector using velocity and the learning rate.
@@ -143,7 +143,7 @@ def run_gradient_descent_with_momentum():
     print(f"Number of iterations: {iteration}")
 
 
-def test_gradient_descent():
+def test_gradient_descent() -> None:
     """
     Test the trained model on the test dataset and print actual vs predicted outputs.
     """
