
Commit dc75522

Committed Oct 8, 2024
Fixed more return type issues
1 parent 785622c commit dc75522
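
The "return type issues" of the title are annotation mismatches, two kinds of which appear in the diff below: accumulators initialised with an int literal inside functions annotated -> float, and return None fall-through branches in functions annotated -> int or -> float. The short sketch that follows reproduces both patterns and the style of fix applied in this commit; it is illustrative only, and safe_lookup and running_total are invented names rather than code from the repository.

def safe_lookup(values: list[int], index: int) -> int:
    """Return values[index], or a sentinel when the index is out of range."""
    if 0 <= index < len(values):
        return values[index]
    # A bare "return None" here would contradict the declared -> int return type,
    # which is the kind of mismatch a checker such as mypy reports.
    # Returning a sentinel (or raising an exception) keeps the annotation honest.
    return -1


def running_total(values: list[float]) -> float:
    # Starting from the int literal 0 would make the accumulator an int and can
    # trip a strict checker once floats are added to it; 0.0 keeps it a float.
    total = 0.0
    for value in values:
        total += value
    return total


print(safe_lookup([10, 20, 30], 5))  # -1
print(running_total([1.5, 2.5]))     # 4.0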

File tree

2 files changed: +11 -10 lines changed

machine_learning/frequent_pattern_growth.py

Lines changed: 3 additions & 1 deletion
@@ -240,7 +240,9 @@ def ascend_tree(leaf_node: TreeNode, prefix_path: list[str]) -> None:
         ascend_tree(leaf_node.parent, prefix_path)
 
 
-def find_prefix_path(base_pat: frozenset, tree_node: TreeNode | None) -> dict:  # noqa: ARG001
+def find_prefix_path(
+    base_pat: frozenset, tree_node: TreeNode | None
+) -> dict:  # noqa: ARG001
     """
     Find the conditional pattern base for a given base pattern.
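
This hunk only reflows the find_prefix_path signature across three lines so that the trailing noqa: ARG001 marker no longer pushes the definition past the line-length limit; behaviour is unchanged. For context, the standalone sketch below shows one way a conditional pattern base can be gathered by ascending an FP-tree from every node of the base item. SimpleNode and the tiny example tree are invented for illustration and are not the module's TreeNode or data.

from collections import defaultdict


class SimpleNode:
    # Illustrative stand-in for an FP-tree node; not the module's TreeNode class.
    def __init__(self, name: str, count: int, parent: "SimpleNode | None") -> None:
        self.name = name
        self.count = count
        self.parent = parent


def ascend(node: SimpleNode, path: list[str]) -> None:
    # Walk from a node up to (but excluding) the root, recording item names.
    if node.parent is not None:
        path.append(node.name)
        ascend(node.parent, path)


def conditional_pattern_base(nodes: list[SimpleNode]) -> dict[frozenset, int]:
    # Map each prefix path (minus the base item itself) to its support count.
    base: dict[frozenset, int] = defaultdict(int)
    for node in nodes:
        path: list[str] = []
        ascend(node, path)
        if len(path) > 1:  # drop the base item, keep only its prefix
            base[frozenset(path[1:])] += node.count
    return dict(base)


# Tiny example: a single branch root -> f -> c -> a -> m, where "m" occurs twice.
root = SimpleNode("null", 1, None)
f_node = SimpleNode("f", 4, root)
c_node = SimpleNode("c", 3, f_node)
a_node = SimpleNode("a", 3, c_node)
m_node = SimpleNode("m", 2, a_node)
print(conditional_pattern_base([m_node]))  # {frozenset({'f', 'c', 'a'}): 2}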

machine_learning/gradient_descent_momentum.py

Lines changed: 8 additions & 9 deletions
@@ -14,14 +14,12 @@
     ((11, 12, 13), 41),
 )
 test_data = (((515, 22, 13), 555), ((61, 35, 49), 150))
-parameter_vector = [2, 4, 1, 5]
+parameter_vector = [0.0, 0.0, 0.0, 0.0]
+velocity = [0.0] * len(parameter_vector)
 m = len(train_data)
 LEARNING_RATE = 0.009
 MOMENTUM = 0.9
 
-# Initialize velocity (for momentum)
-velocity = [0] * len(parameter_vector)
-
 
 def _error(example_no, data_set="train") -> float:
     """
@@ -45,7 +43,7 @@ def _hypothesis_value(data_input_tuple) -> float:
     Returns:
         float: The hypothesis value for the given input.
     """
-    hyp_val = 0
+    hyp_val = 0.0
     for i in range(len(parameter_vector) - 1):
         hyp_val += data_input_tuple[i] * parameter_vector[i + 1]
     hyp_val += parameter_vector[0]
@@ -66,7 +64,7 @@ def output(example_no, data_set) -> int:
         return train_data[example_no][1]
     elif data_set == "test":
         return test_data[example_no][1]
-    return None
+    return -1
 
 
 def calculate_hypothesis_value(example_no, data_set) -> float:
@@ -82,7 +80,7 @@ def calculate_hypothesis_value(example_no, data_set) -> float:
         return _hypothesis_value(train_data[example_no][0])
     elif data_set == "test":
         return _hypothesis_value(test_data[example_no][0])
-    return None
+    return -1
 
 
 def summation_of_cost_derivative(index, end=m) -> float:
@@ -95,7 +93,7 @@ def summation_of_cost_derivative(index, end=m) -> float:
     Returns:
         float: The summation of the cost derivatives for the given parameter.
     """
-    summation_value = 0
+    summation_value = 0.0
     for i in range(end):
         if index == -1:
             summation_value += _error(i)
@@ -124,9 +122,10 @@ def run_gradient_descent_with_momentum() -> None:
     absolute_error_limit = 0.000002
     relative_error_limit = 0
     iteration = 0
+
     while True:
         iteration += 1
-        temp_parameter_vector = [0] * len(parameter_vector)
+        temp_parameter_vector = [0.0] * len(parameter_vector)
         for i in range(len(parameter_vector)):
             cost_derivative = get_cost_derivative(i - 1)
             velocity[i] = MOMENTUM * velocity[i] + cost_derivative
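
Besides switching the literals to floats, the earlier hunk moves the velocity buffer up next to parameter_vector, and this one makes temp_parameter_vector a list of floats so it matches the float parameters it temporarily holds. The velocity update itself is unchanged: each iteration decays the previous velocity by MOMENTUM and adds the current cost derivative, and the updated velocity then drives the parameter step. The standalone sketch below reruns that same momentum rule on a one-parameter linear fit; the data, constants, and names are illustrative and are not taken from the module.

# Gradient descent with momentum on a one-parameter model y = w * x.
train_data = [(1.0, 2.0), (2.0, 4.0), (3.0, 6.0)]  # (x, y) pairs, true slope 2.0
LEARNING_RATE = 0.01
MOMENTUM = 0.9

w = 0.0          # parameter, a float from the start
velocity = 0.0   # momentum buffer, also a float


def cost_derivative(weight: float) -> float:
    # d/dw of the mean squared error 0.5 * mean((w * x - y) ** 2).
    return sum((weight * x - y) * x for x, y in train_data) / len(train_data)


for _ in range(500):
    grad = cost_derivative(w)
    # The velocity update mirrors the line shown in the diff; the parameter step
    # that follows it is this sketch's own plain momentum step.
    velocity = MOMENTUM * velocity + grad
    w -= LEARNING_RATE * velocity

print(round(w, 3))  # settles near the true slope, roughly 2.0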
