
Commit 796d80b

Update genetic_algorithm_optimization.py
1 parent 0b06dae commit 796d80b

File tree: 1 file changed, +11 -22 lines changed

genetic_algorithm/genetic_algorithm_optimization.py

@@ -1,12 +1,11 @@
-from collections.abc import Callable  # Fixes the UP035 warning
-import numpy as np
-
+from collections.abc import Callable  # Sorted import
+import numpy as np  # Sorted import
 
 class GeneticAlgorithmOptimizer:
     def __init__(
         self,
-        objective_function: Callable[..., float],
-        variable_bounds: list[tuple[float, float]],
+        objective_function: Callable[..., float],
+        variable_bounds: list[tuple[float, float]],
         population_size: int = 100,
         max_generations: int = 500,
         crossover_probability: float = 0.9,
@@ -19,9 +18,7 @@ def __init__(
         self.crossover_probability = crossover_probability
         self.mutation_probability = mutation_probability
         self.num_variables = len(variable_bounds)
-
-        # Initialize the random number generator
-        self.rng = np.random.default_rng()
+        self.rng = np.random.default_rng()  # Initialize random generator
 
     def generate_initial_population(self) -> np.ndarray:
         """
@@ -55,19 +52,14 @@ def perform_crossover(
     ) -> tuple[np.ndarray, np.ndarray]:
         """
         Perform one-point crossover between two parents to create offspring.
-        Skip crossover for single-variable functions.
         """
         if self.num_variables == 1:
             return parent1, parent2
 
         if self.rng.random() < self.crossover_probability:
             crossover_point = self.rng.integers(1, self.num_variables)
-            child1 = np.concatenate(
-                (parent1[:crossover_point], parent2[crossover_point:])
-            )
-            child2 = np.concatenate(
-                (parent2[:crossover_point], parent1[crossover_point:])
-            )
+            child1 = np.concatenate((parent1[:crossover_point], parent2[crossover_point:]))
+            child2 = np.concatenate((parent2[:crossover_point], parent1[crossover_point:]))
             return child1, child2
         return parent1, parent2
 
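For reference, a minimal sketch of what the condensed one-point crossover lines compute, using two illustrative three-variable parents (the array values and the standalone rng below are examples, not taken from the file):

import numpy as np

rng = np.random.default_rng()
parent1 = np.array([1.0, 2.0, 3.0])
parent2 = np.array([4.0, 5.0, 6.0])

# Cut point drawn from [1, num_variables), as in perform_crossover
crossover_point = rng.integers(1, parent1.size)
child1 = np.concatenate((parent1[:crossover_point], parent2[crossover_point:]))
child2 = np.concatenate((parent2[:crossover_point], parent1[crossover_point:]))
# e.g. with crossover_point == 2: child1 is [1.0, 2.0, 6.0] and child2 is [4.0, 5.0, 3.0]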
@@ -79,7 +71,7 @@ def apply_mutation(self, individual: np.ndarray) -> np.ndarray:
         mutation_index = self.rng.integers(0, self.num_variables)
         individual[mutation_index] = self.rng.uniform(
             self.variable_bounds[mutation_index, 0],
-            self.variable_bounds[mutation_index, 1],
+            self.variable_bounds[mutation_index, 1]
         )
         return individual
 
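Similarly, a small sketch of the uniform mutation step these lines belong to, assuming the class stores variable_bounds as a NumPy array so that the two-dimensional indexing works (the bounds and individual below are illustrative):

import numpy as np

rng = np.random.default_rng()
variable_bounds = np.array([(-10.0, 10.0), (-10.0, 10.0)])  # assumed array form of the bounds
individual = np.array([3.5, -7.2])

# Resample one randomly chosen gene within its own bounds, as in apply_mutation
mutation_index = rng.integers(0, individual.size)
individual[mutation_index] = rng.uniform(
    variable_bounds[mutation_index, 0], variable_bounds[mutation_index, 1]
)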
@@ -113,15 +105,11 @@ def optimize(self) -> tuple[np.ndarray, float]:
             best_fitness_value = fitness_values[min_fitness_index]
             best_solution = population[min_fitness_index]
 
-            print(
-                f"Generation {generation + 1}, Best Fitness Value: {best_fitness_value}"
-            )
+            print(f"Generation {generation + 1}, Best Fitness Value: {best_fitness_value}")
 
         return best_solution, best_fitness_value
 
-
 if __name__ == "__main__":
-
     def objective_function(x: float, y: float) -> float:
         """
         Example objective function to minimize x^2 + y^2
@@ -131,7 +119,8 @@ def objective_function(x: float, y: float) -> float:
     variable_bounds: list[tuple[float, float]] = [(-10, 10), (-10, 10)]
 
     optimizer = GeneticAlgorithmOptimizer(
-        objective_function=objective_function, variable_bounds=variable_bounds
+        objective_function=objective_function,
+        variable_bounds=variable_bounds
     )
     best_solution, best_fitness_value = optimizer.optimize()
 
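For context, an end-to-end sketch of how the optimizer in this file is driven, mirroring the __main__ block shown above (the import path follows this repository layout and the final print is illustrative; both are assumptions, not part of the diff):

from genetic_algorithm.genetic_algorithm_optimization import GeneticAlgorithmOptimizer


def objective_function(x: float, y: float) -> float:
    # Example objective from the file: minimize x^2 + y^2
    return x**2 + y**2


variable_bounds: list[tuple[float, float]] = [(-10, 10), (-10, 10)]

optimizer = GeneticAlgorithmOptimizer(
    objective_function=objective_function,
    variable_bounds=variable_bounds,
)
best_solution, best_fitness_value = optimizer.optimize()
print(f"Best Solution: {best_solution}, Best Fitness Value: {best_fitness_value}")  # illustrative output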