@@ -1,12 +1,11 @@
-from collections.abc import Callable  # Fixes the UP035 warning
-import numpy as np
-
+from collections.abc import Callable  # Sorted import
+import numpy as np  # Sorted import

 class GeneticAlgorithmOptimizer:
     def __init__(
         self,
-        objective_function: Callable[..., float],
-        variable_bounds: list[tuple[float, float]],
+        objective_function: Callable[..., float],
+        variable_bounds: list[tuple[float, float]],
         population_size: int = 100,
         max_generations: int = 500,
         crossover_probability: float = 0.9,
@@ -19,9 +18,7 @@ def __init__(
         self.crossover_probability = crossover_probability
         self.mutation_probability = mutation_probability
         self.num_variables = len(variable_bounds)
-
-        # Initialize the random number generator
-        self.rng = np.random.default_rng()
+        self.rng = np.random.default_rng()  # Initialize random generator

     def generate_initial_population(self) -> np.ndarray:
         """
@@ -55,19 +52,14 @@ def perform_crossover(
     ) -> tuple[np.ndarray, np.ndarray]:
         """
         Perform one-point crossover between two parents to create offspring.
-        Skip crossover for single-variable functions.
         """
         if self.num_variables == 1:
             return parent1, parent2

         if self.rng.random() < self.crossover_probability:
             crossover_point = self.rng.integers(1, self.num_variables)
-            child1 = np.concatenate(
-                (parent1[:crossover_point], parent2[crossover_point:])
-            )
-            child2 = np.concatenate(
-                (parent2[:crossover_point], parent1[crossover_point:])
-            )
+            child1 = np.concatenate((parent1[:crossover_point], parent2[crossover_point:]))
+            child2 = np.concatenate((parent2[:crossover_point], parent1[crossover_point:]))
             return child1, child2
         return parent1, parent2

@@ -79,7 +71,7 @@ def apply_mutation(self, individual: np.ndarray) -> np.ndarray:
             mutation_index = self.rng.integers(0, self.num_variables)
             individual[mutation_index] = self.rng.uniform(
                 self.variable_bounds[mutation_index, 0],
-                self.variable_bounds[mutation_index, 1],
+                self.variable_bounds[mutation_index, 1]
             )
         return individual

@@ -113,15 +105,11 @@ def optimize(self) -> tuple[np.ndarray, float]:
             best_fitness_value = fitness_values[min_fitness_index]
             best_solution = population[min_fitness_index]

-            print(
-                f"Generation {generation + 1}, Best Fitness Value: {best_fitness_value}"
-            )
+            print(f"Generation {generation + 1}, Best Fitness Value: {best_fitness_value}")

         return best_solution, best_fitness_value

-
 if __name__ == "__main__":
-
     def objective_function(x: float, y: float) -> float:
         """
         Example objective function to minimize x^2 + y^2
@@ -131,7 +119,8 @@ def objective_function(x: float, y: float) -> float:
     variable_bounds: list[tuple[float, float]] = [(-10, 10), (-10, 10)]

     optimizer = GeneticAlgorithmOptimizer(
-        objective_function=objective_function, variable_bounds=variable_bounds
+        objective_function=objective_function,
+        variable_bounds=variable_bounds
     )

     best_solution, best_fitness_value = optimizer.optimize()

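The crossover and mutation steps touched by this diff are easy to exercise in isolation. The snippet below is a minimal standalone sketch, not part of the PR: the parent arrays, the fixed seed, and the variable_bounds array are illustrative assumptions (apply_mutation indexes variable_bounds like a NumPy array, so an ndarray of bounds is assumed here).

import numpy as np

rng = np.random.default_rng(seed=0)  # fixed seed only so this sketch is reproducible
parent1 = np.array([1.0, 2.0, 3.0, 4.0])      # illustrative individuals
parent2 = np.array([10.0, 20.0, 30.0, 40.0])

# One-point crossover, mirroring perform_crossover: pick a cut point strictly
# inside the chromosome, then each child takes the head of one parent and the
# tail of the other.
crossover_point = rng.integers(1, parent1.size)
child1 = np.concatenate((parent1[:crossover_point], parent2[crossover_point:]))
child2 = np.concatenate((parent2[:crossover_point], parent1[crossover_point:]))

# Mutation, mirroring apply_mutation: resample one gene uniformly within its bounds.
variable_bounds = np.array([(-10.0, 10.0)] * parent1.size)  # assumed ndarray form of the bounds
mutation_index = rng.integers(0, parent1.size)
child1[mutation_index] = rng.uniform(
    variable_bounds[mutation_index, 0],
    variable_bounds[mutation_index, 1]
)

print(crossover_point, child1, child2)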