1
+ from collections .abc import Callable # Fixes the UP035 warning
1
2
import numpy as np
2
- from typing import Callable , List , Tuple
3
-
4
3
5
4
class GeneticAlgorithmOptimizer :
6
5
def __init__ (
7
6
self ,
8
- objective_function : Callable [..., float ],
9
- variable_bounds : List [ Tuple [float , float ]],
7
+ objective_function : Callable [..., float ],
8
+ variable_bounds : list [ tuple [float , float ]],
10
9
population_size : int = 100 ,
11
10
max_generations : int = 500 ,
12
11
crossover_probability : float = 0.9 ,
@@ -26,33 +25,16 @@ def __init__(
26
25
def generate_initial_population (self ) -> np .ndarray :
27
26
"""
28
27
Generate a population of random solutions within the given variable bounds.
29
-
30
- >>> optimizer = GeneticAlgorithmOptimizer(
31
- ... objective_function=lambda x: x**2,
32
- ... variable_bounds=[(-10, 10)]
33
- ... )
34
- >>> population = optimizer.generate_initial_population()
35
- >>> population.shape == (optimizer.population_size, optimizer.num_variables)
36
- True
37
28
"""
38
29
return self .rng .uniform (
39
30
low = self .variable_bounds [:, 0 ],
40
31
high = self .variable_bounds [:, 1 ],
41
32
size = (self .population_size , self .num_variables ),
42
33
)
43
34
44
- def evaluate_fitness (self , individual : List [float ]) -> float :
35
+ def evaluate_fitness (self , individual : list [float ]) -> float :
45
36
"""
46
37
Evaluate the fitness of an individual by computing the value of the objective function.
47
-
48
- >>> optimizer = GeneticAlgorithmOptimizer(
49
- ... objective_function=lambda x: x**2,
50
- ... variable_bounds=[(-10, 10)]
51
- ... )
52
- >>> optimizer.evaluate_fitness([2])
53
- 4
54
- >>> optimizer.evaluate_fitness([0])
55
- 0
56
38
"""
57
39
return self .objective_function (* individual )
58
40
@@ -61,16 +43,6 @@ def select_parent(
61
43
) -> np .ndarray :
62
44
"""
63
45
Select a parent using tournament selection based on fitness values.
64
-
65
- >>> optimizer = GeneticAlgorithmOptimizer(
66
- ... objective_function=lambda x: x**2,
67
- ... variable_bounds=[(-10, 10)]
68
- ... )
69
- >>> population = optimizer.generate_initial_population()
70
- >>> fitness_values = np.array([optimizer.evaluate_fitness(ind) for ind in population])
71
- >>> parent = optimizer.select_parent(population, fitness_values)
72
- >>> len(parent) == optimizer.num_variables
73
- True
74
46
"""
75
47
selected_indices = self .rng .choice (
76
48
range (self .population_size ), size = 2 , replace = False
@@ -79,57 +51,34 @@ def select_parent(
79
51
80
52
def perform_crossover (
81
53
self , parent1 : np .ndarray , parent2 : np .ndarray
82
- ) -> Tuple [np .ndarray , np .ndarray ]:
54
+ ) -> tuple [np .ndarray , np .ndarray ]:
83
55
"""
84
56
Perform one-point crossover between two parents to create offspring.
85
57
Skip crossover for single-variable functions.
86
-
87
- >>> optimizer = GeneticAlgorithmOptimizer(
88
- ... objective_function=lambda x: x**2,
89
- ... variable_bounds=[(-10, 10)]
90
- ... )
91
- >>> parent1 = [1]
92
- >>> parent2 = [2]
93
- >>> child1, child2 = optimizer.perform_crossover(parent1, parent2)
94
- >>> child1 == parent1 and child2 == parent2
95
- True
96
58
"""
97
59
if self .num_variables == 1 :
98
60
return parent1 , parent2
99
61
100
62
if self .rng .random () < self .crossover_probability :
101
63
crossover_point = self .rng .integers (1 , self .num_variables )
102
- child1 = np .concatenate (
103
- (parent1 [:crossover_point ], parent2 [crossover_point :])
104
- )
105
- child2 = np .concatenate (
106
- (parent2 [:crossover_point ], parent1 [crossover_point :])
107
- )
64
+ child1 = np .concatenate ((parent1 [:crossover_point ], parent2 [crossover_point :]))
65
+ child2 = np .concatenate ((parent2 [:crossover_point ], parent1 [crossover_point :]))
108
66
return child1 , child2
109
67
return parent1 , parent2
110
68
111
69
def apply_mutation (self , individual : np .ndarray ) -> np .ndarray :
112
70
"""
113
71
Apply mutation to an individual based on the mutation probability.
114
-
115
- >>> optimizer = GeneticAlgorithmOptimizer(
116
- ... objective_function=lambda x: x**2,
117
- ... variable_bounds=[(-10, 10)]
118
- ... )
119
- >>> individual = [1]
120
- >>> mutated_individual = optimizer.apply_mutation(individual.copy())
121
- >>> len(mutated_individual) == len(individual)
122
- True
123
72
"""
124
73
if self .rng .random () < self .mutation_probability :
125
74
mutation_index = self .rng .integers (0 , self .num_variables )
126
75
individual [mutation_index ] = self .rng .uniform (
127
- self .variable_bounds [mutation_index , 0 ],
128
- self .variable_bounds [mutation_index , 1 ],
76
+ self .variable_bounds [mutation_index , 0 ],
77
+ self .variable_bounds [mutation_index , 1 ]
129
78
)
130
79
return individual
131
80
132
- def optimize (self ) -> Tuple [np .ndarray , float ]:
81
+ def optimize (self ) -> tuple [np .ndarray , float ]:
133
82
"""
134
83
Execute the genetic algorithm over a number of generations to find the optimal solution.
135
84
"""
@@ -159,32 +108,22 @@ def optimize(self) -> Tuple[np.ndarray, float]:
159
108
best_fitness_value = fitness_values [min_fitness_index ]
160
109
best_solution = population [min_fitness_index ]
161
110
162
- print (
163
- f"Generation { generation + 1 } , Best Fitness Value: { best_fitness_value } "
164
- )
111
+ print (f"Generation { generation + 1 } , Best Fitness Value: { best_fitness_value } " )
165
112
166
113
return best_solution , best_fitness_value
167
114
168
-
169
115
if __name__ == "__main__":

    def objective_function(x: float, y: float) -> float:
        """Example objective to minimize: the paraboloid x^2 + y^2."""
        return x**2 + y**2

    # Search box: both variables restricted to [-10, 10].
    variable_bounds: list[tuple[float, float]] = [(-10, 10), (-10, 10)]

    optimizer = GeneticAlgorithmOptimizer(
        objective_function=objective_function,
        variable_bounds=variable_bounds,
    )
    best_solution, best_fitness_value = optimizer.optimize()
190
129
0 commit comments