
Commit 096a033

Update genetic_algorithm_optimization.py
1 parent 57feaea commit 096a033

File tree

1 file changed: +68 -79 lines changed

genetic_algorithm_optimization.py
@@ -1,136 +1,125 @@
-from collections.abc import Callable  # Sorted import
-import numpy as np  # Sorted import
+import numpy as np


 class GeneticAlgorithmOptimizer:
     def __init__(
         self,
-        objective_function: Callable[..., float],
-        variable_bounds: list[tuple[float, float]],
-        population_size: int = 100,
-        max_generations: int = 500,
-        crossover_probability: float = 0.9,
-        mutation_probability: float = 0.01,
-    ) -> None:
-        self.objective_function = objective_function
-        self.variable_bounds = np.array(variable_bounds)
+        func,
+        bounds,
+        population_size=100,
+        generations=500,
+        crossover_prob=0.9,
+        mutation_prob=0.01,
+    ):
+        self.func = func
+        self.bounds = np.array(bounds)
         self.population_size = population_size
-        self.max_generations = max_generations
-        self.crossover_probability = crossover_probability
-        self.mutation_probability = mutation_probability
-        self.num_variables = len(variable_bounds)
-        self.rng = np.random.default_rng()  # Initialize random generator
+        self.generations = generations
+        self.crossover_prob = crossover_prob
+        self.mutation_prob = mutation_prob
+        self.num_variables = len(bounds)

-    def generate_initial_population(self) -> np.ndarray:
+        # Initialize the random number generator
+        self.rng = np.random.default_rng()
+
+    def initialize_population(self):
         """
-        Generate a population of random solutions within the given variable bounds.
+        Initialize a population of random solutions within the bounds.
         """
         return self.rng.uniform(
-            low=self.variable_bounds[:, 0],
-            high=self.variable_bounds[:, 1],
+            low=self.bounds[:, 0],
+            high=self.bounds[:, 1],
             size=(self.population_size, self.num_variables),
         )

-    def evaluate_fitness(self, individual: list[float]) -> float:
+    def fitness(self, individual):
         """
-        Evaluate the fitness of an individual by computing the value of the objective function.
+        Evaluate the fitness of an individual.
+        In minimization problems, we aim to minimize the function value.
         """
-        return self.objective_function(*individual)
+        return self.func(*individual)

-    def select_parent(
-        self, population: np.ndarray, fitness_values: np.ndarray
-    ) -> np.ndarray:
+    def select_parents(self, population, fitness_scores):
         """
-        Select a parent using tournament selection based on fitness values.
+        Select parents using tournament selection.
         """
         selected_indices = self.rng.choice(
             range(self.population_size), size=2, replace=False
         )
-        return population[selected_indices[np.argmin(fitness_values[selected_indices])]]
+        return population[selected_indices[np.argmin(fitness_scores[selected_indices])]]

-    def perform_crossover(
-        self, parent1: np.ndarray, parent2: np.ndarray
-    ) -> tuple[np.ndarray, np.ndarray]:
+    def crossover(self, parent1, parent2):
         """
-        Perform one-point crossover between two parents to create offspring.
+        Perform one-point crossover to create offspring.
+        Skip crossover for single-variable functions.
         """
         if self.num_variables == 1:
-            return parent1, parent2
+            return parent1, parent2  # No crossover needed for single-variable functions

-        if self.rng.random() < self.crossover_probability:
-            crossover_point = self.rng.integers(1, self.num_variables)
-            child1 = np.concatenate(
-                (parent1[:crossover_point], parent2[crossover_point:])
-            )
-            child2 = np.concatenate(
-                (parent2[:crossover_point], parent1[crossover_point:])
-            )
+        if self.rng.random() < self.crossover_prob:
+            point = self.rng.integers(1, self.num_variables)
+            child1 = np.concatenate((parent1[:point], parent2[point:]))
+            child2 = np.concatenate((parent2[:point], parent1[point:]))
             return child1, child2
         return parent1, parent2

-    def apply_mutation(self, individual: np.ndarray) -> np.ndarray:
+    def mutate(self, individual):
         """
-        Apply mutation to an individual based on the mutation probability.
+        Apply mutation to an individual with a given mutation probability.
         """
-        if self.rng.random() < self.mutation_probability:
-            mutation_index = self.rng.integers(0, self.num_variables)
-            individual[mutation_index] = self.rng.uniform(
-                self.variable_bounds[mutation_index, 0],
-                self.variable_bounds[mutation_index, 1],
+        if self.rng.random() < self.mutation_prob:
+            index = self.rng.integers(0, self.num_variables)
+            individual[index] = self.rng.uniform(
+                self.bounds[index, 0], self.bounds[index, 1]
             )
         return individual

-    def optimize(self) -> tuple[np.ndarray, float]:
+    def evolve(self):
         """
-        Execute the genetic algorithm over a number of generations to find the optimal solution.
+        Run the genetic algorithm for a number of generations.
         """
-        population = self.generate_initial_population()
+        population = self.initialize_population()
         best_solution = None
-        best_fitness_value = float("inf")
+        best_fitness = float("inf")

-        for generation in range(self.max_generations):
-            fitness_values = np.array(
-                [self.evaluate_fitness(individual) for individual in population]
+        for gen in range(self.generations):
+            fitness_scores = np.array(
+                [self.fitness(individual) for individual in population]
             )

             new_population = []
             for _ in range(self.population_size // 2):
-                parent1 = self.select_parent(population, fitness_values)
-                parent2 = self.select_parent(population, fitness_values)
-                child1, child2 = self.perform_crossover(parent1, parent2)
-                child1 = self.apply_mutation(child1)
-                child2 = self.apply_mutation(child2)
+                parent1 = self.select_parents(population, fitness_scores)
+                parent2 = self.select_parents(population, fitness_scores)
+                child1, child2 = self.crossover(parent1, parent2)
+                child1 = self.mutate(child1)
+                child2 = self.mutate(child2)
                 new_population.extend([child1, child2])

             population = np.array(new_population)

             # Track the best solution
-            min_fitness_index = np.argmin(fitness_values)
-            if fitness_values[min_fitness_index] < best_fitness_value:
-                best_fitness_value = fitness_values[min_fitness_index]
+            min_fitness_index = np.argmin(fitness_scores)
+            if fitness_scores[min_fitness_index] < best_fitness:
+                best_fitness = fitness_scores[min_fitness_index]
                 best_solution = population[min_fitness_index]

-            print(
-                f"Generation {generation + 1}, Best Fitness Value: {best_fitness_value}"
-            )
+            print(f"Generation {gen + 1}, Best Fitness: {best_fitness}")

-        return best_solution, best_fitness_value
+        return best_solution, best_fitness


 if __name__ == "__main__":
+    # Define the function to optimize
+    def func(x, y):
+        return x**2 + y**2  # Example: Minimizing x^2 + y^2

-    def objective_function(x: float, y: float) -> float:
-        """
-        Example objective function to minimize x^2 + y^2
-        """
-        return x**2 + y**2
-
-    variable_bounds: list[tuple[float, float]] = [(-10, 10), (-10, 10)]
+    # Define the bounds for each variable
+    bounds = [(-10, 10), (-10, 10)]

-    optimizer = GeneticAlgorithmOptimizer(
-        objective_function=objective_function, variable_bounds=variable_bounds
-    )
-    best_solution, best_fitness_value = optimizer.optimize()
+    # Initialize and run the optimizer
+    optimizer = GeneticAlgorithmOptimizer(func=func, bounds=bounds)
+    best_solution, best_fitness = optimizer.evolve()

     print("Best Solution:", best_solution)
-    print("Best Fitness Value:", best_fitness_value)
+    print("Best Fitness:", best_fitness)

0 commit comments
