-import random
 import numpy as np
+import random
+from concurrent.futures import ThreadPoolExecutor

 # Parameters
 N_POPULATION = 100  # Population size
 CROSSOVER_RATE = 0.8  # Probability of crossover
 SEARCH_SPACE = (-10, 10)  # Search space for the variables

+# Random number generator
+rng = np.random.default_rng()

-# Genetic Algorithm for Function Optimization
 class GeneticAlgorithm:
     def __init__(
         self,
@@ -35,11 +37,9 @@ def __init__(
         self.population = self.initialize_population()

     def initialize_population(self):
-        # Generate random initial population within the search space
+        # Generate random initial population within the search space using the generator
         return [
-            np.random.uniform(
-                low=self.bounds[i][0], high=self.bounds[i][1], size=self.dim
-            )
+            rng.uniform(low=self.bounds[i][0], high=self.bounds[i][1], size=self.dim)
             for i in range(self.population_size)
         ]

@@ -48,14 +48,10 @@ def fitness(self, individual):
         value = self.function(*individual)
         return value if self.maximize else -value  # If minimizing, invert the fitness

-    def select_parents(self):
-        # Rank individuals based on fitness and select top individuals for mating
-        scores = [
-            (individual, self.fitness(individual)) for individual in self.population
-        ]
-        scores.sort(key=lambda x: x[1], reverse=True)
-        selected = [ind for ind, _ in scores[:N_SELECTED]]
-        return selected
+    def select_parents(self, population_score):
+        # Select top N_SELECTED parents based on fitness
+        population_score.sort(key=lambda x: x[1], reverse=True)
+        return [ind for ind, _ in population_score[:N_SELECTED]]

     def crossover(self, parent1, parent2):
         # Perform uniform crossover
@@ -67,16 +63,28 @@ def crossover(self, parent1, parent2):
         return parent1, parent2

     def mutate(self, individual):
-        # Apply mutation to an individual with some probability
+        # Apply mutation to an individual using the new random generator
         for i in range(self.dim):
             if random.random() < self.mutation_prob:
-                individual[i] = np.random.uniform(self.bounds[i][0], self.bounds[i][1])
+                individual[i] = rng.uniform(self.bounds[i][0], self.bounds[i][1])
         return individual

+    def evaluate_population(self):
+        # Multithreaded evaluation of population fitness
+        with ThreadPoolExecutor() as executor:
+            return list(executor.map(lambda ind: (ind, self.fitness(ind)), self.population))
+
     def evolve(self):
         for generation in range(self.generations):
-            # Select parents based on fitness
-            parents = self.select_parents()
+            # Evaluate population fitness (multithreaded)
+            population_score = self.evaluate_population()
+
+            # Check the best individual
+            best_individual = max(population_score, key=lambda x: x[1])[0]
+            best_fitness = self.fitness(best_individual)
+
+            # Select parents for next generation
+            parents = self.select_parents(population_score)
             next_generation = []

             # Generate offspring using crossover and mutation
@@ -87,42 +95,33 @@ def evolve(self):
                 next_generation.append(self.mutate(child2))

             # Ensure population size remains the same
-            self.population = next_generation[: self.population_size]
-
-            # Track the best solution so far
-            best_individual = max(self.population, key=self.fitness)
-            best_fitness = self.fitness(best_individual)
+            self.population = next_generation[:self.population_size]

             if generation % 10 == 0:
-                print(
-                    f"Generation {generation}: Best Fitness = {best_fitness}, Best Individual = {best_individual}"
-                )
+                print(f"Generation {generation}: Best Fitness = {best_fitness}")

-        # Return the best individual found
-        return max(self.population, key=self.fitness)
+        return best_individual


-# Define a sample function to optimize (e.g., minimize the sum of squares)
+# Example target function for optimization
 def target_function(x, y):
-    return x**2 + y**2  # Example: simple parabolic surface (minimization)
+    return x**2 + y**2  # Simple parabolic surface (minimization)


 # Set bounds for the variables (x, y)
 bounds = [(-10, 10), (-10, 10)]  # Both x and y range from -10 to 10

-# Instantiate the genetic algorithm
+# Instantiate and run the genetic algorithm
 ga = GeneticAlgorithm(
     function=target_function,
     bounds=bounds,
     population_size=N_POPULATION,
     generations=N_GENERATIONS,
     mutation_prob=MUTATION_PROBABILITY,
     crossover_rate=CROSSOVER_RATE,
-    maximize=False,  # Set to False for minimization
+    maximize=False  # Minimize the function
 )

-# Run the genetic algorithm and find the optimal solution
 best_solution = ga.evolve()
-
 print(f"Best solution found: {best_solution}")
 print(f"Best fitness (minimum value of function): {target_function(*best_solution)}")