|
| 1 | +import random |
| 2 | +import numpy as np |
| 3 | + |
# Genetic-algorithm hyper-parameters (module-level constants).
N_POPULATION = 100  # Number of individuals kept in each generation
N_GENERATIONS = 500  # Maximum number of generations to evolve
N_SELECTED = 50  # Number of top-ranked parents selected for mating
MUTATION_PROBABILITY = 0.1  # Per-gene probability of random mutation
CROSSOVER_RATE = 0.8  # Probability that a parent pair is recombined
SEARCH_SPACE = (-10, 10)  # Default (low, high) interval per variable; NOTE(review): not referenced below — `bounds` is passed explicitly
| 11 | + |
| 12 | +# Genetic Algorithm for Function Optimization |
class GeneticAlgorithm:
    """Simple real-valued genetic algorithm for function optimization.

    Individuals are NumPy vectors of length ``len(bounds)``. Selection is
    truncation (top ``N_SELECTED`` by fitness), recombination is
    single-point crossover, and mutation resamples individual genes
    uniformly within their per-variable bounds.
    """

    def __init__(self, function, bounds, population_size, generations,
                 mutation_prob, crossover_rate, maximize=True):
        self.function = function              # Objective function, called as function(*individual)
        self.bounds = bounds                  # Per-variable (low, high) search bounds
        self.population_size = population_size
        self.generations = generations
        self.mutation_prob = mutation_prob    # Per-gene mutation probability
        self.crossover_rate = crossover_rate  # Probability a parent pair is recombined
        self.maximize = maximize              # False => minimize (fitness is negated)
        self.dim = len(bounds)                # Number of variables

        # Random initial population within the search space.
        self.population = self.initialize_population()

    def initialize_population(self):
        """Sample the initial population uniformly within the bounds.

        Bug fix: the original indexed ``bounds`` by *population* index
        (``bounds[i]`` for ``i in range(population_size)``), which raises
        IndexError whenever population_size > dim and would draw every
        coordinate of an individual from a single variable's bounds.
        Each coordinate is now drawn from its own (low, high) pair.
        """
        return [
            np.array([np.random.uniform(low, high) for low, high in self.bounds])
            for _ in range(self.population_size)
        ]

    def fitness(self, individual):
        """Return the sign-adjusted objective value; larger is always better."""
        value = self.function(*individual)
        # For minimization, negate so ranking by "highest fitness" still works.
        return value if self.maximize else -value

    def select_parents(self):
        """Truncation selection: return the N_SELECTED fittest individuals."""
        ranked = sorted(self.population, key=self.fitness, reverse=True)
        return ranked[:N_SELECTED]

    def crossover(self, parent1, parent2):
        """Single-point crossover of two parent vectors.

        Returns two children. When no crossover happens (or dim == 1,
        where no interior cut point exists — the original crashed on
        ``randint(1, 0)``), the parents are returned as *copies*: parents
        are reused across pairs in ``evolve`` and the mutation step works
        in place, so returning the originals corrupted shared individuals.
        (Original comment said "uniform crossover"; this is single-point.)
        """
        if self.dim > 1 and random.random() < self.crossover_rate:
            cut = random.randint(1, self.dim - 1)
            child1 = np.concatenate((parent1[:cut], parent2[cut:]))
            child2 = np.concatenate((parent2[:cut], parent1[cut:]))
            return child1, child2
        return parent1.copy(), parent2.copy()

    def mutate(self, individual):
        """Resample each gene within its own bounds with probability mutation_prob.

        Mutates ``individual`` in place and returns it.
        """
        for i in range(self.dim):
            if random.random() < self.mutation_prob:
                individual[i] = np.random.uniform(self.bounds[i][0], self.bounds[i][1])
        return individual

    def evolve(self):
        """Run the GA loop and return the best individual of the final population."""
        for generation in range(self.generations):
            parents = self.select_parents()
            next_generation = []

            # Pair parents (wrapping around for an odd count) and produce
            # two mutated children per pair.
            for i in range(0, len(parents), 2):
                parent1 = parents[i]
                parent2 = parents[(i + 1) % len(parents)]
                child1, child2 = self.crossover(parent1, parent2)
                next_generation.append(self.mutate(child1))
                next_generation.append(self.mutate(child2))

            # Keep the population size fixed.
            self.population = next_generation[:self.population_size]

            # Track and periodically report the best solution so far.
            best_individual = max(self.population, key=self.fitness)
            best_fitness = self.fitness(best_individual)
            if generation % 10 == 0:
                print(f"Generation {generation}: Best Fitness = {best_fitness}, Best Individual = {best_individual}")

        return max(self.population, key=self.fitness)
| 85 | + |
| 86 | + |
| 87 | +# Define a sample function to optimize (e.g., minimize the sum of squares) |
def target_function(x, y):
    """Sphere function f(x, y) = x^2 + y^2; global minimum 0 at the origin."""
    return x * x + y * y
| 90 | + |
def main():
    """Run the genetic algorithm on the sphere function and report the result.

    Wrapped in a ``main()`` entry point with an import guard so the module
    can be imported without kicking off a 500-generation optimization run.
    """
    # Bounds for the variables (x, y): both range over [-10, 10].
    bounds = [(-10, 10), (-10, 10)]

    ga = GeneticAlgorithm(
        function=target_function,
        bounds=bounds,
        population_size=N_POPULATION,
        generations=N_GENERATIONS,
        mutation_prob=MUTATION_PROBABILITY,
        crossover_rate=CROSSOVER_RATE,
        maximize=False,  # minimize the sphere function
    )

    # Run the genetic algorithm and report the best solution found.
    best_solution = ga.evolve()
    print(f"Best solution found: {best_solution}")
    print(f"Best fitness (minimum value of function): {target_function(*best_solution)}")


if __name__ == "__main__":
    main()