Commit c01d178

Faraz126 and cclauss committed
Added implementation for simulated annealing (TheAlgorithms#1679)
* added hill climbing algorithm
* Shorten long lines, streamline get_neighbors()
* Update hill_climbing.py
* Update and rename optimization/hill_climbing.py to searches/hill_climbing.py
* added hill climbing algorithm
* Shorten long lines, streamline get_neighbors()
* Update hill_climbing.py
* Rebased
* added simulated annealing.py
* added final comments and test
* black formatted
* restricted search domain

Co-authored-by: Christian Clauss <[email protected]>
1 parent c5b376d commit c01d178

1 file changed: +134 −0

searches/simulated_annealing.py

@@ -0,0 +1,134 @@
# https://en.wikipedia.org/wiki/Simulated_annealing
import math
import random

from hill_climbing import SearchProblem


def simulated_annealing(
    search_prob,
    find_max: bool = True,
    max_x: float = math.inf,
    min_x: float = -math.inf,
    max_y: float = math.inf,
    min_y: float = -math.inf,
    visualization: bool = False,
    start_temperate: float = 100,
    rate_of_decrease: float = 0.01,
    threshold_temp: float = 1,
) -> SearchProblem:
    """
    Implementation of the simulated annealing algorithm. We start with a given state
    and find all its neighbors. We pick a random neighbor; if that neighbor improves
    the solution, we move in that direction. If it does not improve the solution, we
    generate a random real number between 0 and 1, and if that number falls within a
    certain range (calculated from the current temperature) we still move in that
    direction; otherwise we pick another neighbor randomly and repeat the process.
    Args:
        search_prob: The search state at the start.
        find_max: If True, the algorithm finds the maximum, else the minimum.
        max_x, min_x, max_y, min_y: the maximum and minimum bounds of x and y.
        visualization: If True, a matplotlib graph is displayed.
        start_temperate: the initial temperature of the system when the program starts.
        rate_of_decrease: the rate at which the temperature decreases in each iteration.
        threshold_temp: the threshold temperature below which we end the search.
    Returns a search state having the maximum (or minimum) score.
    """
    search_end = False
    current_state = search_prob
    current_temp = start_temperate
    scores = []
    iterations = 0
    best_state = None

    while not search_end:
        current_score = current_state.score()
        if best_state is None or current_score > best_state.score():
            best_state = current_state
        scores.append(current_score)
        iterations += 1
        next_state = None
        neighbors = current_state.get_neighbors()
        while (
            next_state is None and neighbors
        ):  # until we find a neighbor that we can move to
            index = random.randint(0, len(neighbors) - 1)  # picking a random neighbor
            picked_neighbor = neighbors.pop(index)
            change = picked_neighbor.score() - current_score

            if (
                picked_neighbor.x > max_x
                or picked_neighbor.x < min_x
                or picked_neighbor.y > max_y
                or picked_neighbor.y < min_y
            ):
                continue  # neighbor outside our bounds

            if not find_max:
                change = change * -1  # in case we are finding the minimum
            if change > 0:  # improves the solution
                next_state = picked_neighbor
            else:
                probability = (math.e) ** (
                    change / current_temp
                )  # probability generation function
                if random.random() < probability:  # random number within probability
                    next_state = picked_neighbor
        current_temp = current_temp - (current_temp * rate_of_decrease)

        if (
            current_temp < threshold_temp or next_state is None
        ):  # temperature below threshold, or
            # could not find a suitable neighbor
            search_end = True
        else:
            current_state = next_state

    if visualization:
        import matplotlib.pyplot as plt

        plt.plot(range(iterations), scores)
        plt.xlabel("Iterations")
        plt.ylabel("Function values")
        plt.show()
    return best_state


if __name__ == "__main__":

    def test_f1(x, y):
        return (x ** 2) + (y ** 2)

    # starting the problem with initial coordinates (12, 47)
    prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1)
    local_min = simulated_annealing(
        prob, find_max=False, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True
    )
    print(
        "The minimum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 "
        f"and 50 > y > -5 found via simulated annealing: {local_min.score()}"
    )

    # starting the problem with initial coordinates (12, 47)
    prob = SearchProblem(x=12, y=47, step_size=1, function_to_optimize=test_f1)
    local_max = simulated_annealing(
        prob, find_max=True, max_x=100, min_x=5, max_y=50, min_y=-5, visualization=True
    )
    print(
        "The maximum score for f(x, y) = x^2 + y^2 with the domain 100 > x > 5 "
        f"and 50 > y > -5 found via simulated annealing: {local_max.score()}"
    )

    def test_f2(x, y):
        return (3 * x ** 2) - (6 * y)

    prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f2)
    local_min = simulated_annealing(prob, find_max=False, visualization=True)
    print(
        "The minimum score for f(x, y) = 3*x^2 - 6*y found via simulated annealing: "
        f"{local_min.score()}"
    )

    prob = SearchProblem(x=3, y=4, step_size=1, function_to_optimize=test_f2)
    local_max = simulated_annealing(prob, find_max=True, visualization=True)
    print(
        "The maximum score for f(x, y) = 3*x^2 - 6*y found via simulated annealing: "
        f"{local_max.score()}"
    )
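For reference, the acceptance rule in the diff above is the standard Metropolis criterion: a move that worsens the score by some amount (change < 0) is still accepted with probability e^(change / T), while the temperature follows the geometric schedule T <- T * (1 - rate_of_decrease) until it drops below threshold_temp. The following is a minimal standalone sketch of those two ideas, not part of the committed file; the helper names (acceptance_probability, iterations_until_threshold) are illustrative only.

import math

def acceptance_probability(change: float, temperature: float) -> float:
    # Metropolis criterion: always accept improvements, accept worsening
    # moves with probability e^(change / T).
    return 1.0 if change > 0 else math.exp(change / temperature)

def iterations_until_threshold(start_temp: float, rate: float, threshold: float) -> int:
    # Count geometric cooling steps T <- T * (1 - rate) until T < threshold.
    steps, temp = 0, start_temp
    while temp >= threshold:
        temp -= temp * rate
        steps += 1
    return steps

if __name__ == "__main__":
    # With the defaults used above (start at 100, decrease 1% per iteration,
    # stop below 1), the outer search loop runs at most this many iterations:
    print(iterations_until_threshold(100, 0.01, 1))  # 459

    # A move that worsens the score by 5 is accepted often while the system is
    # hot and only rarely once it has cooled down.
    print(acceptance_probability(-5, 100))  # ~0.95
    print(acceptance_probability(-5, 2))    # ~0.08

This is why the algorithm can escape local optima early in the search but behaves like plain hill climbing near the end: the search may also stop earlier if no in-bounds neighbor is accepted in an iteration.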
