@@ -9,7 +9,9 @@ static size_t right(size_t i) { return (i << 1) + 1; }
 BinaryHeap::BinaryHeap()
     : heap_(nullptr)
     , heap_size_(0)
-    , heap_tail_(0) {}
+    , heap_tail_(0)
+    , max_index_(std::numeric_limits<size_t>::max())
+    , prune_limit_(std::numeric_limits<size_t>::max()) {}
 
 BinaryHeap::~BinaryHeap() {
     free_all_memory();
@@ -26,7 +28,7 @@ void BinaryHeap::free(t_heap* hptr) {
 // or realloc() must be eliminated from add_to_heap()
 // because there is no C++ equivalent.
 void BinaryHeap::init_heap(const DeviceGrid& grid) {
-    ssize_t target_heap_size = (grid.width() - 1) * (grid.height() - 1);
+    size_t target_heap_size = (grid.width() - 1) * (grid.height() - 1);
     if (heap_ == nullptr || heap_size_ < target_heap_size) {
         if (heap_ != nullptr) {
             // coverity[offset_free : Intentional]
@@ -45,6 +47,11 @@ void BinaryHeap::add_to_heap(t_heap* hptr) {
     // start with undefined hole
     ++heap_tail_;
     sift_up(heap_tail_ - 1, hptr);
+
+    // If we have pruned, rebuild the heap now.
+    if (check_prune_limit()) {
+        build_heap();
+    }
 }
 
 bool BinaryHeap::is_empty_heap() const {
@@ -70,7 +77,7 @@ t_heap* BinaryHeap::get_heap_head() {
         hole = 1;
         child = 2;
         --heap_tail_;
-        while ((int)child < heap_tail_) {
+        while (child < heap_tail_) {
             if (heap_[child + 1]->cost < heap_[child]->cost)
                 ++child; // become right child
             heap_[hole] = heap_[child];
@@ -85,20 +92,20 @@ t_heap* BinaryHeap::get_heap_head() {
 }
 
 void BinaryHeap::empty_heap() {
-    for (int i = 1; i < heap_tail_; i++)
+    for (size_t i = 1; i < heap_tail_; i++)
         free(heap_[i]);
 
     heap_tail_ = 1;
 }
 
-size_t BinaryHeap::size() const { return static_cast<size_t>(heap_tail_ - 1); } // heap[0] is not valid element
+size_t BinaryHeap::size() const { return heap_tail_ - 1; } // heap[0] is not valid element
 
 // make a heap rooted at index hole by **sifting down** in O(lgn) time
 void BinaryHeap::sift_down(size_t hole) {
     t_heap* head{heap_[hole]};
     size_t child{left(hole)};
-    while ((int)child < heap_tail_) {
-        if ((int)child + 1 < heap_tail_ && heap_[child + 1]->cost < heap_[child]->cost)
+    while (child < heap_tail_) {
+        if (child + 1 < heap_tail_ && heap_[child + 1]->cost < heap_[child]->cost)
             ++child;
         if (heap_[child]->cost < head->cost) {
             heap_[hole] = heap_[child];
@@ -118,6 +125,14 @@ void BinaryHeap::build_heap() {
        sift_down(i);
 }
 
+void BinaryHeap::set_prune_limit(size_t max_index, size_t prune_limit) {
+    if (prune_limit != std::numeric_limits<size_t>::max()) {
+        VTR_ASSERT(max_index < prune_limit);
+    }
+    max_index_ = max_index;
+    prune_limit_ = prune_limit;
+}
+
 // O(lgn) sifting up to maintain heap property after insertion (should sift down when building heap)
 void BinaryHeap::sift_up(size_t leaf, t_heap* const node) {
     while ((leaf > 1) && (node->cost < heap_[parent(leaf)]->cost)) {
@@ -142,16 +157,18 @@ void BinaryHeap::push_back(t_heap* const hptr) {
     expand_heap_if_full();
     heap_[heap_tail_] = hptr;
     ++heap_tail_;
+
+    check_prune_limit();
 }
 
 bool BinaryHeap::is_valid() const {
     if (heap_ == nullptr) {
         return false;
     }
 
-    for (size_t i = 1; (int)i <= heap_tail_ >> 1; ++i) {
-        if ((int)left(i) < heap_tail_ && heap_[left(i)]->cost < heap_[i]->cost) return false;
-        if ((int)right(i) < heap_tail_ && heap_[right(i)]->cost < heap_[i]->cost) return false;
+    for (size_t i = 1; i <= heap_tail_ >> 1; ++i) {
+        if (left(i) < heap_tail_ && heap_[left(i)]->cost < heap_[i]->cost) return false;
+        if (right(i) < heap_tail_ && heap_[right(i)]->cost < heap_[i]->cost) return false;
     }
     return true;
 }
@@ -166,7 +183,7 @@ void BinaryHeap::invalidate_heap_entries(int sink_node, int ipin_node) {
     * architectures.
     * */
 
-    for (int i = 1; i < heap_tail_; i++) {
+    for (size_t i = 1; i < heap_tail_; i++) {
         if (heap_[i]->index == sink_node) {
             if (heap_[i]->prev_node() == ipin_node) {
                 heap_[i]->index = OPEN; /* Invalid. */
@@ -188,3 +205,57 @@ void BinaryHeap::free_all_memory() {
 
     storage_.free_all_memory();
 }
+
+bool BinaryHeap::check_prune_limit() {
+    if (heap_tail_ > prune_limit_) {
+        prune_heap();
+        return true;
+    }
+
+    return false;
+}
+
+void BinaryHeap::prune_heap() {
+    VTR_ASSERT(max_index_ < prune_limit_);
+
+    std::vector<t_heap*> best_heap_item(max_index_, nullptr);
+
+    // Find the cheapest instance of each index and store it.
+    for (size_t i = 1; i < heap_tail_; i++) {
+        if (heap_[i] == nullptr) {
+            continue;
+        }
+
+        if (heap_[i]->index == OPEN) {
+            free(heap_[i]);
+            heap_[i] = nullptr;
+            continue;
+        }
+
+        VTR_ASSERT(static_cast<size_t>(heap_[i]->index) < max_index_);
+
+        if (best_heap_item[heap_[i]->index] == nullptr || best_heap_item[heap_[i]->index]->cost > heap_[i]->cost) {
+            best_heap_item[heap_[i]->index] = heap_[i];
+        }
+    }
+
+    // Free unused nodes.
+    for (size_t i = 1; i < heap_tail_; i++) {
+        if (heap_[i] == nullptr) {
+            continue;
+        }
+
+        if (best_heap_item[heap_[i]->index] != heap_[i]) {
+            free(heap_[i]);
+            heap_[i] = nullptr;
+        }
+    }
+
+    heap_tail_ = 1;
+
+    for (size_t i = 0; i < max_index_; ++i) {
+        if (best_heap_item[i] != nullptr) {
+            heap_[heap_tail_++] = best_heap_item[i];
+        }
+    }
+}
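
For context on what the new prune_heap() pass accomplishes: once heap_tail_ exceeds prune_limit_, the heap keeps only the cheapest entry for each node index and discards every duplicate, after which add_to_heap() rebuilds the heap. The standalone snippet below is a minimal sketch of that keep-the-cheapest-per-index rule using plain standard-library types only; the Entry struct, max_index value, and sample costs are illustrative assumptions, not VPR code, and the write-back into heap_[1..heap_tail_) plus build_heap() shown in the diff is replaced here by simply printing the survivors.

#include <cstddef>
#include <iostream>
#include <vector>

// Illustrative stand-in for a heap entry; not the VPR t_heap type.
struct Entry {
    std::size_t index;
    float cost;
};

int main() {
    const std::size_t max_index = 4; // plays the role of max_index_ in the diff

    // Duplicate entries for indices 1 and 2, as a congested search might produce.
    std::vector<Entry> heap_items = {
        {1, 5.0f}, {2, 3.0f}, {1, 2.5f}, {3, 7.0f}, {2, 6.0f}};

    // Track the cheapest entry seen so far for each index.
    std::vector<const Entry*> best(max_index, nullptr);
    for (const Entry& e : heap_items) {
        if (best[e.index] == nullptr || best[e.index]->cost > e.cost) {
            best[e.index] = &e;
        }
    }

    // The survivors; in the diff these are re-packed into heap_[1..heap_tail_)
    // and the caller restores the heap property with build_heap().
    for (std::size_t i = 0; i < max_index; ++i) {
        if (best[i] != nullptr) {
            std::cout << "index " << i << " kept with cost " << best[i]->cost << "\n";
        }
    }
}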