@@ -567,8 +567,10 @@ impl<Tag> Deref for Relocations<Tag> {
 }
 
 /// A partial, owned list of relocations to transfer into another allocation.
+///
+/// Offsets are already adjusted to the destination allocation.
 pub struct AllocationRelocations<Tag> {
-    relative_relocations: Vec<(Size, Tag)>,
+    dest_relocations: Vec<(Size, Tag)>,
 }
 
 impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
@@ -581,12 +583,17 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
     ) -> AllocationRelocations<Tag> {
         let relocations = self.get_relocations(cx, src);
         if relocations.is_empty() {
-            return AllocationRelocations { relative_relocations: Vec::new() };
+            return AllocationRelocations { dest_relocations: Vec::new() };
         }
 
         let size = src.size;
         let mut new_relocations = Vec::with_capacity(relocations.len() * (count as usize));
 
+        // If `count` is large, this is rather wasteful -- we are allocating a big array here, which
+        // is mostly filled with redundant information since it's just N copies of the same `Tag`s
+        // at slightly adjusted offsets. The reason we do this is so that in `mark_relocation_range`
+        // we can use `insert_presorted`. That wouldn't work with an `Iterator` that just produces
+        // the right sequence of relocations for all N copies.
         for i in 0..count {
             new_relocations.extend(relocations.iter().map(|&(offset, reloc)| {
                 // compute offset for current repetition
@@ -599,7 +606,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
             }));
         }
 
-        AllocationRelocations { relative_relocations: new_relocations }
+        AllocationRelocations { dest_relocations: new_relocations }
     }
 
     /// Applies a relocation copy.
@@ -609,7 +616,7 @@ impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
     /// This is dangerous to use as it can violate internal `Allocation` invariants!
     /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
     pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations<Tag>) {
-        self.relocations.0.insert_presorted(relocations.relative_relocations);
+        self.relocations.0.insert_presorted(relocations.dest_relocations);
     }
 }
 
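
The comment added in `prepare_relocation_copy` is the key design note here: all N copies are materialized up front, with offsets already shifted to the destination, precisely so that `mark_relocation_range` can merge the whole batch with `insert_presorted` instead of inserting relocations one at a time. Below is a minimal, self-contained sketch of that pattern, assuming plain `u64` offsets and a hand-rolled sorted list in place of rustc's `Size` and `SortedMap`; the names `Relocs` and `prepare_copy` are hypothetical stand-ins, not the compiler's actual types.

type Tag = char;

/// Relocations kept sorted by offset, standing in for rustc's `SortedMap<Size, Tag>`.
#[derive(Debug)]
struct Relocs(Vec<(u64, Tag)>);

impl Relocs {
    /// Simplified analogue of `insert_presorted`: `batch` must already be sorted by
    /// offset, so merging it with the existing (sorted) entries is one linear pass.
    fn insert_presorted(&mut self, batch: Vec<(u64, Tag)>) {
        debug_assert!(batch.windows(2).all(|w| w[0].0 <= w[1].0));
        let old = std::mem::take(&mut self.0);
        let mut merged = Vec::with_capacity(old.len() + batch.len());
        let (mut a, mut b) = (old.into_iter().peekable(), batch.into_iter().peekable());
        loop {
            let take_a = match (a.peek(), b.peek()) {
                (Some(x), Some(y)) => x.0 <= y.0,
                (Some(_), None) => true,
                (None, Some(_)) => false,
                (None, None) => break,
            };
            merged.push(if take_a { a.next().unwrap() } else { b.next().unwrap() });
        }
        self.0 = merged;
    }
}

/// Expand `src` relocations (offsets relative to `src_start`, covering `size` bytes)
/// into `count` copies whose offsets are already adjusted to the destination.
/// Because `src` is sorted and each repetition is shifted by a growing multiple of
/// `size`, the combined result stays sorted -- which is what makes the presorted
/// insertion above legal.
fn prepare_copy(
    src: &[(u64, Tag)],
    src_start: u64,
    dest_start: u64,
    size: u64,
    count: u64,
) -> Vec<(u64, Tag)> {
    let mut dest_relocations = Vec::with_capacity(src.len() * (count as usize));
    for i in 0..count {
        dest_relocations.extend(src.iter().map(|&(offset, tag)| {
            // Start of repetition `i` in the destination, plus the relocation's
            // position within the copied range.
            (dest_start + i * size + (offset - src_start), tag)
        }));
    }
    dest_relocations
}

fn main() {
    // Two relocations inside a 16-byte source range that starts at offset 0.
    let src = [(0, 'a'), (8, 'b')];
    let mut dest = Relocs(vec![(100, 'x')]);
    // Copy the range 3 times to destination offset 32 -> offsets 32, 40, 48, 56, 64, 72.
    let batch = prepare_copy(&src, 0, 32, 16, 3);
    dest.insert_presorted(batch);
    println!("{:?}", dest);
}

As the new comment notes, the batch is mostly redundant (N copies of the same tags at shifted offsets), but because it arrives sorted, the merge stays a single linear pass rather than one search per inserted relocation.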