@@ -425,10 +425,7 @@ impl BootServices {
425
425
///
426
426
/// * [`uefi::Status::BUFFER_TOO_SMALL`]
427
427
/// * [`uefi::Status::INVALID_PARAMETER`]
428
- pub fn memory_map < ' buf > (
429
- & self ,
430
- buffer : & ' buf mut [ u8 ] ,
431
- ) -> Result < ( MemoryMapKey , MemoryMapIter < ' buf > ) > {
428
+ pub fn memory_map < ' buf > ( & self , buffer : & ' buf mut [ u8 ] ) -> Result < MemoryMap < ' buf > > {
432
429
let mut map_size = buffer. len ( ) ;
433
430
MemoryDescriptor :: assert_aligned ( buffer) ;
434
431
let map_buffer = buffer. as_mut_ptr ( ) . cast :: < MemoryDescriptor > ( ) ;
@@ -453,13 +450,13 @@ impl BootServices {
453
450
}
454
451
. into_with_val ( move || {
455
452
let len = map_size / entry_size;
456
- let iter = MemoryMapIter {
457
- buffer,
453
+
454
+ MemoryMap {
455
+ key : map_key,
456
+ buf : buffer,
458
457
entry_size,
459
- index : 0 ,
460
458
len,
461
- } ;
462
- ( map_key, iter)
459
+ }
463
460
} )
464
461
}
465
462
@@ -1993,6 +1990,103 @@ pub struct MemoryMapSize {
1993
1990
pub map_size : usize ,
1994
1991
}
1995
1992
1993
+ /// An iterator of [`MemoryDescriptor`] which returns elements in sorted order. The underlying memory map is always
1994
+ /// associated with the unique [`MemoryMapKey`] contained in the struct.
1995
+ pub struct MemoryMap < ' buf > {
1996
+ key : MemoryMapKey ,
1997
+ buf : & ' buf mut [ u8 ] ,
1998
+ entry_size : usize ,
1999
+ len : usize ,
2000
+ }
2001
+
2002
+ impl < ' buf > MemoryMap < ' buf > {
2003
+ #[ must_use]
2004
+ /// Returns the unique [`MemoryMapKey`] associated with the memory map.
2005
+ pub fn key ( & self ) -> MemoryMapKey {
2006
+ self . key
2007
+ }
2008
+
2009
+ /// Sorts the memory map by physical address in place.
2010
+ pub fn sort ( & mut self ) {
2011
+ unsafe {
2012
+ self . qsort ( 0 , self . len - 1 ) ;
2013
+ }
2014
+ }
2015
+
2016
+ /// Hoare partition scheme for quicksort.
2017
+ /// Must be called with `low` and `high` being indices within bounds.
2018
+ unsafe fn qsort ( & mut self , low : usize , high : usize ) {
2019
+ if low >= high {
2020
+ return ;
2021
+ }
2022
+
2023
+ let p = self . partition ( low, high) ;
2024
+ self . qsort ( low, p) ;
2025
+ self . qsort ( p + 1 , high) ;
2026
+ }
2027
+
2028
+ unsafe fn partition ( & mut self , low : usize , high : usize ) -> usize {
2029
+ let pivot = self . get_element_phys_addr ( low + ( high - low) / 2 ) ;
2030
+
2031
+ let mut left_index = low. wrapping_sub ( 1 ) ;
2032
+ let mut right_index = high. wrapping_add ( 1 ) ;
2033
+
2034
+ loop {
2035
+ while {
2036
+ left_index = left_index. wrapping_add ( 1 ) ;
2037
+
2038
+ self . get_element_phys_addr ( left_index) < pivot
2039
+ } { }
2040
+
2041
+ while {
2042
+ right_index = right_index. wrapping_sub ( 1 ) ;
2043
+
2044
+ self . get_element_phys_addr ( right_index) > pivot
2045
+ } { }
2046
+
2047
+ if left_index >= right_index {
2048
+ return right_index;
2049
+ }
2050
+
2051
+ self . swap ( left_index, right_index) ;
2052
+ }
2053
+ }
2054
+
2055
+ /// Indices must be smaller than len.
2056
+ unsafe fn swap ( & mut self , index1 : usize , index2 : usize ) {
2057
+ if index1 == index2 {
2058
+ return ;
2059
+ }
2060
+
2061
+ let base = self . buf . as_mut_ptr ( ) ;
2062
+
2063
+ unsafe {
2064
+ ptr:: swap_nonoverlapping (
2065
+ base. add ( index1 * self . entry_size ) ,
2066
+ base. add ( index2 * self . entry_size ) ,
2067
+ self . entry_size ,
2068
+ ) ;
2069
+ }
2070
+ }
2071
+
2072
+ fn get_element_phys_addr ( & self , index : usize ) -> PhysicalAddress {
2073
+ let offset = index. checked_mul ( self . entry_size ) . unwrap ( ) ;
2074
+ let elem = unsafe { & * self . buf . as_ptr ( ) . add ( offset) . cast :: < MemoryDescriptor > ( ) } ;
2075
+ elem. phys_start
2076
+ }
2077
+
2078
+ #[ must_use]
2079
+ /// Returns an iterator over the contained memory map
2080
+ pub fn entries ( & self ) -> MemoryMapIter {
2081
+ MemoryMapIter {
2082
+ buffer : self . buf ,
2083
+ entry_size : self . entry_size ,
2084
+ index : 0 ,
2085
+ len : self . len ,
2086
+ }
2087
+ }
2088
+ }
2089
+
1996
2090
/// An iterator of [`MemoryDescriptor`]. The underlying memory map is always
1997
2091
/// associated with a unique [`MemoryMapKey`].
1998
2092
#[ derive( Debug , Clone ) ]
@@ -2014,12 +2108,16 @@ impl<'buf> Iterator for MemoryMapIter<'buf> {
2014
2108
2015
2109
fn next ( & mut self ) -> Option < Self :: Item > {
2016
2110
if self . index < self . len {
2017
- let ptr = self . buffer . as_ptr ( ) as usize + self . entry_size * self . index ;
2111
+ let descriptor = unsafe {
2112
+ & * self
2113
+ . buffer
2114
+ . as_ptr ( )
2115
+ . add ( self . entry_size * self . index )
2116
+ . cast :: < MemoryDescriptor > ( )
2117
+ } ;
2018
2118
2019
2119
self . index += 1 ;
2020
2120
2021
- let descriptor = unsafe { & * ( ptr as * const MemoryDescriptor ) } ;
2022
-
2023
2121
Some ( descriptor)
2024
2122
} else {
2025
2123
None
@@ -2197,3 +2295,94 @@ pub enum InterfaceType: i32 => {
2197
2295
#[ derive( Debug , Clone , Copy ) ]
2198
2296
#[ repr( transparent) ]
2199
2297
pub struct ProtocolSearchKey ( NonNull < c_void > ) ;
2298
+
2299
#[cfg(test)]
mod tests {
    use core::mem::size_of;

    use crate::table::boot::{MemoryAttribute, MemoryMap, MemoryMapKey, MemoryType};

    use super::{MemoryDescriptor, MemoryMapIter};

    /// Builds an unsorted four-entry memory map, sorts it, and verifies the
    /// entries come out in strictly increasing `phys_start` order.
    #[test]
    fn mem_map_sorting() {
        // Doesn't matter what type it is.
        const TY: MemoryType = MemoryType::RESERVED;

        const BASE: MemoryDescriptor = MemoryDescriptor {
            ty: TY,
            phys_start: 0,
            virt_start: 0,
            page_count: 0,
            att: MemoryAttribute::empty(),
        };

        // Deliberately out of order: 2000, 3000, 0, 1000.
        let mut buffer = [
            MemoryDescriptor {
                phys_start: 2000,
                ..BASE
            },
            MemoryDescriptor {
                phys_start: 3000,
                ..BASE
            },
            BASE,
            MemoryDescriptor {
                phys_start: 1000,
                ..BASE
            },
        ];

        let desc_count = buffer.len();

        let byte_buffer = {
            let size = desc_count * size_of::<MemoryDescriptor>();
            // SAFETY: `buffer` is a live, mutable, properly aligned array of
            // `MemoryDescriptor`s and `size` covers exactly its elements; the
            // resulting byte slice borrows `buffer` for the rest of the test.
            unsafe { core::slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut u8, size) }
        };

        let mut mem_map = MemoryMap {
            // Key doesn't matter
            key: MemoryMapKey(0),
            len: desc_count,
            buf: byte_buffer,
            entry_size: size_of::<MemoryDescriptor>(),
        };

        mem_map.sort();

        if !is_sorted(&mem_map.entries()) {
            panic!("mem_map is not sorted: {}", mem_map);
        }
    }

    // Added for debug purposes on test failure. No `#[cfg(test)]` needed
    // here: the enclosing module is already compiled only for tests.
    impl core::fmt::Display for MemoryMap<'_> {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            writeln!(f)?;
            for desc in self.entries() {
                writeln!(f, "{:?}", desc)?;
            }
            Ok(())
        }
    }

    /// Returns `true` iff the iterator yields descriptors in strictly
    /// increasing `phys_start` order (an empty iterator counts as sorted).
    fn is_sorted(iter: &MemoryMapIter) -> bool {
        let mut iter = iter.clone();
        let mut curr_start;

        if let Some(val) = iter.next() {
            curr_start = val.phys_start;
        } else {
            return true;
        }

        for desc in iter {
            // `<=` also rejects duplicate start addresses, which would
            // indicate overlapping descriptors.
            if desc.phys_start <= curr_start {
                return false;
            }
            curr_start = desc.phys_start
        }
        true
    }
}
0 commit comments