
Contracts & Harnesses for NonNull::read, NonNull::read_volatile, NonNull::read_unaligned #156


Merged
90 changes: 88 additions & 2 deletions
library/core/src/ptr/non_null.rs
@@ -7,7 +7,7 @@ use crate::pin::PinCoerceUnsized;
use crate::ptr::Unique;
use crate::slice::{self, SliceIndex};
use crate::ub_checks::assert_unsafe_precondition;
use crate::{fmt, hash, intrinsics, ptr};
use crate::{fmt, hash, intrinsics, ptr, ub_checks};
use safety::{ensures, requires};


@@ -898,12 +898,16 @@ impl<T: ?Sized> NonNull<T> {
/// memory in `self` unchanged.
///
/// See [`ptr::read`] for safety concerns and examples.
///
/// * `self` must be valid for reads.
/// * `self` must be properly aligned. Use `read_unaligned` if this is not the case.
/// * `self` must point to a properly initialized value of type `T`.
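///
/// A minimal usage sketch (illustrative only; assumes an aligned, properly
/// initialized `u32` on the stack):
///
/// ```
/// use std::ptr::NonNull;
///
/// let mut x = 7_u32;
/// // A pointer derived from a reference is non-null, aligned, and initialized.
/// let ptr = NonNull::new(&mut x as *mut u32).unwrap();
/// assert_eq!(unsafe { ptr.read() }, 7);
/// ```
///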
/// [`ptr::read`]: crate::ptr::read()
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[stable(feature = "non_null_convenience", since = "1.80.0")]
#[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
#[requires(self.pointer.is_aligned() && ub_checks::can_dereference(self.pointer))]
#[ensures(|result| self.pointer.is_aligned())] // `*result == *self.pointer` cannot be checked in the postcondition because `T` carries no `PartialEq` bound; the value check is done in the harness instead
pub const unsafe fn read(self) -> T
where
T: Sized,
@@ -925,6 +929,8 @@ impl<T: ?Sized> NonNull<T> {
#[inline]
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[stable(feature = "non_null_convenience", since = "1.80.0")]
#[requires(self.pointer.is_aligned() && ub_checks::can_dereference(self.pointer))]
#[ensures(|result| self.pointer.is_aligned())]
pub unsafe fn read_volatile(self) -> T
where
T: Sized,
@@ -945,6 +951,7 @@ impl<T: ?Sized> NonNull<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
#[stable(feature = "non_null_convenience", since = "1.80.0")]
#[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
#[requires(ub_checks::can_dereference(self.pointer))]
pub const unsafe fn read_unaligned(self) -> T
where
T: Sized,
@@ -1829,4 +1836,83 @@ mod verify {
let maybe_null_ptr = if kani::any() { xptr as *mut i32 } else { null_mut() };
let _ = NonNull::new(maybe_null_ptr);
}

// pub const unsafe fn read(self) -> T where T: Sized
#[kani::proof_for_contract(NonNull::read)]
pub fn non_null_check_read() {
let ptr_u8: *mut u8 = &mut kani::any();
let nonnull_ptr_u8 = NonNull::new(ptr_u8).unwrap();
unsafe {
let result = nonnull_ptr_u8.read();
kani::assert(*ptr_u8 == result, "read returns the correct value");
}

// array example
const ARR_LEN: usize = 10;
let offset: usize = kani::any_where(|x| *x < ARR_LEN);
let arr: [i8; ARR_LEN] = kani::any();
let raw_ptr: *mut i8 = arr.as_ptr() as *mut i8;
let nonnull_ptr = NonNull::new(raw_ptr).unwrap();
unsafe {
let result = nonnull_ptr.add(offset).read();
kani::assert(*nonnull_ptr.as_ptr().add(offset) == result, "read returns the correct value");
}
}

// pub unsafe fn read_volatile(self) -> T where T: Sized
#[kani::proof_for_contract(NonNull::read_volatile)]
pub fn non_null_check_read_volatile() {
let ptr_u8: *mut u8 = &mut kani::any();
let nonnull_ptr_u8 = NonNull::new(ptr_u8).unwrap();
unsafe {
let result = nonnull_ptr_u8.read_volatile();
kani::assert(*ptr_u8 == result, "read_volatile returns the correct value");
}
}

#[repr(packed, C)]
struct Packed {
_padding: u8,
unaligned: u32,
}

// pub const unsafe fn read_unaligned(self) -> T where T: Sized
#[kani::proof_for_contract(NonNull::read_unaligned)]
pub fn non_null_check_read_unaligned() {
const SIZE: usize = 100;
// choose a nondeterministic offset into the u8 array, leaving room to read a full usize
let offset: usize = kani::any();
kani::assume(offset < SIZE - core::mem::size_of::<usize>());
let arr: [u8; SIZE] = kani::any();
let unaligned_ptr = &arr[offset] as *const u8 as *const usize;
// create a NonNull pointer from the unaligned pointer
let nonnull_ptr = NonNull::new(unaligned_ptr as *mut usize).unwrap();
unsafe {
let result = nonnull_ptr.read_unaligned();

let raw_result_ptr = &result as *const usize as *const u8;
let raw_original_ptr = &arr[offset] as *const u8;
for i in 0..core::mem::size_of::<usize>() {
// dereference and compare each byte of the source and the read result
assert_eq!(*raw_original_ptr.add(i), *raw_result_ptr.add(i));
}
}

// read an unaligned value from a packed struct
let unaligned_value: u32 = kani::any();
let packed = Packed {
_padding: kani::any::<u8>(),
unaligned: unaligned_value,
};

let unaligned_ptr = ptr::addr_of!(packed.unaligned);
let nonnull_packed_ptr = NonNull::new(unaligned_ptr as *mut u32).unwrap();
let v = unsafe { nonnull_packed_ptr.read_unaligned() };
assert_eq!(v, unaligned_value);
}
}
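
For reference, below is a minimal standalone sketch (ordinary Rust, separate from the Kani harnesses above, using a hypothetical `PackedExample` struct) of calls that satisfy the three contracts: `read` and `read_volatile` require an aligned, initialized pointer, while `read_unaligned` only requires that the pointee be initialized and dereferenceable.

use std::ptr::{self, NonNull};

#[repr(C, packed)]
struct PackedExample {
    _pad: u8,
    value: u32,
}

fn main() {
    // Aligned, initialized u32: meets the preconditions of `read` and `read_volatile`.
    let mut x = 7_u32;
    let aligned = NonNull::new(&mut x as *mut u32).unwrap();
    assert_eq!(unsafe { aligned.read() }, 7);
    assert_eq!(unsafe { aligned.read_volatile() }, 7);

    // A packed field is initialized but possibly misaligned, so only the weaker
    // precondition of `read_unaligned` (dereferenceable, no alignment requirement)
    // is guaranteed to hold here.
    let p = PackedExample { _pad: 0, value: 9 };
    let unaligned = NonNull::new(ptr::addr_of!(p.value) as *mut u32).unwrap();
    assert_eq!(unsafe { unaligned.read_unaligned() }, 9);
}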