From 0ccf81e6cc54a310c696c546aef4cc9e4c68a590 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Thu, 14 Nov 2024 14:53:04 -0800 Subject: [PATCH 01/16] Adds contracts for byte_add, byte_sub and byte_offset - Also adds proofs verifying unit, int and composite types. --- library/core/src/ptr/mut_ptr.rs | 244 +++++++++++++++++++++++++++++++- 1 file changed, 237 insertions(+), 7 deletions(-) diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 7aa6a309a06b5..7be6f303f2754 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -3,6 +3,11 @@ use crate::cmp::Ordering::{Equal, Greater, Less}; use crate::intrinsics::const_eval_select; use crate::mem::SizedTypeProperties; use crate::slice::{self, SliceIndex}; +use safety::{ensures, requires}; + +#[cfg(kani)] +use crate::kani; +use core::mem; impl *mut T { /// Returns `true` if the pointer is null. @@ -94,7 +99,10 @@ impl *mut T { /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`. /// println!("{:?}", unsafe { &*bad }); #[unstable(feature = "set_ptr_value", issue = "75091")] - #[cfg_attr(bootstrap, rustc_const_stable(feature = "ptr_metadata_const", since = "1.83.0"))] + #[cfg_attr( + bootstrap, + rustc_const_stable(feature = "ptr_metadata_const", since = "1.83.0") + )] #[must_use = "returns a new pointer rather than modifying its argument"] #[inline] pub const fn with_metadata_of(self, meta: *const U) -> *mut U @@ -277,7 +285,11 @@ impl *mut T { pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> { // SAFETY: the caller must guarantee that `self` is valid for a // reference if it isn't null. - if self.is_null() { None } else { unsafe { Some(&*self) } } + if self.is_null() { + None + } else { + unsafe { Some(&*self) } + } } /// Returns a shared reference to the value behind the pointer. @@ -352,7 +364,11 @@ impl *mut T { { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a reference. - if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit) }) } + if self.is_null() { + None + } else { + Some(unsafe { &*(self as *const MaybeUninit) }) + } } /// Adds a signed offset to a pointer. @@ -457,6 +473,22 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero, then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // are pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_add(count).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_offset(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. 
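        // Implementation note: casting to a byte pointer makes `offset` advance by `count`
        // bytes (since `u8` has size 1), and `with_metadata_of(self)` copies `self`'s
        // metadata (e.g. a slice length) back onto the resulting pointer.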
unsafe { self.cast::().offset(count).with_metadata_of(self) } @@ -537,7 +569,9 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_offset(self, count: isize) -> Self { - self.cast::().wrapping_offset(count).with_metadata_of(self) + self.cast::() + .wrapping_offset(count) + .with_metadata_of(self) } /// Masks out bits of the pointer according to a mask. @@ -578,7 +612,9 @@ impl *mut T { #[must_use = "returns a new pointer rather than modifying its argument"] #[inline(always)] pub fn mask(self, mask: usize) -> *mut T { - intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self) + intrinsics::ptr_mask(self.cast::<()>(), mask) + .cast_mut() + .with_metadata_of(self) } /// Returns `None` if the pointer is null, or else returns a unique reference to @@ -628,7 +664,11 @@ impl *mut T { pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> { // SAFETY: the caller must guarantee that `self` is be valid for // a mutable reference if it isn't null. - if self.is_null() { None } else { unsafe { Some(&mut *self) } } + if self.is_null() { + None + } else { + unsafe { Some(&mut *self) } + } } /// Returns a unique reference to the value behind the pointer. @@ -689,7 +729,11 @@ impl *mut T { { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a reference. - if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit) }) } + if self.is_null() { + None + } else { + Some(unsafe { &mut *(self as *mut MaybeUninit) }) + } } /// Returns whether two pointers are guaranteed to be equal. @@ -1052,6 +1096,22 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero, then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // are pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_add(count as isize).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_add(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. 
unsafe { self.cast::().add(count).with_metadata_of(self) } @@ -1167,6 +1227,22 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // would be pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_sub(count as isize).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_sub(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as isize) == (*result as *mut u8 as isize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. unsafe { self.cast::().sub(count).with_metadata_of(self) } @@ -2302,3 +2378,157 @@ impl PartialOrd for *mut T { *self >= *other } } + +#[cfg(kani)] +#[unstable(feature = "kani", issue = "none")] +pub mod verify { + use crate::kani; + use core::mem; + use kani::PointerGenerator; + + // generate proof for contracts for byte_add, byte_sub and byte_offset for unint types + macro_rules! gen_mut_byte_arith_unit_harness { + (byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut ()>::byte_offset)] + pub fn $proof_name() { + let mut val = (); + let ptr: *mut () = &mut val as *mut (); + let count: isize = kani::any(); + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($fn_name:ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut ()>::$fn_name)] + pub fn $proof_name() { + let mut val = (); + let ptr: *mut () = &mut val as *mut (); + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_mut_byte_arith_unit_harness!(byte_add, check_mut_byte_add_unit); + gen_mut_byte_arith_unit_harness!(byte_sub, check_mut_byte_sub_unit); + gen_mut_byte_arith_unit_harness!(byte_offset, check_mut_byte_offset_unit); + + const ARRAY_LEN: usize = 40; + + // generate proof for contracts for byte_add, byte_sub and byte_offset + macro_rules! 
gen_mut_byte_arith_harness { + ($type:ty, byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut $type>::byte_offset)] + pub fn $proof_name() { + // generator with space for single element + let mut generator1 = PointerGenerator::<{ mem::size_of::<$type>() }>::new(); + // generator with space for multiple elements + let mut generator2 = + PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); + + let ptr: *mut $type = if kani::any() { + generator1.any_in_bounds().ptr as *mut $type + } else { + generator2.any_in_bounds().ptr as *mut $type + }; + let count: isize = kani::any(); + + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($type:ty, $fn_name:ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut $type>::$fn_name)] + pub fn $proof_name() { + // generator with space for single element + let mut generator1 = PointerGenerator::<{ mem::size_of::<$type>() }>::new(); + // generator with space for multiple elements + let mut generator2 = + PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); + + let ptr: *mut $type = if kani::any() { + generator1.any_in_bounds().ptr as *mut $type + } else { + generator2.any_in_bounds().ptr as *mut $type + }; + + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_mut_byte_arith_harness!(i8, byte_add, check_mut_byte_add_i8); + gen_mut_byte_arith_harness!(i16, byte_add, check_mut_byte_add_i16); + gen_mut_byte_arith_harness!(i32, byte_add, check_mut_byte_add_i32); + gen_mut_byte_arith_harness!(i64, byte_add, check_mut_byte_add_i64); + gen_mut_byte_arith_harness!(i128, byte_add, check_mut_byte_add_i128); + gen_mut_byte_arith_harness!(isize, byte_add, check_mut_byte_add_isize); + gen_mut_byte_arith_harness!(u8, byte_add, check_mut_byte_add_u8); + gen_mut_byte_arith_harness!(u16, byte_add, check_mut_byte_add_u16); + gen_mut_byte_arith_harness!(u32, byte_add, check_mut_byte_add_u32); + gen_mut_byte_arith_harness!(u64, byte_add, check_mut_byte_add_u64); + gen_mut_byte_arith_harness!(u128, byte_add, check_mut_byte_add_u128); + gen_mut_byte_arith_harness!(usize, byte_add, check_mut_byte_add_usize); + gen_mut_byte_arith_harness!((i8, i8), byte_add, check_mut_byte_add_tuple_1); + gen_mut_byte_arith_harness!((f64, bool), byte_add, check_mut_byte_add_tuple_2); + gen_mut_byte_arith_harness!((i32, f64, bool), byte_add, check_mut_byte_add_tuple_3); + gen_mut_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_add, + check_mut_byte_add_tuple_4 + ); + + gen_mut_byte_arith_harness!(i8, byte_sub, check_mut_byte_sub_i8); + gen_mut_byte_arith_harness!(i16, byte_sub, check_mut_byte_sub_i16); + gen_mut_byte_arith_harness!(i32, byte_sub, check_mut_byte_sub_i32); + gen_mut_byte_arith_harness!(i64, byte_sub, check_mut_byte_sub_i64); + gen_mut_byte_arith_harness!(i128, byte_sub, check_mut_byte_sub_i128); + gen_mut_byte_arith_harness!(isize, byte_sub, check_mut_byte_sub_isize); + gen_mut_byte_arith_harness!(u8, byte_sub, check_mut_byte_sub_u8); + gen_mut_byte_arith_harness!(u16, byte_sub, check_mut_byte_sub_u16); + gen_mut_byte_arith_harness!(u32, byte_sub, check_mut_byte_sub_u32); + gen_mut_byte_arith_harness!(u64, byte_sub, check_mut_byte_sub_u64); + gen_mut_byte_arith_harness!(u128, byte_sub, check_mut_byte_sub_u128); + gen_mut_byte_arith_harness!(usize, byte_sub, check_mut_byte_sub_usize); + gen_mut_byte_arith_harness!((i8, i8), byte_sub, check_mut_byte_sub_tuple_1); + gen_mut_byte_arith_harness!((f64, bool), 
byte_sub, check_mut_byte_sub_tuple_2); + gen_mut_byte_arith_harness!((i32, f64, bool), byte_sub, check_mut_byte_sub_tuple_3); + gen_mut_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_sub, + check_mut_byte_sub_tuple_4 + ); + + gen_mut_byte_arith_harness!(i8, byte_offset, check_mut_byte_offset_i8); + gen_mut_byte_arith_harness!(i16, byte_offset, check_mut_byte_offset_i16); + gen_mut_byte_arith_harness!(i32, byte_offset, check_mut_byte_offset_i32); + gen_mut_byte_arith_harness!(i64, byte_offset, check_mut_byte_offset_i64); + gen_mut_byte_arith_harness!(i128, byte_offset, check_mut_byte_offset_i128); + gen_mut_byte_arith_harness!(isize, byte_offset, check_mut_byte_offset_isize); + gen_mut_byte_arith_harness!(u8, byte_offset, check_mut_byte_offset_u8); + gen_mut_byte_arith_harness!(u16, byte_offset, check_mut_byte_offset_u16); + gen_mut_byte_arith_harness!(u32, byte_offset, check_mut_byte_offset_u32); + gen_mut_byte_arith_harness!(u64, byte_offset, check_mut_byte_offset_u64); + gen_mut_byte_arith_harness!(u128, byte_offset, check_mut_byte_offset_u128); + gen_mut_byte_arith_harness!(usize, byte_offset, check_mut_byte_offset_usize); + gen_mut_byte_arith_harness!((i8, i8), byte_offset, check_mut_byte_offset_tuple_1); + gen_mut_byte_arith_harness!((f64, bool), byte_offset, check_mut_byte_offset_tuple_2); + gen_mut_byte_arith_harness!((i32, f64, bool), byte_offset, check_mut_byte_offset_tuple_3); + gen_mut_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_offset, + check_mut_byte_offset_tuple_4 + ); +} From aec0394315e140eb95ce5bedd85f5b570af1492f Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Thu, 14 Nov 2024 19:59:09 -0800 Subject: [PATCH 02/16] Removes unnecessary rustfmt formnatting --- library/core/src/ptr/mut_ptr.rs | 37 +++++++-------------------------- 1 file changed, 7 insertions(+), 30 deletions(-) diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 7be6f303f2754..9474b680f7985 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -99,10 +99,7 @@ impl *mut T { /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`. /// println!("{:?}", unsafe { &*bad }); #[unstable(feature = "set_ptr_value", issue = "75091")] - #[cfg_attr( - bootstrap, - rustc_const_stable(feature = "ptr_metadata_const", since = "1.83.0") - )] + #[cfg_attr(bootstrap, rustc_const_stable(feature = "ptr_metadata_const", since = "1.83.0"))] #[must_use = "returns a new pointer rather than modifying its argument"] #[inline] pub const fn with_metadata_of(self, meta: *const U) -> *mut U @@ -285,11 +282,7 @@ impl *mut T { pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> { // SAFETY: the caller must guarantee that `self` is valid for a // reference if it isn't null. - if self.is_null() { - None - } else { - unsafe { Some(&*self) } - } + if self.is_null() { None } else { unsafe { Some(&*self) } } } /// Returns a shared reference to the value behind the pointer. @@ -364,11 +357,7 @@ impl *mut T { { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a reference. - if self.is_null() { - None - } else { - Some(unsafe { &*(self as *const MaybeUninit) }) - } + if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit) }) } } /// Adds a signed offset to a pointer. 
@@ -569,9 +558,7 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] pub const fn wrapping_byte_offset(self, count: isize) -> Self { - self.cast::() - .wrapping_offset(count) - .with_metadata_of(self) + self.cast::().wrapping_offset(count).with_metadata_of(self) } /// Masks out bits of the pointer according to a mask. @@ -612,9 +599,7 @@ impl *mut T { #[must_use = "returns a new pointer rather than modifying its argument"] #[inline(always)] pub fn mask(self, mask: usize) -> *mut T { - intrinsics::ptr_mask(self.cast::<()>(), mask) - .cast_mut() - .with_metadata_of(self) + intrinsics::ptr_mask(self.cast::<()>(), mask).cast_mut().with_metadata_of(self) } /// Returns `None` if the pointer is null, or else returns a unique reference to @@ -664,11 +649,7 @@ impl *mut T { pub const unsafe fn as_mut<'a>(self) -> Option<&'a mut T> { // SAFETY: the caller must guarantee that `self` is be valid for // a mutable reference if it isn't null. - if self.is_null() { - None - } else { - unsafe { Some(&mut *self) } - } + if self.is_null() { None } else { unsafe { Some(&mut *self) } } } /// Returns a unique reference to the value behind the pointer. @@ -729,11 +710,7 @@ impl *mut T { { // SAFETY: the caller must guarantee that `self` meets all the // requirements for a reference. - if self.is_null() { - None - } else { - Some(unsafe { &mut *(self as *mut MaybeUninit) }) - } + if self.is_null() { None } else { Some(unsafe { &mut *(self as *mut MaybeUninit) }) } } /// Returns whether two pointers are guaranteed to be equal. From 60ba93ec81f626b673b303b5dedaec42e4f5b47d Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Thu, 14 Nov 2024 20:01:09 -0800 Subject: [PATCH 03/16] Adds comment for amgic numbers --- library/core/src/ptr/mut_ptr.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 9474b680f7985..55bb607cba97e 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -2395,6 +2395,7 @@ pub mod verify { gen_mut_byte_arith_unit_harness!(byte_sub, check_mut_byte_sub_unit); gen_mut_byte_arith_unit_harness!(byte_offset, check_mut_byte_offset_unit); + // bounding space for PointerGenerator to accommodate 40 elements. const ARRAY_LEN: usize = 40; // generate proof for contracts for byte_add, byte_sub and byte_offset From 57c591f51d2dcb3d5c94d641c901c24c32c23463 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Thu, 14 Nov 2024 20:20:10 -0800 Subject: [PATCH 04/16] Adds some comments --- library/core/src/ptr/mut_ptr.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 55bb607cba97e..1649570b512fb 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -2363,7 +2363,10 @@ pub mod verify { use core::mem; use kani::PointerGenerator; - // generate proof for contracts for byte_add, byte_sub and byte_offset for unint types + // generate proof for contracts of byte_add, byte_sub and byte_offset to verify + // unit pointee type + // - `$fn_name`: function for which the contract must be verified + // - `$proof_name`: name of the harness generated macro_rules! 
gen_mut_byte_arith_unit_harness { (byte_offset, $proof_name:ident) => { #[kani::proof_for_contract(<*mut ()>::byte_offset)] @@ -2399,6 +2402,9 @@ pub mod verify { const ARRAY_LEN: usize = 40; // generate proof for contracts for byte_add, byte_sub and byte_offset + // - `$type`: pointee type + // - `$fn_name`: function for which the contract must be verified + // - `$proof_name`: name of the harness generated macro_rules! gen_mut_byte_arith_harness { ($type:ty, byte_offset, $proof_name:ident) => { #[kani::proof_for_contract(<*mut $type>::byte_offset)] From a02ed87ccabe595dfcbc9de4d291d63b74af7d59 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Fri, 15 Nov 2024 15:02:33 -0800 Subject: [PATCH 05/16] Adds slice harnesses --- library/core/src/ptr/mut_ptr.rs | 80 +++++++++++++++++++++++++++++++-- 1 file changed, 76 insertions(+), 4 deletions(-) diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 1649570b512fb..c1bbfbdffa363 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -2367,7 +2367,7 @@ pub mod verify { // unit pointee type // - `$fn_name`: function for which the contract must be verified // - `$proof_name`: name of the harness generated - macro_rules! gen_mut_byte_arith_unit_harness { + macro_rules! gen_mut_byte_arith_harness_for_unit { (byte_offset, $proof_name:ident) => { #[kani::proof_for_contract(<*mut ()>::byte_offset)] pub fn $proof_name() { @@ -2394,9 +2394,9 @@ pub mod verify { }; } - gen_mut_byte_arith_unit_harness!(byte_add, check_mut_byte_add_unit); - gen_mut_byte_arith_unit_harness!(byte_sub, check_mut_byte_sub_unit); - gen_mut_byte_arith_unit_harness!(byte_offset, check_mut_byte_offset_unit); + gen_mut_byte_arith_harness_for_unit!(byte_add, check_mut_byte_add_unit); + gen_mut_byte_arith_harness_for_unit!(byte_sub, check_mut_byte_sub_unit); + gen_mut_byte_arith_harness_for_unit!(byte_offset, check_mut_byte_offset_unit); // bounding space for PointerGenerator to accommodate 40 elements. const ARRAY_LEN: usize = 40; @@ -2515,4 +2515,76 @@ pub mod verify { byte_offset, check_mut_byte_offset_tuple_4 ); + + macro_rules! 
gen_mut_byte_arith_harness_for_slice { + ($type:ty, byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut [$type]>::byte_offset)] + pub fn $proof_name() { + let mut arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); + let slice: &mut [$type] = kani::slice::any_slice_of_array_mut(&mut arr); + let ptr: *mut [$type] = slice as *mut [$type]; + + let count: isize = kani::any(); + + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($type:ty, $fn_name: ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*mut [$type]>::$fn_name)] + pub fn $proof_name() { + let mut arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); + let slice: &mut [$type] = kani::slice::any_slice_of_array_mut(&mut arr); + let ptr: *mut [$type] = slice as *mut [$type]; + + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_mut_byte_arith_harness_for_slice!(i8, byte_add, check_mut_byte_add_i8_slice); + gen_mut_byte_arith_harness_for_slice!(i16, byte_add, check_mut_byte_add_i16_slice); + gen_mut_byte_arith_harness_for_slice!(i32, byte_add, check_mut_byte_add_i32_slice); + gen_mut_byte_arith_harness_for_slice!(i64, byte_add, check_mut_byte_add_i64_slice); + gen_mut_byte_arith_harness_for_slice!(i128, byte_add, check_mut_byte_add_i128_slice); + gen_mut_byte_arith_harness_for_slice!(isize, byte_add, check_mut_byte_add_isize_slice); + gen_mut_byte_arith_harness_for_slice!(u8, byte_add, check_mut_byte_add_u8_slice); + gen_mut_byte_arith_harness_for_slice!(u16, byte_add, check_mut_byte_add_u16_slice); + gen_mut_byte_arith_harness_for_slice!(u32, byte_add, check_mut_byte_add_u32_slice); + gen_mut_byte_arith_harness_for_slice!(u64, byte_add, check_mut_byte_add_u64_slice); + gen_mut_byte_arith_harness_for_slice!(u128, byte_add, check_mut_byte_add_u128_slice); + gen_mut_byte_arith_harness_for_slice!(usize, byte_add, check_mut_byte_add_usize_slice); + + gen_mut_byte_arith_harness_for_slice!(i8, byte_sub, check_mut_byte_sub_i8_slice); + gen_mut_byte_arith_harness_for_slice!(i16, byte_sub, check_mut_byte_sub_i16_slice); + gen_mut_byte_arith_harness_for_slice!(i32, byte_sub, check_mut_byte_sub_i32_slice); + gen_mut_byte_arith_harness_for_slice!(i64, byte_sub, check_mut_byte_sub_i64_slice); + gen_mut_byte_arith_harness_for_slice!(i128, byte_sub, check_mut_byte_sub_i128_slice); + gen_mut_byte_arith_harness_for_slice!(isize, byte_sub, check_mut_byte_sub_isize_slice); + gen_mut_byte_arith_harness_for_slice!(u8, byte_sub, check_mut_byte_sub_u8_slice); + gen_mut_byte_arith_harness_for_slice!(u16, byte_sub, check_mut_byte_sub_u16_slice); + gen_mut_byte_arith_harness_for_slice!(u32, byte_sub, check_mut_byte_sub_u32_slice); + gen_mut_byte_arith_harness_for_slice!(u64, byte_sub, check_mut_byte_sub_u64_slice); + gen_mut_byte_arith_harness_for_slice!(u128, byte_sub, check_mut_byte_sub_u128_slice); + gen_mut_byte_arith_harness_for_slice!(usize, byte_sub, check_mut_byte_sub_usize_slice); + + gen_mut_byte_arith_harness_for_slice!(i8, byte_offset, check_mut_byte_offset_i8_slice); + gen_mut_byte_arith_harness_for_slice!(i16, byte_offset, check_mut_byte_offset_i16_slice); + gen_mut_byte_arith_harness_for_slice!(i32, byte_offset, check_mut_byte_offset_i32_slice); + gen_mut_byte_arith_harness_for_slice!(i64, byte_offset, check_mut_byte_offset_i64_slice); + gen_mut_byte_arith_harness_for_slice!(i128, byte_offset, check_mut_byte_offset_i128_slice); + gen_mut_byte_arith_harness_for_slice!(isize, byte_offset, 
check_mut_byte_offset_isize_slice); + gen_mut_byte_arith_harness_for_slice!(u8, byte_offset, check_mut_byte_offset_u8_slice); + gen_mut_byte_arith_harness_for_slice!(u16, byte_offset, check_mut_byte_offset_u16_slice); + gen_mut_byte_arith_harness_for_slice!(u32, byte_offset, check_mut_byte_offset_u32_slice); + gen_mut_byte_arith_harness_for_slice!(u64, byte_offset, check_mut_byte_offset_u64_slice); + gen_mut_byte_arith_harness_for_slice!(u128, byte_offset, check_mut_byte_offset_u128_slice); + gen_mut_byte_arith_harness_for_slice!(usize, byte_offset, check_mut_byte_offset_usize_slice); } From 2fdebd6dcfdf1513ba9c54b132bb860817786207 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Sat, 16 Nov 2024 01:35:11 -0800 Subject: [PATCH 06/16] Adds contracts for byte_offset, byte_add and byte_sub - Adds harnesses for int, unit, composite and slice types --- library/core/src/ptr/const_ptr.rs | 288 ++++++++++++++++++++++++++++++ 1 file changed, 288 insertions(+) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 57a7c0fc0925c..a501e6113683d 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -3,6 +3,11 @@ use crate::cmp::Ordering::{Equal, Greater, Less}; use crate::intrinsics::const_eval_select; use crate::mem::SizedTypeProperties; use crate::slice::{self, SliceIndex}; +use safety::{ensures, requires}; + +#[cfg(kani)] +use crate::kani; +use core::mem; impl *const T { /// Returns `true` if the pointer is null. @@ -459,6 +464,22 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero, then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // are pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_add(count).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_offset(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. 
unsafe { self.cast::().offset(count).with_metadata_of(self) } @@ -972,6 +993,22 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero, then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // are pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_add(count as isize).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_add(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. unsafe { self.cast::().add(count).with_metadata_of(self) } @@ -1087,6 +1124,22 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + // If the size of the pointee is zero, then `count` must also be zero + (mem::size_of_val_raw(self) == 0 && count == 0) || + // If the size of the pointee is not zero then ensure that adding `count` + // bytes doesn't cause overflow and that both pointers `self` and the result + // would be pointing to the same address or in the same allocation + (mem::size_of_val_raw(self) != 0 && + (self as *mut u8 as isize).checked_sub(count as isize).is_some() && + ((self as *mut u8 as usize) == (self.wrapping_byte_sub(count) as *mut u8 as usize) || + kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T))) + )] + #[ensures(|result| + // The resulting pointer should either be unchanged or still point to the same allocation + ((self as *mut u8 as isize) == (*result as *mut u8 as isize)) || + (kani::mem::same_allocation(self as *const T, *result as *const T)) + )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. unsafe { self.cast::().sub(count).with_metadata_of(self) } @@ -1899,3 +1952,238 @@ impl PartialOrd for *const T { *self >= *other } } + +#[cfg(kani)] +#[unstable(feature = "kani", issue = "none")] +pub mod verify { + use crate::kani; + use core::mem; + use kani::PointerGenerator; + + // generate proof for contracts of byte_add, byte_sub and byte_offset to verify + // unit pointee type + // - `$fn_name`: function for which the contract must be verified + // - `$proof_name`: name of the harness generated + macro_rules! 
gen_const_byte_arith_harness_for_unit { + (byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*const ()>::byte_offset)] + pub fn $proof_name() { + let val = (); + let ptr: *const () = &val; + let count: isize = kani::any(); + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($fn_name:ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*const ()>::$fn_name)] + pub fn $proof_name() { + let val = (); + let ptr: *const () = &val; + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_const_byte_arith_harness_for_unit!(byte_add, check_const_byte_add_unit); + gen_const_byte_arith_harness_for_unit!(byte_sub, check_const_byte_sub_unit); + gen_const_byte_arith_harness_for_unit!(byte_offset, check_const_byte_offset_unit); + + // bounding space for PointerGenerator to accommodate 40 elements. + const ARRAY_LEN: usize = 40; + + + // generate proof for contracts for byte_add, byte_sub and byte_offset + // - `$type`: pointee type + // - `$fn_name`: function for which the contract must be verified + // - `$proof_name`: name of the harness generated + macro_rules! gen_const_byte_arith_harness { + ($type:ty, byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*const $type>::byte_offset)] + pub fn $proof_name() { + // generator with space for single element + let mut generator1 = PointerGenerator::<{ mem::size_of::<$type>() }>::new(); + // generator with space for multiple elements + let mut generator2 = + PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); + + let ptr: *const $type = if kani::any() { + generator1.any_in_bounds().ptr + } else { + generator2.any_in_bounds().ptr + }; + + let count: isize = kani::any(); + + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($type:ty, $fn_name:ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*const $type>::$fn_name)] + pub fn $proof_name() { + // generator with space for single element + let mut generator1 = PointerGenerator::<{ mem::size_of::<$type>() }>::new(); + // generator with space for multiple elements + let mut generator2 = + PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); + + let ptr: *const $type = if kani::any() { + generator1.any_in_bounds().ptr + } else { + generator2.any_in_bounds().ptr + }; + + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_const_byte_arith_harness!(i8, byte_add, check_const_byte_add_i8); + gen_const_byte_arith_harness!(i16, byte_add, check_const_byte_add_i16); + gen_const_byte_arith_harness!(i32, byte_add, check_const_byte_add_i32); + gen_const_byte_arith_harness!(i64, byte_add, check_const_byte_add_i64); + gen_const_byte_arith_harness!(i128, byte_add, check_const_byte_add_i128); + gen_const_byte_arith_harness!(isize, byte_add, check_const_byte_add_isize); + gen_const_byte_arith_harness!(u8, byte_add, check_const_byte_add_u8); + gen_const_byte_arith_harness!(u16, byte_add, check_const_byte_add_u16); + gen_const_byte_arith_harness!(u32, byte_add, check_const_byte_add_u32); + gen_const_byte_arith_harness!(u64, byte_add, check_const_byte_add_u64); + gen_const_byte_arith_harness!(u128, byte_add, check_const_byte_add_u128); + gen_const_byte_arith_harness!(usize, byte_add, check_const_byte_add_usize); + gen_const_byte_arith_harness!((i8, i8), byte_add, check_const_byte_add_tuple_1); + gen_const_byte_arith_harness!((f64, bool), 
byte_add, check_const_byte_add_tuple_2); + gen_const_byte_arith_harness!((i32, f64, bool), byte_add, check_const_byte_add_tuple_3); + gen_const_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_add, + check_const_byte_add_tuple_4 + ); + + gen_const_byte_arith_harness!(i8, byte_sub, check_const_byte_sub_i8); + gen_const_byte_arith_harness!(i16, byte_sub, check_const_byte_sub_i16); + gen_const_byte_arith_harness!(i32, byte_sub, check_const_byte_sub_i32); + gen_const_byte_arith_harness!(i64, byte_sub, check_const_byte_sub_i64); + gen_const_byte_arith_harness!(i128, byte_sub, check_const_byte_sub_i128); + gen_const_byte_arith_harness!(isize, byte_sub, check_const_byte_sub_isize); + gen_const_byte_arith_harness!(u8, byte_sub, check_const_byte_sub_u8); + gen_const_byte_arith_harness!(u16, byte_sub, check_const_byte_sub_u16); + gen_const_byte_arith_harness!(u32, byte_sub, check_const_byte_sub_u32); + gen_const_byte_arith_harness!(u64, byte_sub, check_const_byte_sub_u64); + gen_const_byte_arith_harness!(u128, byte_sub, check_const_byte_sub_u128); + gen_const_byte_arith_harness!(usize, byte_sub, check_const_byte_sub_usize); + gen_const_byte_arith_harness!((i8, i8), byte_sub, check_const_byte_sub_tuple_1); + gen_const_byte_arith_harness!((f64, bool), byte_sub, check_const_byte_sub_tuple_2); + gen_const_byte_arith_harness!((i32, f64, bool), byte_sub, check_const_byte_sub_tuple_3); + gen_const_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_sub, + check_const_byte_sub_tuple_4 + ); + + gen_const_byte_arith_harness!(i8, byte_offset, check_const_byte_offset_i8); + gen_const_byte_arith_harness!(i16, byte_offset, check_const_byte_offset_i16); + gen_const_byte_arith_harness!(i32, byte_offset, check_const_byte_offset_i32); + gen_const_byte_arith_harness!(i64, byte_offset, check_const_byte_offset_i64); + gen_const_byte_arith_harness!(i128, byte_offset, check_const_byte_offset_i128); + gen_const_byte_arith_harness!(isize, byte_offset, check_const_byte_offset_isize); + gen_const_byte_arith_harness!(u8, byte_offset, check_const_byte_offset_u8); + gen_const_byte_arith_harness!(u16, byte_offset, check_const_byte_offset_u16); + gen_const_byte_arith_harness!(u32, byte_offset, check_const_byte_offset_u32); + gen_const_byte_arith_harness!(u64, byte_offset, check_const_byte_offset_u64); + gen_const_byte_arith_harness!(u128, byte_offset, check_const_byte_offset_u128); + gen_const_byte_arith_harness!(usize, byte_offset, check_const_byte_offset_usize); + gen_const_byte_arith_harness!((i8, i8), byte_offset, check_const_byte_offset_tuple_1); + gen_const_byte_arith_harness!((f64, bool), byte_offset, check_const_byte_offset_tuple_2); + gen_const_byte_arith_harness!((i32, f64, bool), byte_offset, check_const_byte_offset_tuple_3); + gen_const_byte_arith_harness!( + (i8, u16, i32, u64, isize), + byte_offset, + check_const_byte_offset_tuple_4 + ); + + macro_rules! 
gen_const_byte_arith_harness_for_slice { + ($type:ty, byte_offset, $proof_name:ident) => { + #[kani::proof_for_contract(<*const [$type]>::byte_offset)] + pub fn $proof_name() { + let arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); + let slice: &[$type] = kani::slice::any_slice_of_array(&arr); + let ptr: *const [$type] = slice; + + let count: isize = kani::any(); + + unsafe { + ptr.byte_offset(count); + } + } + }; + + ($type:ty, $fn_name: ident, $proof_name:ident) => { + #[kani::proof_for_contract(<*const [$type]>::$fn_name)] + pub fn $proof_name() { + let arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); + let slice: &[$type] = kani::slice::any_slice_of_array(&arr); + let ptr: *const [$type] = slice; + + //byte_add and byte_sub need count to be usize unlike byte_offset + let count: usize = kani::any(); + + unsafe { + ptr.$fn_name(count); + } + } + }; + } + + gen_const_byte_arith_harness_for_slice!(i8, byte_add, check_const_byte_add_i8_slice); + gen_const_byte_arith_harness_for_slice!(i16, byte_add, check_const_byte_add_i16_slice); + gen_const_byte_arith_harness_for_slice!(i32, byte_add, check_const_byte_add_i32_slice); + gen_const_byte_arith_harness_for_slice!(i64, byte_add, check_const_byte_add_i64_slice); + gen_const_byte_arith_harness_for_slice!(i128, byte_add, check_const_byte_add_i128_slice); + gen_const_byte_arith_harness_for_slice!(isize, byte_add, check_const_byte_add_isize_slice); + gen_const_byte_arith_harness_for_slice!(u8, byte_add, check_const_byte_add_u8_slice); + gen_const_byte_arith_harness_for_slice!(u16, byte_add, check_const_byte_add_u16_slice); + gen_const_byte_arith_harness_for_slice!(u32, byte_add, check_const_byte_add_u32_slice); + gen_const_byte_arith_harness_for_slice!(u64, byte_add, check_const_byte_add_u64_slice); + gen_const_byte_arith_harness_for_slice!(u128, byte_add, check_const_byte_add_u128_slice); + gen_const_byte_arith_harness_for_slice!(usize, byte_add, check_const_byte_add_usize_slice); + + gen_const_byte_arith_harness_for_slice!(i8, byte_sub, check_const_byte_sub_i8_slice); + gen_const_byte_arith_harness_for_slice!(i16, byte_sub, check_const_byte_sub_i16_slice); + gen_const_byte_arith_harness_for_slice!(i32, byte_sub, check_const_byte_sub_i32_slice); + gen_const_byte_arith_harness_for_slice!(i64, byte_sub, check_const_byte_sub_i64_slice); + gen_const_byte_arith_harness_for_slice!(i128, byte_sub, check_const_byte_sub_i128_slice); + gen_const_byte_arith_harness_for_slice!(isize, byte_sub, check_const_byte_sub_isize_slice); + gen_const_byte_arith_harness_for_slice!(u8, byte_sub, check_const_byte_sub_u8_slice); + gen_const_byte_arith_harness_for_slice!(u16, byte_sub, check_const_byte_sub_u16_slice); + gen_const_byte_arith_harness_for_slice!(u32, byte_sub, check_const_byte_sub_u32_slice); + gen_const_byte_arith_harness_for_slice!(u64, byte_sub, check_const_byte_sub_u64_slice); + gen_const_byte_arith_harness_for_slice!(u128, byte_sub, check_const_byte_sub_u128_slice); + gen_const_byte_arith_harness_for_slice!(usize, byte_sub, check_const_byte_sub_usize_slice); + + gen_const_byte_arith_harness_for_slice!(i8, byte_offset, check_const_byte_offset_i8_slice); + gen_const_byte_arith_harness_for_slice!(i16, byte_offset, check_const_byte_offset_i16_slice); + gen_const_byte_arith_harness_for_slice!(i32, byte_offset, check_const_byte_offset_i32_slice); + gen_const_byte_arith_harness_for_slice!(i64, byte_offset, check_const_byte_offset_i64_slice); + gen_const_byte_arith_harness_for_slice!(i128, byte_offset, check_const_byte_offset_i128_slice); + 
gen_const_byte_arith_harness_for_slice!(isize, byte_offset, check_const_byte_offset_isize_slice); + gen_const_byte_arith_harness_for_slice!(u8, byte_offset, check_const_byte_offset_u8_slice); + gen_const_byte_arith_harness_for_slice!(u16, byte_offset, check_const_byte_offset_u16_slice); + gen_const_byte_arith_harness_for_slice!(u32, byte_offset, check_const_byte_offset_u32_slice); + gen_const_byte_arith_harness_for_slice!(u64, byte_offset, check_const_byte_offset_u64_slice); + gen_const_byte_arith_harness_for_slice!(u128, byte_offset, check_const_byte_offset_u128_slice); + gen_const_byte_arith_harness_for_slice!(usize, byte_offset, check_const_byte_offset_usize_slice); +} From cd0a13429c46e1037a913295bd7f4bd3215f24dc Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Sat, 16 Nov 2024 14:27:34 -0800 Subject: [PATCH 07/16] Runs rustfmt --- library/core/src/ptr/const_ptr.rs | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index a501e6113683d..08ff2f331b5cf 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -1998,7 +1998,6 @@ pub mod verify { // bounding space for PointerGenerator to accommodate 40 elements. const ARRAY_LEN: usize = 40; - // generate proof for contracts for byte_add, byte_sub and byte_offset // - `$type`: pointee type // - `$fn_name`: function for which the contract must be verified @@ -2108,7 +2107,11 @@ pub mod verify { gen_const_byte_arith_harness!(usize, byte_offset, check_const_byte_offset_usize); gen_const_byte_arith_harness!((i8, i8), byte_offset, check_const_byte_offset_tuple_1); gen_const_byte_arith_harness!((f64, bool), byte_offset, check_const_byte_offset_tuple_2); - gen_const_byte_arith_harness!((i32, f64, bool), byte_offset, check_const_byte_offset_tuple_3); + gen_const_byte_arith_harness!( + (i32, f64, bool), + byte_offset, + check_const_byte_offset_tuple_3 + ); gen_const_byte_arith_harness!( (i8, u16, i32, u64, isize), byte_offset, @@ -2179,11 +2182,19 @@ pub mod verify { gen_const_byte_arith_harness_for_slice!(i32, byte_offset, check_const_byte_offset_i32_slice); gen_const_byte_arith_harness_for_slice!(i64, byte_offset, check_const_byte_offset_i64_slice); gen_const_byte_arith_harness_for_slice!(i128, byte_offset, check_const_byte_offset_i128_slice); - gen_const_byte_arith_harness_for_slice!(isize, byte_offset, check_const_byte_offset_isize_slice); + gen_const_byte_arith_harness_for_slice!( + isize, + byte_offset, + check_const_byte_offset_isize_slice + ); gen_const_byte_arith_harness_for_slice!(u8, byte_offset, check_const_byte_offset_u8_slice); gen_const_byte_arith_harness_for_slice!(u16, byte_offset, check_const_byte_offset_u16_slice); gen_const_byte_arith_harness_for_slice!(u32, byte_offset, check_const_byte_offset_u32_slice); gen_const_byte_arith_harness_for_slice!(u64, byte_offset, check_const_byte_offset_u64_slice); gen_const_byte_arith_harness_for_slice!(u128, byte_offset, check_const_byte_offset_u128_slice); - gen_const_byte_arith_harness_for_slice!(usize, byte_offset, check_const_byte_offset_usize_slice); + gen_const_byte_arith_harness_for_slice!( + usize, + byte_offset, + check_const_byte_offset_usize_slice + ); } From 40eb29d1061fdc801b4e1b7ca974162a66f73cab Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Wed, 20 Nov 2024 23:55:15 -0800 Subject: [PATCH 08/16] Some refactoring --- library/core/src/ptr/mut_ptr.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git 
a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index c1bbfbdffa363..6f610cb03ab14 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -2372,7 +2372,7 @@ pub mod verify { #[kani::proof_for_contract(<*mut ()>::byte_offset)] pub fn $proof_name() { let mut val = (); - let ptr: *mut () = &mut val as *mut (); + let ptr: *mut () = &mut val; let count: isize = kani::any(); unsafe { ptr.byte_offset(count); @@ -2384,7 +2384,7 @@ pub mod verify { #[kani::proof_for_contract(<*mut ()>::$fn_name)] pub fn $proof_name() { let mut val = (); - let ptr: *mut () = &mut val as *mut (); + let ptr: *mut () = &mut val; //byte_add and byte_sub need count to be usize unlike byte_offset let count: usize = kani::any(); unsafe { @@ -2416,9 +2416,9 @@ pub mod verify { PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); let ptr: *mut $type = if kani::any() { - generator1.any_in_bounds().ptr as *mut $type + generator1.any_in_bounds().ptr } else { - generator2.any_in_bounds().ptr as *mut $type + generator2.any_in_bounds().ptr }; let count: isize = kani::any(); @@ -2438,9 +2438,9 @@ pub mod verify { PointerGenerator::<{ mem::size_of::<$type>() * ARRAY_LEN }>::new(); let ptr: *mut $type = if kani::any() { - generator1.any_in_bounds().ptr as *mut $type + generator1.any_in_bounds().ptr } else { - generator2.any_in_bounds().ptr as *mut $type + generator2.any_in_bounds().ptr }; //byte_add and byte_sub need count to be usize unlike byte_offset @@ -2522,7 +2522,7 @@ pub mod verify { pub fn $proof_name() { let mut arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); let slice: &mut [$type] = kani::slice::any_slice_of_array_mut(&mut arr); - let ptr: *mut [$type] = slice as *mut [$type]; + let ptr: *mut [$type] = slice; let count: isize = kani::any(); @@ -2537,7 +2537,7 @@ pub mod verify { pub fn $proof_name() { let mut arr: [$type; ARRAY_LEN] = kani::Arbitrary::any_array(); let slice: &mut [$type] = kani::slice::any_slice_of_array_mut(&mut arr); - let ptr: *mut [$type] = slice as *mut [$type]; + let ptr: *mut [$type] = slice; //byte_add and byte_sub need count to be usize unlike byte_offset let count: usize = kani::any(); From 81fe7c9ae7396f775490f37359e39357f4bdb33a Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Thu, 21 Nov 2024 00:38:38 -0800 Subject: [PATCH 09/16] Refactors the function contracts --- library/core/src/ptr/const_ptr.rs | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 08ff2f331b5cf..db5b08e59be90 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -471,14 +471,14 @@ impl *const T { // bytes doesn't cause overflow and that both pointers `self` and the result // are pointing to the same address or in the same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_add(count).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_offset(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T))) + (self as *const u8 as isize).checked_add(count).is_some() && + ((self as *const u8 as usize) == (self.wrapping_byte_offset(count) as *const u8 as usize) || + kani::mem::same_allocation(self, self.wrapping_byte_offset(count)))) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as usize) == (*result as *mut 
u8 as usize)) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + ((self as *const u8 as usize) == (*result as *const u8 as usize)) || + (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -1000,14 +1000,14 @@ impl *const T { // bytes doesn't cause overflow and that both pointers `self` and the result // are pointing to the same address or in the same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_add(count as isize).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_add(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T))) + (self as *const u8 as isize).checked_add(count as isize).is_some() && + ((self as *const u8 as usize) == (self.wrapping_byte_add(count) as *const u8 as usize) || + kani::mem::same_allocation(self, self.wrapping_byte_add(count)))) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + ((self as *const u8 as usize) == (*result as *const u8 as usize)) || + (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1131,14 +1131,14 @@ impl *const T { // bytes doesn't cause overflow and that both pointers `self` and the result // would be pointing to the same address or in the same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_sub(count as isize).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_sub(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T))) + (self as *const u8 as isize).checked_sub(count as isize).is_some() && + ((self as *const u8 as usize) == (self.wrapping_byte_sub(count) as *const u8 as usize) || + kani::mem::same_allocation(self, self.wrapping_byte_sub(count)))) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as isize) == (*result as *mut u8 as isize)) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + ((self as *const u8 as isize) == (*result as *const u8 as isize)) || + (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. 
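For reference, each `gen_*_byte_arith_harness!` invocation above expands into a standalone Kani harness. The sketch below shows roughly what `gen_mut_byte_arith_harness!(u32, byte_add, check_mut_byte_add_u32)` expands to, reconstructed from the macro body introduced in PATCH 01 (with the pointer-cast cleanup from PATCH 08); it is illustrative only, and all names (`ARRAY_LEN`, `PointerGenerator`, `check_mut_byte_add_u32`) mirror those already defined in these patches.

    #[kani::proof_for_contract(<*mut u32>::byte_add)]
    pub fn check_mut_byte_add_u32() {
        // generator with space for a single element
        let mut generator1 = PointerGenerator::<{ mem::size_of::<u32>() }>::new();
        // generator with space for ARRAY_LEN (40) elements
        let mut generator2 = PointerGenerator::<{ mem::size_of::<u32>() * ARRAY_LEN }>::new();

        // nondeterministically pick a pointer somewhere in bounds of one of the two allocations
        let ptr: *mut u32 = if kani::any() {
            generator1.any_in_bounds().ptr
        } else {
            generator2.any_in_bounds().ptr
        };

        // byte_add and byte_sub take a usize count, unlike byte_offset
        let count: usize = kani::any();

        // Kani verifies the #[requires]/#[ensures] contract attached to byte_add for this call
        unsafe {
            ptr.byte_add(count);
        }
    }

Under `proof_for_contract`, Kani assumes the function's `#[requires]` clause for the chosen `ptr` and `count` and then checks the `#[ensures]` clause, so the harness does not need to constrain the inputs beyond making them arbitrary; inputs that would violate the precondition (for example, an offset that leaves the allocation) are excluded from the analysis.
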
From 5e4f6162bc1ea7a4ea6be4b042b5eaf1e051155d Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Mon, 25 Nov 2024 23:00:39 -0800 Subject: [PATCH 10/16] Addresses commennts on PR --- library/core/src/ptr/const_ptr.rs | 58 ++++++++++++++----------------- library/core/src/ptr/mut_ptr.rs | 53 +++++++++++++--------------- 2 files changed, 51 insertions(+), 60 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index bbd6ee6d52fc5..85a3f81ba50d8 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -465,19 +465,18 @@ impl *const T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero, then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // are pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. + (count == 0) || + // Else if count is not zero, then ensure that adding `count` doesn't cause + // overflow and that both pointers `self` and the result are in the same + // allocation (mem::size_of_val_raw(self) != 0 && - (self as *const u8 as isize).checked_add(count).is_some() && - ((self as *const u8 as usize) == (self.wrapping_byte_offset(count) as *const u8 as usize) || - kani::mem::same_allocation(self, self.wrapping_byte_offset(count)))) + (self.addr() as isize).checked_add(count).is_some() && + kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *const u8 as usize) == (*result as *const u8 as usize)) || + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { @@ -1004,19 +1003,18 @@ impl *const T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero, then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // are pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. 
+ (count == 0) || + // Else if count is not zero, then ensure that adding `count` doesn't cause + // overflow and that both pointers `self` and the result are in the same + // allocation (mem::size_of_val_raw(self) != 0 && - (self as *const u8 as isize).checked_add(count as isize).is_some() && - ((self as *const u8 as usize) == (self.wrapping_byte_add(count) as *const u8 as usize) || - kani::mem::same_allocation(self, self.wrapping_byte_add(count)))) + (self.addr() as isize).checked_add(count as isize).is_some() && + kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *const u8 as usize) == (*result as *const u8 as usize)) || + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { @@ -1135,19 +1133,18 @@ impl *const T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // would be pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. + (count == 0) || + // Else if count is not zero, then ensure that subtracting `count` doesn't + // cause overflow and that both pointers `self` and the result are in the + // same allocation (mem::size_of_val_raw(self) != 0 && - (self as *const u8 as isize).checked_sub(count as isize).is_some() && - ((self as *const u8 as usize) == (self.wrapping_byte_sub(count) as *const u8 as usize) || - kani::mem::same_allocation(self, self.wrapping_byte_sub(count)))) + (self.addr() as isize).checked_sub(count as isize).is_some() && + kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|result| - // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *const u8 as isize) == (*result as *const u8 as isize)) || + // The resulting pointer should either be unchanged or still point to the same allocation + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self, *result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { @@ -1982,7 +1979,7 @@ mod verify { } } - // Array size bound for kani::any_array + // bounding space for PointerGenerator to accommodate 40 elements. const ARRAY_LEN: usize = 40; macro_rules! generate_offset_from_harness { @@ -2143,9 +2140,6 @@ mod verify { gen_const_byte_arith_harness_for_unit!(byte_sub, check_const_byte_sub_unit); gen_const_byte_arith_harness_for_unit!(byte_offset, check_const_byte_offset_unit); - // bounding space for PointerGenerator to accommodate 40 elements. 
- const ARRAY_LEN: usize = 40; - // generate proof for contracts for byte_add, byte_sub and byte_offset // - `$type`: pointee type // - `$fn_name`: function for which the contract must be verified diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 6f610cb03ab14..5ae20d7d9db59 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -463,19 +463,18 @@ impl *mut T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero, then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // are pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. + (count == 0) || + // Else if count is not zero, then ensure that subtracting `count` doesn't + // cause overflow and that both pointers `self` and the result are in the + // same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_add(count).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_offset(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T))) + (self.addr() as isize).checked_add(count).is_some() && + kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T)) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self as *const T, *result as *const T)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { @@ -1074,19 +1073,18 @@ impl *mut T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero, then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // are pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. 
+ (count == 0) || + // Else if count is not zero, then ensure that subtracting `count` doesn't + // cause overflow and that both pointers `self` and the result are in the + // same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_add(count as isize).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_add(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T))) + (self.addr() as isize).checked_add(count as isize).is_some() && + kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T)) )] #[ensures(|result| // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as usize) == (*result as *mut u8 as usize)) || + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self as *const T, *result as *const T)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { @@ -1205,19 +1203,18 @@ impl *mut T { #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( - // If the size of the pointee is zero, then `count` must also be zero - (mem::size_of_val_raw(self) == 0 && count == 0) || - // If the size of the pointee is not zero then ensure that adding `count` - // bytes doesn't cause overflow and that both pointers `self` and the result - // would be pointing to the same address or in the same allocation + // If count is zero, any pointer is valid including null pointer. + (count == 0) || + // Else if count is not zero, then ensure that subtracting `count` doesn't + // cause overflow and that both pointers `self` and the result are in the + // same allocation (mem::size_of_val_raw(self) != 0 && - (self as *mut u8 as isize).checked_sub(count as isize).is_some() && - ((self as *mut u8 as usize) == (self.wrapping_byte_sub(count) as *mut u8 as usize) || - kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T))) + (self.addr() as isize).checked_sub(count as isize).is_some() && + kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T)) )] #[ensures(|result| - // The resulting pointer should either be unchanged or still point to the same allocation - ((self as *mut u8 as isize) == (*result as *mut u8 as isize)) || + // The resulting pointer should either be unchanged or still point to the same allocation + (self.addr() == (*result).addr()) || (kani::mem::same_allocation(self as *const T, *result as *const T)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { From 534ce591fc6ebbd71c5a31e9366cae577fce005d Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Fri, 29 Nov 2024 15:36:31 -0800 Subject: [PATCH 11/16] Refactors the contracts to avoid repeated dereferencing --- library/core/src/ptr/const_ptr.rs | 18 +++++++++--------- library/core/src/ptr/mut_ptr.rs | 24 ++++++++++++------------ 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 85a3f81ba50d8..a359b8da74135 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -474,10 +474,10 @@ impl *const T { (self.addr() as isize).checked_add(count).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point 
to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self, *result)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -1012,10 +1012,10 @@ impl *const T { (self.addr() as isize).checked_add(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self, *result)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1142,10 +1142,10 @@ impl *const T { (self.addr() as isize).checked_sub(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self, *result)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 4f586e7835a26..1493abd92d0da 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -486,12 +486,12 @@ impl *mut T { // same allocation (mem::size_of_val_raw(self) != 0 && (self.addr() as isize).checked_add(count).is_some() && - kani::mem::same_allocation(self as *const T, self.wrapping_byte_offset(count) as *const T)) + kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -1113,12 +1113,12 @@ impl *mut T { // same allocation (mem::size_of_val_raw(self) != 0 && (self.addr() as isize).checked_add(count as isize).is_some() && - kani::mem::same_allocation(self as *const T, self.wrapping_byte_add(count) as *const T)) + kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. 
@@ -1260,12 +1260,12 @@ impl *mut T { // same allocation (mem::size_of_val_raw(self) != 0 && (self.addr() as isize).checked_sub(count as isize).is_some() && - kani::mem::same_allocation(self as *const T, self.wrapping_byte_sub(count) as *const T)) + kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] - #[ensures(|result| + #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation - (self.addr() == (*result).addr()) || - (kani::mem::same_allocation(self as *const T, *result as *const T)) + (self.addr() == result.addr()) || + (kani::mem::same_allocation(self, result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. From 5f2125c286d05834798833973fbc72ee909f6fd8 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Mon, 2 Dec 2024 01:36:01 -0800 Subject: [PATCH 12/16] Refactors the contracts and add harnesses to test invalid count / casting to unit type --- library/core/src/ptr/const_ptr.rs | 31 ++++++++++++++++++----- library/core/src/ptr/mut_ptr.rs | 41 ++++++++++++++++++++++--------- 2 files changed, 55 insertions(+), 17 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index a359b8da74135..de8866d8003e7 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -470,8 +470,7 @@ impl *const T { // Else if count is not zero, then ensure that adding `count` doesn't cause // overflow and that both pointers `self` and the result are in the same // allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_add(count).is_some() && + ((self.addr() as isize).checked_add(count).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) )] #[ensures(|&result| @@ -1008,8 +1007,7 @@ impl *const T { // Else if count is not zero, then ensure that adding `count` doesn't cause // overflow and that both pointers `self` and the result are in the same // allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_add(count as isize).is_some() && + ((self.addr() as isize).checked_add(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|&result| @@ -1138,8 +1136,7 @@ impl *const T { // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_sub(count as isize).is_some() && + ((self.addr() as isize).checked_sub(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|&result| @@ -2105,6 +2102,28 @@ mod verify { check_const_offset_from_tuple_4_arr ); + #[kani::proof_for_contract(<*const ()>::byte_offset)] + #[kani::should_panic] + pub fn check_const_byte_offset_unit_invalid_count() { + let val = (); + let ptr: *const () = &val; + let count: isize = kani::any_where(|&x| x != (mem::size_of::<()>() as isize)); + unsafe { + ptr.byte_offset(count); + } + } + + #[kani::proof_for_contract(<*const ()>::byte_offset)] + pub fn check_const_byte_offset_cast_unit() { + let mut generator = PointerGenerator::::new(); + let ptr: *const u8 = generator.any_in_bounds().ptr; + let ptr1: *const () = ptr as *const (); + let count: isize = kani::any(); + unsafe { + ptr1.byte_offset(count); + } + } + // generate proof for contracts of byte_add, byte_sub and byte_offset to 
verify // unit pointee type // - `$fn_name`: function for which the contract must be verified diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 1493abd92d0da..6d722e84b9462 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -484,8 +484,7 @@ impl *mut T { // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_add(count).is_some() && + ((self.addr() as isize).checked_add(count).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) )] #[ensures(|&result| @@ -1111,8 +1110,7 @@ impl *mut T { // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_add(count as isize).is_some() && + ((self.addr() as isize).checked_add(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|&result| @@ -1258,8 +1256,7 @@ impl *mut T { // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - (mem::size_of_val_raw(self) != 0 && - (self.addr() as isize).checked_sub(count as isize).is_some() && + ((self.addr() as isize).checked_sub(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|&result| @@ -2410,6 +2407,31 @@ mod verify { use core::mem; use kani::PointerGenerator; + // bounding space for PointerGenerator to accommodate 40 elements. + const ARRAY_LEN: usize = 40; + + #[kani::proof_for_contract(<*mut ()>::byte_offset)] + #[kani::should_panic] + pub fn check_mut_byte_offset_unit_invalid_count() { + let mut val = (); + let ptr: *mut () = &mut val; + let count: isize = kani::any_where(|&x| x > (mem::size_of::<()>() as isize)); + unsafe { + ptr.byte_offset(count); + } + } + + #[kani::proof_for_contract(<*mut ()>::byte_offset)] + pub fn check_mut_byte_offset_cast_unit() { + let mut generator = PointerGenerator::::new(); + let ptr: *mut u8 = generator.any_in_bounds().ptr; + let ptr1: *mut () = ptr as *mut (); + let count: isize = kani::any(); + unsafe { + ptr1.byte_offset(count); + } + } + // generate proof for contracts of byte_add, byte_sub and byte_offset to verify // unit pointee type // - `$fn_name`: function for which the contract must be verified @@ -2420,7 +2442,7 @@ mod verify { pub fn $proof_name() { let mut val = (); let ptr: *mut () = &mut val; - let count: isize = kani::any(); + let count: isize = mem::size_of::<()>() as isize; unsafe { ptr.byte_offset(count); } @@ -2433,7 +2455,7 @@ mod verify { let mut val = (); let ptr: *mut () = &mut val; //byte_add and byte_sub need count to be usize unlike byte_offset - let count: usize = kani::any(); + let count: usize = mem::size_of::<()>(); unsafe { ptr.$fn_name(count); } @@ -2445,9 +2467,6 @@ mod verify { gen_mut_byte_arith_harness_for_unit!(byte_sub, check_mut_byte_sub_unit); gen_mut_byte_arith_harness_for_unit!(byte_offset, check_mut_byte_offset_unit); - // bounding space for PointerGenerator to accommodate 40 elements. 
- const ARRAY_LEN: usize = 40; - // generate proof for contracts for byte_add, byte_sub and byte_offset // - `$type`: pointee type // - `$fn_name`: function for which the contract must be verified From 41f23a1b899fd8bfa5d68c2265e5e60c2789b1d7 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Mon, 2 Dec 2024 01:45:15 -0800 Subject: [PATCH 13/16] Fixes a bug in the unit type proofs --- library/core/src/ptr/const_ptr.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index de8866d8003e7..40579bd4c81b0 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -2134,7 +2134,7 @@ mod verify { pub fn $proof_name() { let val = (); let ptr: *const () = &val; - let count: isize = kani::any(); + let count: isize = mem::size_of::<()>() as isize; unsafe { ptr.byte_offset(count); } @@ -2147,7 +2147,7 @@ mod verify { let val = (); let ptr: *const () = &val; //byte_add and byte_sub need count to be usize unlike byte_offset - let count: usize = kani::any(); + let count: usize = mem::size_of::<()>(); unsafe { ptr.$fn_name(count); } From 32e8398d2858e9525ea73d2a6eb0011ae60980be Mon Sep 17 00:00:00 2001 From: Yifei Wang <1277495324@qq.com> Date: Wed, 4 Dec 2024 15:09:00 -0800 Subject: [PATCH 14/16] Formatted code using rustfmt. --- library/core/src/ptr/const_ptr.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 3b4748adc4203..1a401d4d386a4 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -1029,11 +1029,11 @@ impl *const T { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( // If count is zero, any pointer is valid including null pointer. - (count == 0) || + (count == 0) || // Else if count is not zero, then ensure that adding `count` doesn't cause // overflow and that both pointers `self` and the result are in the same // allocation - ((self.addr() as isize).checked_add(count as isize).is_some() && + ((self.addr() as isize).checked_add(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|&result| @@ -1173,11 +1173,11 @@ impl *const T { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( // If count is zero, any pointer is valid including null pointer. 
- (count == 0) || + (count == 0) || // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - ((self.addr() as isize).checked_sub(count as isize).is_some() && + ((self.addr() as isize).checked_sub(count as isize).is_some() && kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|&result| From eab2c6599011738e2130616949b78769cc6195d2 Mon Sep 17 00:00:00 2001 From: Surya Togaru Date: Wed, 4 Dec 2024 23:19:09 -0800 Subject: [PATCH 15/16] Refactors contracts to be tool agnostic --- library/core/src/ptr/const_ptr.rs | 12 ++++++------ library/core/src/ptr/mut_ptr.rs | 24 ++++++++++++------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index 1a401d4d386a4..229d97e3cf48d 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -482,12 +482,12 @@ impl *const T { // overflow and that both pointers `self` and the result are in the same // allocation ((self.addr() as isize).checked_add(count).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) + core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -1034,12 +1034,12 @@ impl *const T { // overflow and that both pointers `self` and the result are in the same // allocation ((self.addr() as isize).checked_add(count as isize).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_add(count))) + core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1178,12 +1178,12 @@ impl *const T { // cause overflow and that both pointers `self` and the result are in the // same allocation ((self.addr() as isize).checked_sub(count as isize).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) + core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index 106c0f19a144f..1ede380d0aeab 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -477,17 +477,17 @@ impl *mut T { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( // If count is zero, any pointer is valid including null pointer. 
- (count == 0) || + (count == 0) || // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - ((self.addr() as isize).checked_add(count).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_offset(count))) + ((self.addr() as isize).checked_add(count).is_some() && + core::ub_checks::same_allocation(self, self.wrapping_byte_offset(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_offset(self, count: isize) -> Self { // SAFETY: the caller must uphold the safety contract for `offset`. @@ -1102,17 +1102,17 @@ impl *mut T { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( // If count is zero, any pointer is valid including null pointer. - (count == 0) || + (count == 0) || // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - ((self.addr() as isize).checked_add(count as isize).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_add(count))) + ((self.addr() as isize).checked_add(count as isize).is_some() && + core::ub_checks::same_allocation(self, self.wrapping_byte_add(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_add(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `add`. @@ -1248,17 +1248,17 @@ impl *mut T { #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[requires( // If count is zero, any pointer is valid including null pointer. - (count == 0) || + (count == 0) || // Else if count is not zero, then ensure that subtracting `count` doesn't // cause overflow and that both pointers `self` and the result are in the // same allocation - ((self.addr() as isize).checked_sub(count as isize).is_some() && - kani::mem::same_allocation(self, self.wrapping_byte_sub(count))) + ((self.addr() as isize).checked_sub(count as isize).is_some() && + core::ub_checks::same_allocation(self, self.wrapping_byte_sub(count))) )] #[ensures(|&result| // The resulting pointer should either be unchanged or still point to the same allocation (self.addr() == result.addr()) || - (kani::mem::same_allocation(self, result)) + (core::ub_checks::same_allocation(self, result)) )] pub const unsafe fn byte_sub(self, count: usize) -> Self { // SAFETY: the caller must uphold the safety contract for `sub`. 
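
The tool-agnostic refactor above makes the contracts call core::ub_checks::same_allocation instead of kani::mem::same_allocation directly, but the series does not show the definition of that predicate. A minimal sketch of what such a shim in library/core/src/ub_checks.rs could look like, assuming it simply forwards to Kani's memory model when the library is built for verification and is a no-op otherwise (the exact definition is an assumption, not part of these patches):

// Hypothetical shape of the predicate (not taken from this series): forward to
// Kani's memory model under cfg(kani); otherwise compile to a no-op so the
// contract attributes still type-check in ordinary builds.
#[cfg(kani)]
mod predicates {
    /// Under Kani, defer to the verifier's notion of allocations.
    pub fn same_allocation<T: ?Sized>(src: *const T, dst: *const T) -> bool {
        crate::kani::mem::same_allocation(src, dst)
    }
}

#[cfg(not(kani))]
mod predicates {
    /// Outside of verification the predicate is vacuously true.
    pub fn same_allocation<T: ?Sized>(_src: *const T, _dst: *const T) -> bool {
        true
    }
}

pub use predicates::*;

Keeping the fallback vacuously true means the #[requires]/#[ensures] clauses remain compilable in regular builds, while only a verification tool gives the predicate real checking power.
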
From 0779d9224b5c00c762932f5eaee4c2d64970fbee Mon Sep 17 00:00:00 2001
From: Surya Togaru
Date: Wed, 4 Dec 2024 23:26:01 -0800
Subject: [PATCH 16/16] Adds a comment for a magic number

---
 library/core/src/ptr/const_ptr.rs | 4 +++-
 library/core/src/ptr/mut_ptr.rs   | 4 +++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index 229d97e3cf48d..a210821f08d10 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -2111,7 +2111,9 @@ mod verify {
         check_const_offset_slice_tuple_4
     );
 
-    // bounding space for PointerGenerator to accommodate 40 elements.
+    // The array length is an arbitrary value that bounds the space available
+    // to PointerGenerator. A dynamically sized array cannot be used here,
+    // because PointerGenerator does not support dynamically sized arrays.
     const ARRAY_LEN: usize = 40;
 
     macro_rules! generate_offset_from_harness {
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index 1ede380d0aeab..aea2df07f19cb 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -2223,7 +2223,9 @@ mod verify {
     use core::mem;
     use kani::PointerGenerator;
 
-    // bounding space for PointerGenerator to accommodate 40 elements.
+    // The array length is an arbitrary value that bounds the space available
+    // to PointerGenerator. A dynamically sized array cannot be used here,
+    // because PointerGenerator does not support dynamically sized arrays.
     const ARRAY_LEN: usize = 40;
 
     #[kani::proof_for_contract(<*mut ()>::byte_offset)]
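
For comparison with the unit-type harnesses, a concrete instantiation of the composite-type harness pattern referenced by the `$type`/`$fn_name`/`$proof_name` macro comments might look roughly as follows; the harness name, the i32 pointee, and the generator size are illustrative assumptions rather than code from the series:

// Illustrative expansion of a composite-type harness; the name, pointee type
// and buffer size are assumptions, not code taken from the patches.
#[kani::proof_for_contract(<*mut i32>::byte_add)]
pub fn check_mut_byte_add_i32() {
    // Bounded buffer for PointerGenerator, sized for ARRAY_LEN (40) elements.
    let mut generator = kani::PointerGenerator::<{ 40 * core::mem::size_of::<i32>() }>::new();
    let ptr: *mut i32 = generator.any_in_bounds().ptr;
    // Any count may be chosen; `proof_for_contract` assumes the `#[requires]`
    // clause of `byte_add`, so inputs violating the contract are filtered out.
    let count: usize = kani::any();
    unsafe {
        ptr.byte_add(count);
    }
}

Because the harness is marked proof_for_contract, the #[requires] clause is treated as an assumption and the #[ensures] clause is asserted on the returned pointer, which is exactly how the contracts added in this series are exercised.
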