@@ -526,6 +526,11 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[requires(
+        (self.as_ptr().addr() as isize).checked_add(count).is_some() &&
+        kani::mem::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_offset(count))
+    )]
+    #[ensures(|result: &Self| result.as_ptr() == self.as_ptr().wrapping_byte_offset(count))]
     pub const unsafe fn byte_offset(self, count: isize) -> Self {
         // SAFETY: the caller must uphold the safety contract for `offset` and `byte_offset` has
         // the same safety contract.
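As a quick illustration (not part of the diff), a call that satisfies the `byte_offset` contract above looks like the following sketch: the byte offset neither overflows the address arithmetic nor leaves the original allocation.

    use std::ptr::NonNull;

    fn main() {
        let mut data = [0u8; 16];
        let ptr = NonNull::new(data.as_mut_ptr()).unwrap();
        // Moving 8 bytes forward stays inside the 16-byte array, so both
        // `requires` conjuncts (no overflow, same allocation) hold.
        let shifted = unsafe { ptr.byte_offset(8) };
        assert_eq!(shifted.as_ptr() as usize, ptr.as_ptr() as usize + 8);
    }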
@@ -607,6 +612,10 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[requires(
+        count <= (isize::MAX as usize) - self.as_ptr().addr() && // Ensure the offset doesn't overflow
+        (count == 0 || kani::mem::same_allocation(self.as_ptr(), self.as_ptr().wrapping_byte_add(count))) // Ensure the offset is within the same allocation
+    )]
     pub const unsafe fn byte_add(self, count: usize) -> Self {
         // SAFETY: the caller must uphold the safety contract for `add` and `byte_add` has the same
         // safety contract.
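For context (not part of the diff), the `count == 0` disjunct in the precondition reflects that a zero-byte offset never has to stay inside any allocation, which is also why the dangling-pointer harness further down can call `byte_add(0)`. A minimal sketch:

    use std::ptr::NonNull;

    fn main() {
        // A zero-byte `byte_add` never moves the pointer, so it is fine even
        // for a dangling `NonNull` that points into no allocation.
        let dangling = NonNull::<i32>::dangling();
        let same = unsafe { dangling.byte_add(0) };
        assert_eq!(same, dangling);
    }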
@@ -820,6 +829,14 @@ impl<T: ?Sized> NonNull<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "non_null_convenience", since = "1.80.0")]
+    #[requires(
+        self.as_ptr().addr() == origin.as_ptr().addr() ||
+        kani::mem::same_allocation(self.as_ptr() as *const (), origin.as_ptr() as *const ())
+    )]
+    #[ensures(
+        |result: &isize|
+        *result == (self.as_ptr() as *const u8).offset_from(origin.as_ptr() as *const u8)
+    )]
     pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: NonNull<U>) -> isize {
         // SAFETY: the caller must uphold the safety contract for `byte_offset_from`.
         unsafe { self.pointer.byte_offset_from(origin.pointer) }
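To illustrate the contract above (again, not part of the diff), `byte_offset_from` on two pointers into the same allocation returns their distance in bytes, which is exactly what the postcondition states:

    use std::ptr::NonNull;

    fn main() {
        let data = [0u16; 4];
        let first = NonNull::new(data.as_ptr() as *mut u16).unwrap();
        let third = NonNull::new(data[2..].as_ptr() as *mut u16).unwrap();
        // Both pointers lie in the same array, so the precondition holds; the
        // result is the distance in bytes: 2 elements * 2 bytes = 4.
        let diff = unsafe { third.byte_offset_from(first) };
        assert_eq!(diff, 4);
    }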
@@ -1736,6 +1753,7 @@ impl<T: ?Sized> From<&T> for NonNull<T> {
 mod verify {
     use super::*;
     use crate::ptr::null_mut;
+    use crate::mem;
     use kani::PointerGenerator;
 
     trait SampleTrait {
@@ -2345,4 +2363,72 @@ mod verify {
     generate_write_volatile_harness!(usize, {core::mem::size_of::<usize>()}, non_null_check_write_volatile_usize);
     generate_write_volatile_harness!((), 1, non_null_check_write_volatile_unit);
 
+    #[kani::proof_for_contract(NonNull::byte_add)]
+    pub fn non_null_byte_add_proof() {
+        // Use a size of 1000 so the array is large enough to cover various scenarios
+        // while maintaining a reasonable proof runtime
+        const ARR_SIZE: usize = mem::size_of::<i32>() * 1000;
+        let mut generator = PointerGenerator::<ARR_SIZE>::new();
+
+        let count: usize = kani::any();
+        let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32;
+
+        unsafe {
+            let ptr = NonNull::new(raw_ptr).unwrap();
+            let result = ptr.byte_add(count);
+        }
+    }
+
+    #[kani::proof_for_contract(NonNull::byte_add)]
+    pub fn non_null_byte_add_dangling_proof() {
+        let ptr = NonNull::<i32>::dangling();
+        unsafe {
+            let _ = ptr.byte_add(0);
+        }
+    }
+
+    #[kani::proof_for_contract(NonNull::byte_offset)]
+    pub fn non_null_byte_offset_proof() {
+        const ARR_SIZE: usize = mem::size_of::<i32>() * 1000;
+        let mut generator = PointerGenerator::<ARR_SIZE>::new();
+
+        let count: isize = kani::any();
+        let raw_ptr: *mut i32 = generator.any_in_bounds().ptr as *mut i32;
+
+        unsafe {
+            let ptr = NonNull::new(raw_ptr).unwrap();
+            let result = ptr.byte_offset(count);
+        }
+    }
+
+    #[kani::proof_for_contract(NonNull::byte_offset_from)]
+    pub fn non_null_byte_offset_from_proof() {
+        const SIZE: usize = mem::size_of::<i32>() * 1000;
+        let mut generator1 = PointerGenerator::<SIZE>::new();
+        let mut generator2 = PointerGenerator::<SIZE>::new();
+
+        let ptr: *mut i32 = generator1.any_in_bounds().ptr as *mut i32;
+
+        let origin: *mut i32 = if kani::any() {
+            generator1.any_in_bounds().ptr as *mut i32
+        } else {
+            generator2.any_in_bounds().ptr as *mut i32
+        };
+
+        let ptr_nonnull = unsafe { NonNull::new(ptr).unwrap() };
+        let origin_nonnull = unsafe { NonNull::new(origin).unwrap() };
+
+        unsafe {
+            let result = ptr_nonnull.byte_offset_from(origin_nonnull);
+        }
+    }
+
+    #[kani::proof_for_contract(NonNull::byte_offset_from)]
+    pub fn non_null_byte_offset_from_dangling_proof() {
+        let origin = NonNull::<i32>::dangling();
+        let ptr = origin;
+        unsafe {
+            let _ = ptr.byte_offset_from(origin);
+        }
+    }
 }
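These harnesses follow the existing pattern in `mod verify`. Assuming the standard Kani command-line workflow, an individual harness can be checked with something along the lines of `cargo kani --harness non_null_byte_add_proof`; the exact invocation used by this repository's verification setup may differ.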