Skip to content

Commit 168a020

Browse files
committed
Auto merge of rust-lang#92686 - saethlin:unsafe-debug-asserts, r=Amanieu
Add debug assertions to some unsafe functions. As suggested by rust-lang#51713. ~~Some similar code calls `abort()` instead of `panic!()` but aborting doesn't work in a `const fn`, and the intrinsic for doing dispatch based on whether execution is in a const is unstable.~~ This picked up some invalid uses of `get_unchecked` in the compiler, and fixes them. I can confirm that they do in fact pick up invalid uses of `get_unchecked` in the wild, though the user experience is less-than-awesome: ``` Running unittests (target/x86_64-unknown-linux-gnu/debug/deps/rle_decode_fast-04b7918da2001b50) running 6 tests error: test failed, to rerun pass '--lib' Caused by: process didn't exit successfully: `/home/ben/rle-decode-helper/target/x86_64-unknown-linux-gnu/debug/deps/rle_decode_fast-04b7918da2001b50` (signal: 4, SIGILL: illegal instruction) ``` ~~As best I can tell these changes produce a 6% regression in the runtime of `./x.py test` when `[rust] debug = true` is set.~~ The latest commit (rust-lang@6894d55) brings the additional overhead from this PR down to 0.5%, while also adding a few more assertions. I think this actually covers all the places in `core` where it is reasonable to check for safety requirements at runtime. Thoughts?
2 parents 15a242a + 6e6d0cb commit 168a020

File tree

8 files changed

+123
-128
lines changed

8 files changed

+123
-128
lines changed

compiler/rustc_data_structures/src/map_in_place.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -30,13 +30,13 @@ impl<T> MapInPlace<T> for Vec<T> {
3030
while read_i < old_len {
3131
// move the read_i'th item out of the vector and map it
3232
// to an iterator
33-
let e = ptr::read(self.get_unchecked(read_i));
33+
let e = ptr::read(self.as_ptr().add(read_i));
3434
let iter = f(e).into_iter();
3535
read_i += 1;
3636

3737
for e in iter {
3838
if write_i < read_i {
39-
ptr::write(self.get_unchecked_mut(write_i), e);
39+
ptr::write(self.as_mut_ptr().add(write_i), e);
4040
write_i += 1;
4141
} else {
4242
// If this is reached we ran out of space
@@ -76,13 +76,13 @@ impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
7676
while read_i < old_len {
7777
// move the read_i'th item out of the vector and map it
7878
// to an iterator
79-
let e = ptr::read(self.get_unchecked(read_i));
79+
let e = ptr::read(self.as_ptr().add(read_i));
8080
let iter = f(e).into_iter();
8181
read_i += 1;
8282

8383
for e in iter {
8484
if write_i < read_i {
85-
ptr::write(self.get_unchecked_mut(write_i), e);
85+
ptr::write(self.as_mut_ptr().add(write_i), e);
8686
write_i += 1;
8787
} else {
8888
// If this is reached we ran out of space

library/alloc/benches/vec.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -627,10 +627,10 @@ fn bench_map_regular(b: &mut Bencher) {
627627
fn bench_map_fast(b: &mut Bencher) {
628628
let data = black_box([(0, 0); LEN]);
629629
b.iter(|| {
630-
let mut result = Vec::with_capacity(data.len());
630+
let mut result: Vec<u32> = Vec::with_capacity(data.len());
631631
for i in 0..data.len() {
632632
unsafe {
633-
*result.get_unchecked_mut(i) = data[i].0;
633+
*result.as_mut_ptr().add(i) = data[i].0;
634634
result.set_len(i);
635635
}
636636
}

library/core/src/intrinsics.rs

+48-53
Original file line numberDiff line numberDiff line change
@@ -1969,6 +1969,40 @@ extern "rust-intrinsic" {
19691969
// (`transmute` also falls into this category, but it cannot be wrapped due to the
19701970
// check that `T` and `U` have the same size.)
19711971

1972+
/// Check that the preconditions of an unsafe function are followed, if debug_assertions are on,
1973+
/// and only at runtime.
1974+
///
1975+
/// # Safety
1976+
///
1977+
/// Invoking this macro is only sound if the following code is already UB when the passed
1978+
/// expression evaluates to false.
1979+
///
1980+
/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
1981+
/// compile time, but the semantics of the contained `const_eval_select` must be the same at
1982+
/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
1983+
/// different behavior at compile time and at runtime, and invoking it is incorrect.
1984+
///
1985+
/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
1986+
/// the occasional mistake, and this check should help them figure things out.
1987+
#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
1988+
macro_rules! assert_unsafe_precondition {
1989+
($e:expr) => {
1990+
if cfg!(debug_assertions) {
1991+
// Use a closure so that we can capture arbitrary expressions from the invocation
1992+
let runtime = || {
1993+
if !$e {
1994+
// abort instead of panicking to reduce impact on code size
1995+
::core::intrinsics::abort();
1996+
}
1997+
};
1998+
const fn comptime() {}
1999+
2000+
::core::intrinsics::const_eval_select((), comptime, runtime);
2001+
}
2002+
};
2003+
}
2004+
pub(crate) use assert_unsafe_precondition;
2005+
19722006
/// Checks whether `ptr` is properly aligned with respect to
19732007
/// `align_of::<T>()`.
19742008
pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
@@ -1977,7 +2011,6 @@ pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
19772011

19782012
/// Checks whether the regions of memory starting at `src` and `dst` of size
19792013
/// `count * size_of::<T>()` do *not* overlap.
1980-
#[cfg(debug_assertions)]
19812014
pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
19822015
let src_usize = src.addr();
19832016
let dst_usize = dst.addr();
@@ -2079,28 +2112,16 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
20792112
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
20802113
}
20812114

2082-
#[cfg(debug_assertions)]
2083-
fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
2084-
if !is_aligned_and_not_null(src)
2085-
|| !is_aligned_and_not_null(dst)
2086-
|| !is_nonoverlapping(src, dst, count)
2087-
{
2088-
// Not panicking to keep codegen impact smaller.
2089-
abort();
2090-
}
2091-
}
2092-
#[cfg(debug_assertions)]
2093-
const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
2094-
#[cfg(debug_assertions)]
2095-
// SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
2096-
// Therefore, compiletime_check and runtime_check are observably equivalent.
2097-
unsafe {
2098-
const_eval_select((src, dst, count), compiletime_check, runtime_check);
2099-
}
2100-
21012115
// SAFETY: the safety contract for `copy_nonoverlapping` must be
21022116
// upheld by the caller.
2103-
unsafe { copy_nonoverlapping(src, dst, count) }
2117+
unsafe {
2118+
assert_unsafe_precondition!(
2119+
is_aligned_and_not_null(src)
2120+
&& is_aligned_and_not_null(dst)
2121+
&& is_nonoverlapping(src, dst, count)
2122+
);
2123+
copy_nonoverlapping(src, dst, count)
2124+
}
21042125
}
21052126

21062127
/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -2173,24 +2194,11 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
21732194
fn copy<T>(src: *const T, dst: *mut T, count: usize);
21742195
}
21752196

2176-
#[cfg(debug_assertions)]
2177-
fn runtime_check<T>(src: *const T, dst: *mut T) {
2178-
if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
2179-
// Not panicking to keep codegen impact smaller.
2180-
abort();
2181-
}
2182-
}
2183-
#[cfg(debug_assertions)]
2184-
const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
2185-
#[cfg(debug_assertions)]
2186-
// SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
2187-
// Therefore, compiletime_check and runtime_check are observably equivalent.
2197+
// SAFETY: the safety contract for `copy` must be upheld by the caller.
21882198
unsafe {
2189-
const_eval_select((src, dst), compiletime_check, runtime_check);
2199+
assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
2200+
copy(src, dst, count)
21902201
}
2191-
2192-
// SAFETY: the safety contract for `copy` must be upheld by the caller.
2193-
unsafe { copy(src, dst, count) }
21942202
}
21952203

21962204
/// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -2274,24 +2282,11 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
22742282
fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
22752283
}
22762284

2277-
#[cfg(debug_assertions)]
2278-
fn runtime_check<T>(ptr: *mut T) {
2279-
debug_assert!(
2280-
is_aligned_and_not_null(ptr),
2281-
"attempt to write to unaligned or null pointer"
2282-
);
2283-
}
2284-
#[cfg(debug_assertions)]
2285-
const fn compiletime_check<T>(_ptr: *mut T) {}
2286-
#[cfg(debug_assertions)]
2287-
// SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
2288-
// not do them during compile time
2285+
// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
22892286
unsafe {
2290-
const_eval_select((dst,), compiletime_check, runtime_check);
2287+
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
2288+
write_bytes(dst, val, count)
22912289
}
2292-
2293-
// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
2294-
unsafe { write_bytes(dst, val, count) }
22952290
}
22962291

22972292
/// Selects which function to call depending on the context.

library/core/src/num/nonzero.rs

+5-1
Original file line numberDiff line numberDiff line change
@@ -52,9 +52,13 @@ macro_rules! nonzero_integers {
5252
#[$const_new_unchecked_stability]
5353
#[must_use]
5454
#[inline]
55+
#[rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics)] // required by assert_unsafe_precondition
5556
pub const unsafe fn new_unchecked(n: $Int) -> Self {
5657
// SAFETY: this is guaranteed to be safe by the caller.
57-
unsafe { Self(n) }
58+
unsafe {
59+
core::intrinsics::assert_unsafe_precondition!(n != 0);
60+
Self(n)
61+
}
5862
}
5963

6064
/// Creates a non-zero if the given value is not zero.

library/core/src/ptr/mod.rs

+20-10
Original file line numberDiff line numberDiff line change
@@ -332,7 +332,10 @@
332332
use crate::cmp::Ordering;
333333
use crate::fmt;
334334
use crate::hash;
335-
use crate::intrinsics::{self, abort, is_aligned_and_not_null};
335+
use crate::intrinsics::{
336+
self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
337+
};
338+
336339
use crate::mem::{self, MaybeUninit};
337340

338341
#[stable(feature = "rust1", since = "1.0.0")]
@@ -749,6 +752,16 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
749752
};
750753
}
751754

755+
// SAFETY: the caller must guarantee that `x` and `y` are
756+
// valid for writes and properly aligned.
757+
unsafe {
758+
assert_unsafe_precondition!(
759+
is_aligned_and_not_null(x)
760+
&& is_aligned_and_not_null(y)
761+
&& is_nonoverlapping(x, y, count)
762+
);
763+
}
764+
752765
// NOTE(scottmcm) MIRI is disabled here as reading in smaller units is a
753766
// pessimization for it. Also, if the type contains any unaligned pointers,
754767
// copying those over multiple reads is difficult to support.
@@ -839,6 +852,7 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
839852
// and cannot overlap `src` since `dst` must point to a distinct
840853
// allocated object.
841854
unsafe {
855+
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
842856
mem::swap(&mut *dst, &mut src); // cannot overlap
843857
}
844858
src
@@ -1318,12 +1332,11 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
13181332
#[inline]
13191333
#[stable(feature = "volatile", since = "1.9.0")]
13201334
pub unsafe fn read_volatile<T>(src: *const T) -> T {
1321-
if cfg!(debug_assertions) && !is_aligned_and_not_null(src) {
1322-
// Not panicking to keep codegen impact smaller.
1323-
abort();
1324-
}
13251335
// SAFETY: the caller must uphold the safety contract for `volatile_load`.
1326-
unsafe { intrinsics::volatile_load(src) }
1336+
unsafe {
1337+
assert_unsafe_precondition!(is_aligned_and_not_null(src));
1338+
intrinsics::volatile_load(src)
1339+
}
13271340
}
13281341

13291342
/// Performs a volatile write of a memory location with the given value without
@@ -1389,12 +1402,9 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
13891402
#[inline]
13901403
#[stable(feature = "volatile", since = "1.9.0")]
13911404
pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
1392-
if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) {
1393-
// Not panicking to keep codegen impact smaller.
1394-
abort();
1395-
}
13961405
// SAFETY: the caller must uphold the safety contract for `volatile_store`.
13971406
unsafe {
1407+
assert_unsafe_precondition!(is_aligned_and_not_null(dst));
13981408
intrinsics::volatile_store(dst, src);
13991409
}
14001410
}

library/core/src/slice/index.rs

+15-3
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
//! Indexing implementations for `[T]`.
22
3+
use crate::intrinsics::assert_unsafe_precondition;
34
use crate::intrinsics::const_eval_select;
45
use crate::ops;
56
use crate::ptr;
@@ -219,13 +220,19 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
219220
// cannot be longer than `isize::MAX`. They also guarantee that
220221
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
221222
// so the call to `add` is safe.
222-
unsafe { slice.as_ptr().add(self) }
223+
unsafe {
224+
assert_unsafe_precondition!(self < slice.len());
225+
slice.as_ptr().add(self)
226+
}
223227
}
224228

225229
#[inline]
226230
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
227231
// SAFETY: see comments for `get_unchecked` above.
228-
unsafe { slice.as_mut_ptr().add(self) }
232+
unsafe {
233+
assert_unsafe_precondition!(self < slice.len());
234+
slice.as_mut_ptr().add(self)
235+
}
229236
}
230237

231238
#[inline]
@@ -272,13 +279,18 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
272279
// cannot be longer than `isize::MAX`. They also guarantee that
273280
// `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
274281
// so the call to `add` is safe.
275-
unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }
282+
283+
unsafe {
284+
assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
285+
ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
286+
}
276287
}
277288

278289
#[inline]
279290
unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
280291
// SAFETY: see comments for `get_unchecked` above.
281292
unsafe {
293+
assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
282294
ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
283295
}
284296
}

library/core/src/slice/mod.rs

+16-17
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77
#![stable(feature = "rust1", since = "1.0.0")]
88

99
use crate::cmp::Ordering::{self, Greater, Less};
10+
use crate::intrinsics::{assert_unsafe_precondition, exact_div};
1011
use crate::marker::Copy;
1112
use crate::mem;
1213
use crate::num::NonZeroUsize;
@@ -656,15 +657,10 @@ impl<T> [T] {
656657
#[unstable(feature = "slice_swap_unchecked", issue = "88539")]
657658
#[rustc_const_unstable(feature = "const_swap", issue = "83163")]
658659
pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
659-
#[cfg(debug_assertions)]
660-
{
661-
let _ = &self[a];
662-
let _ = &self[b];
663-
}
664-
665660
let ptr = self.as_mut_ptr();
666661
// SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
667662
unsafe {
663+
assert_unsafe_precondition!(a < self.len() && b < self.len());
668664
ptr::swap(ptr.add(a), ptr.add(b));
669665
}
670666
}
@@ -970,11 +966,11 @@ impl<T> [T] {
970966
#[inline]
971967
#[must_use]
972968
pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
973-
debug_assert_ne!(N, 0);
974-
debug_assert_eq!(self.len() % N, 0);
975-
let new_len =
976-
// SAFETY: Our precondition is exactly what's needed to call this
977-
unsafe { crate::intrinsics::exact_div(self.len(), N) };
969+
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
970+
let new_len = unsafe {
971+
assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
972+
exact_div(self.len(), N)
973+
};
978974
// SAFETY: We cast a slice of `new_len * N` elements into
979975
// a slice of `new_len` many `N` elements chunks.
980976
unsafe { from_raw_parts(self.as_ptr().cast(), new_len) }
@@ -1109,11 +1105,11 @@ impl<T> [T] {
11091105
#[inline]
11101106
#[must_use]
11111107
pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
1112-
debug_assert_ne!(N, 0);
1113-
debug_assert_eq!(self.len() % N, 0);
1114-
let new_len =
1115-
// SAFETY: Our precondition is exactly what's needed to call this
1116-
unsafe { crate::intrinsics::exact_div(self.len(), N) };
1108+
// SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
1109+
let new_len = unsafe {
1110+
assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
1111+
exact_div(self.len(), N)
1112+
};
11171113
// SAFETY: We cast a slice of `new_len * N` elements into
11181114
// a slice of `new_len` many `N` elements chunks.
11191115
unsafe { from_raw_parts_mut(self.as_mut_ptr().cast(), new_len) }
@@ -1675,7 +1671,10 @@ impl<T> [T] {
16751671
//
16761672
// `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
16771673
// is fine.
1678-
unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
1674+
unsafe {
1675+
assert_unsafe_precondition!(mid <= len);
1676+
(from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
1677+
}
16791678
}
16801679

16811680
/// Divides one slice into an array and a remainder slice at an index.

0 commit comments

Comments (0)