@@ -1,4 +1,7 @@
+// compile-flags: -Zmiri-track-raw-pointers
 #![feature(new_uninit)]
+#![feature(slice_as_chunks)]
+#![feature(slice_partition_dedup)]
 
 use std::slice;
 
@@ -186,8 +189,70 @@ fn uninit_slice() {
     assert_eq!(values.iter().map(|x| **x).collect::<Vec<_>>(), vec![1, 2, 3])
 }
 
+/// Regression tests for slice methods in the Rust core library where raw pointers are obtained
+/// from mutable references.
+fn test_for_invalidated_pointers() {
+    let mut buffer = [0usize; 64];
+    let len = buffer.len();
+
+    // These regression tests (indirectly) call every slice method whose implementation contains a
+    // `self.as_mut_ptr()` call. `<[T]>::as_mut_ptr(&mut self)` takes a mutable reference (tagged
+    // `Unique`), which invalidates all other pointers previously derived from it under the
+    // Stacked Borrows model. An example of where this could go wrong is a prior bug inside
+    // `<[T]>::copy_within`:
+    //
+    //     unsafe {
+    //         core::ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
+    //     }
+    //
+    // The arguments to `core::ptr::copy` are evaluated from left to right: `self.as_ptr()` creates an
+    // immutable reference (tagged `SharedReadOnly` by Stacked Borrows) to the array and derives a
+    // valid `*const` pointer from it; `self.as_mut_ptr()` then creates a mutable reference (tagged
+    // `Unique`) to the array, which invalidates the `SharedReadOnly` tag and every pointer derived
+    // from it. The already-invalidated `*const` pointer is then used when `core::ptr::copy` finally
+    // runs, which is undefined behavior. (A standalone sketch of this pattern follows the diff.)
+
+    unsafe { assert_eq!(0, *buffer.as_mut_ptr_range().start); }
+    // While we're at it, check that the pointer range is in bounds:
+    let range = buffer.as_mut_ptr_range();
+    unsafe { assert_eq!(*range.start, *range.end.sub(len)); }
+
+    buffer.reverse();
+
+    // Calls `fn as_chunks_unchecked_mut` internally (requires unstable `#![feature(slice_as_chunks)]`):
+    assert_eq!(2, buffer.as_chunks_mut::<32>().0.len());
+    for chunk in buffer.as_chunks_mut::<32>().0 {
+        for elem in chunk {
+            *elem += 1;
+        }
+    }
+
+    // Calls `fn split_at_mut_unchecked` internally:
+    let split_mut = buffer.split_at_mut(32);
+    assert_eq!(split_mut.0, split_mut.1);
+
+    // Calls `fn partition_dedup_by` internally (requires unstable `#![feature(slice_partition_dedup)]`):
+    let partition_dedup = buffer.partition_dedup();
+    assert_eq!(1, partition_dedup.0.len());
+    partition_dedup.0[0] += 1;
+    for elem in partition_dedup.1 {
+        *elem += 1;
+    }
+
+    buffer.rotate_left(8);
+    buffer.rotate_right(16);
+
+    buffer.copy_from_slice(&[1usize; 64]);
+    buffer.swap_with_slice(&mut [2usize; 64]);
+
+    assert_eq!(0, unsafe { buffer.align_to_mut::<u8>().1[1] });
+
+    buffer.copy_within(1.., 0);
+}
+
 fn main() {
     slice_of_zst();
     test_iter_ref_consistency();
     uninit_slice();
+    test_for_invalidated_pointers();
 }
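
The comment in `test_for_invalidated_pointers` describes the bug class in the abstract; the sketch below shows it in isolation. This is not part of the patch, and `demo_copy_within` is a made-up name: it contrasts the buggy argument order with the usual fix of deriving every raw pointer from a single `as_mut_ptr()` call. Under Miri with `-Zmiri-track-raw-pointers`, the commented-out buggy line is reported as a Stacked Borrows violation, while the fixed version runs cleanly:

fn demo_copy_within(buf: &mut [u8], src_start: usize, dest: usize, count: usize) {
    unsafe {
        // Buggy order: `buf.as_mut_ptr()` in the second argument retags `buf` as
        // `Unique`, invalidating the `*const` pointer that the first argument just
        // derived via `buf.as_ptr()`:
        //
        //     core::ptr::copy(buf.as_ptr().add(src_start), buf.as_mut_ptr().add(dest), count);

        // Fixed order: create the mutable raw pointer once, then derive both the
        // source and the destination pointer from it, so no later retag can
        // invalidate an earlier pointer.
        let ptr = buf.as_mut_ptr();
        core::ptr::copy(ptr.add(src_start), ptr.add(dest), count);
    }
}

fn main() {
    let mut buf = [1u8, 2, 3, 4, 5, 6, 7, 8];
    // Shift the slice left by two; the ranges overlap, which `ptr::copy`
    // (memmove-like) handles correctly.
    demo_copy_within(&mut buf, 2, 0, 6);
    assert_eq!(buf, [3, 4, 5, 6, 7, 8, 7, 8]);
}

A note on running it: inside Miri's own test suite the `// compile-flags:` header at the top of the file passes `-Zmiri-track-raw-pointers` automatically; in an ordinary cargo project, something like `MIRIFLAGS="-Zmiri-track-raw-pointers" cargo miri run` (assuming cargo-miri is installed) should enable the same raw-pointer tracking.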