diff --git a/src/lib.rs b/src/lib.rs
index 43afb11..8f0c749 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -21,7 +21,7 @@
 #![warn(missing_docs)]
 #![deny(unsafe_op_in_unsafe_fn)]

-use access::{Access, ReadOnly, ReadWrite, WriteOnly};
+use access::{Access, NoAccess, ReadOnly, ReadWrite, WriteOnly};
 use core::{
     fmt,
     marker::PhantomData,
@@ -57,26 +57,57 @@ pub mod access;
 /// let field_2 = map_field!(volatile.field_2);
 /// assert_eq!(field_2.read(), 255);
 /// ```
+///
+/// Creating `VolatilePtr`s to unaligned fields in packed structs is not allowed:
+/// ```compile_fail
+/// # extern crate core;
+/// use volatile::{VolatilePtr, map_field};
+/// use core::ptr::NonNull;
+///
+/// #[repr(packed)]
+/// struct Example { field_1: u8, field_2: usize, }
+/// let mut value = Example { field_1: 15, field_2: 255 };
+/// let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(&mut value)) };
+///
+/// // Constructing a volatile reference to an unaligned field doesn't compile.
+/// let field_2 = map_field!(volatile.field_2);
+/// ```
 #[macro_export]
 macro_rules! map_field {
-    ($volatile:ident.$place:ident) => {
+    ($volatile:ident.$place:ident) => {{
+        // Simulate creating a reference to the field. This is done to make
+        // sure that the field is not potentially unaligned. The body of the
+        // if statement will never be executed, so it can never cause any UB.
+        if false {
+            #[deny(unaligned_references)]
+            let _ref_to_field = &(unsafe { &*$volatile.as_ptr().as_ptr() }).$place;
+        }
+
         unsafe {
             $volatile.map(|ptr| {
                 core::ptr::NonNull::new(core::ptr::addr_of_mut!((*ptr.as_ptr()).$place)).unwrap()
             })
         }
-    };
+    }};
 }

 #[macro_export]
 macro_rules! map_field_mut {
-    ($volatile:ident.$place:ident) => {
+    ($volatile:ident.$place:ident) => {{
+        // Simulate creating a reference to the field. This is done to make
+        // sure that the field is not potentially unaligned. The body of the
+        // if statement will never be executed, so it can never cause any UB.
+        if false {
+            #[deny(unaligned_references)]
+            let _ref_to_field = &(unsafe { &*$volatile.as_ptr().as_ptr() }).$place;
+        }
+
         unsafe {
             $volatile.map_mut(|ptr| {
                 core::ptr::NonNull::new(core::ptr::addr_of_mut!((*ptr.as_ptr()).$place)).unwrap()
             })
         }
-    };
+    }};
 }

 // this must be defined after the `map_field` macros
@@ -306,10 +337,20 @@
 }

 /// Transformation methods for accessing struct fields
-impl<T, R, W> VolatilePtr<'_, T, Access<R, W>>
+impl<'a, T, R, W> VolatilePtr<'a, T, Access<R, W>>
 where
     T: ?Sized,
 {
+    // TODO: Add documentation
+    pub fn borrow(&self) -> VolatilePtr<T, Access<R, NoAccess>> {
+        unsafe { VolatilePtr::new_generic(self.pointer) }
+    }
+
+    // TODO: Add documentation
+    pub fn borrow_mut(&mut self) -> VolatilePtr<T, Access<R, W>> {
+        unsafe { VolatilePtr::new_generic(self.pointer) }
+    }
+
     /// Constructs a new `Volatile` reference by mapping the wrapped pointer.
     ///
     /// This method is useful for accessing only a part of a volatile value, e.g. a subslice or
@@ -351,7 +392,7 @@ where
     ///     value
     /// })};
     /// ```
-    pub unsafe fn map<'a, F, U>(&'a self, f: F) -> VolatilePtr<'a, U, Access<R, NoAccess>>
+    pub unsafe fn map<F, U>(self, f: F) -> VolatilePtr<'a, U, Access<R, NoAccess>>
     where
         F: FnOnce(NonNull<T>) -> NonNull<U>,
         U: ?Sized,
     {
@@ -360,8 +401,8 @@ where
     }

     #[cfg(feature = "very_unstable")]
-    pub const unsafe fn map_const<'a, F, U>(
-        &'a self,
+    pub const unsafe fn map_const<F, U>(
+        self,
         f: F,
     ) -> VolatilePtr<'a, U, Access<R, NoAccess>>
     where
@@ -371,7 +412,7 @@ where
         unsafe { VolatilePtr::new_generic(f(self.pointer)) }
     }

-    pub unsafe fn map_mut<F, U>(&mut self, f: F) -> VolatilePtr<U, Access<R, W>>
+    pub unsafe fn map_mut<F, U>(self, f: F) -> VolatilePtr<'a, U, Access<R, W>>
     where
         F: FnOnce(NonNull<T>) -> NonNull<U>,
         U: ?Sized,
     {
@@ -380,7 +421,7 @@ where
     }

     #[cfg(feature = "very_unstable")]
-    pub const unsafe fn map_mut_const<F, U>(&mut self, f: F) -> VolatilePtr<U, Access<R, W>>
+    pub const unsafe fn map_mut_const<F, U>(self, f: F) -> VolatilePtr<'a, U, Access<R, W>>
     where
         F: FnOnce(NonNull<T>) -> NonNull<U>,
         U: ?Sized,
     {
@@ -396,6 +437,10 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
         self.pointer.len()
     }

+    pub fn is_empty(&self) -> bool {
+        self.pointer.len() == 0
+    }
+
     /// Applies the index operation on the wrapped slice.
     ///
     /// Returns a shared `Volatile` reference to the resulting subslice.
@@ -433,9 +478,9 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     /// assert_eq!(subslice.index(0).read(), 2);
     /// ```
     pub fn index<I>(
-        &self,
+        self,
         index: I,
-    ) -> VolatilePtr<<I as SliceIndex<[T]>>::Output, Access<R, NoAccess>>
+    ) -> VolatilePtr<'a, <I as SliceIndex<[T]>>::Output, Access<R, NoAccess>>
     where
         I: SliceIndex<[T]> + SliceIndex<[()]> + Clone,
     {
@@ -445,7 +490,10 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     #[cfg(feature = "very_unstable")]
-    pub const fn index_const(&self, index: usize) -> VolatilePtr<T, Access<R, NoAccess>> {
+    pub const fn index_const(
+        self,
+        index: usize,
+    ) -> VolatilePtr<'a, T, Access<R, NoAccess>> {
         assert!(index < self.pointer.len(), "index out of bounds");
         unsafe {
             self.map_const(|slice| {
@@ -455,9 +503,9 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     pub fn index_mut<I>(
-        &mut self,
+        self,
         index: I,
-    ) -> VolatilePtr<<I as SliceIndex<[T]>>::Output, Access<R, W>>
+    ) -> VolatilePtr<'a, <I as SliceIndex<[T]>>::Output, Access<R, W>>
     where
         I: SliceIndex<[T]> + SliceIndex<[()]> + Clone,
     {
@@ -467,7 +515,7 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     #[cfg(feature = "very_unstable")]
-    pub const fn index_mut_const(&mut self, index: usize) -> VolatilePtr<T, Access<R, W>> {
+    pub const fn index_mut_const(self, index: usize) -> VolatilePtr<'a, T, Access<R, W>> {
         assert!(index < self.pointer.len(), "index out of bounds");
         unsafe {
             self.map_mut_const(|slice| {
@@ -476,6 +524,22 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
         }
     }

+    /// Returns an iterator over the slice.
+    pub fn iter(self) -> impl Iterator<Item = VolatilePtr<'a, T, Access<R, NoAccess>>> {
+        let ptr = self.as_ptr().as_ptr() as *mut T;
+        let len = self.len();
+        (0..len)
+            .map(move |i| unsafe { VolatilePtr::new_generic(NonNull::new_unchecked(ptr.add(i))) })
+    }
+
+    /// Returns an iterator that allows modifying each value.
+    pub fn iter_mut(self) -> impl Iterator<Item = VolatilePtr<'a, T, Access<R, W>>> {
+        let ptr = self.as_ptr().as_ptr() as *mut T;
+        let len = self.len();
+        (0..len)
+            .map(move |i| unsafe { VolatilePtr::new_generic(NonNull::new_unchecked(ptr.add(i))) })
+    }
+
     /// Copies all elements from `self` into `dst`, using a volatile memcpy.
     ///
     /// The length of `dst` must be the same as `self`.
@@ -513,6 +577,7 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     pub fn copy_into_slice(&self, dst: &mut [T])
     where
         T: Copy,
+        R: access::Safe,
     {
         let len = self.pointer.len();
         assert_eq!(
@@ -569,6 +634,7 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     pub fn copy_from_slice(&mut self, src: &[T])
     where
         T: Copy,
+        W: access::Safe,
     {
         let len = self.pointer.len();
         assert_eq!(
@@ -622,6 +688,8 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     pub fn copy_within(&mut self, src: impl RangeBounds<usize>, dest: usize)
     where
         T: Copy,
+        R: access::Safe,
+        W: access::Safe,
     {
         let len = self.pointer.len();
         // implementation taken from https://github.com/rust-lang/rust/blob/683d1bcd405727fcc9209f64845bd3b9104878b8/library/core/src/slice/mod.rs#L2726-L2738
@@ -643,11 +711,11 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     pub fn split_at(
-        &self,
+        self,
         mid: usize,
     ) -> (
-        VolatilePtr<[T], Access<R, NoAccess>>,
-        VolatilePtr<[T], Access<R, NoAccess>>,
+        VolatilePtr<'a, [T], Access<R, NoAccess>>,
+        VolatilePtr<'a, [T], Access<R, NoAccess>>,
     ) {
         assert!(mid <= self.pointer.len());
         // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
@@ -656,11 +724,11 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     pub fn split_at_mut(
-        &mut self,
+        self,
         mid: usize,
     ) -> (
-        VolatilePtr<[T], Access<R, W>>,
-        VolatilePtr<[T], Access<R, W>>,
+        VolatilePtr<'a, [T], Access<R, W>>,
+        VolatilePtr<'a, [T], Access<R, W>>,
     ) {
         assert!(mid <= self.pointer.len());
         // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
@@ -669,11 +737,11 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     unsafe fn split_at_unchecked(
-        &self,
+        self,
         mid: usize,
     ) -> (
-        VolatilePtr<[T], Access<R, NoAccess>>,
-        VolatilePtr<[T], Access<R, NoAccess>>,
+        VolatilePtr<'a, [T], Access<R, NoAccess>>,
+        VolatilePtr<'a, [T], Access<R, NoAccess>>,
     ) {
         // SAFETY: Caller has to check that `0 <= mid <= self.len()`
         unsafe {
@@ -685,11 +753,11 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     unsafe fn split_at_mut_unchecked(
-        &mut self,
+        self,
         mid: usize,
     ) -> (
-        VolatilePtr<[T], Access<R, W>>,
-        VolatilePtr<[T], Access<R, W>>,
+        VolatilePtr<'a, [T], Access<R, W>>,
+        VolatilePtr<'a, [T], Access<R, W>>,
     ) {
         let len = self.pointer.len();
         let ptr = self.pointer.as_mut_ptr();
@@ -711,23 +779,23 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     pub fn as_chunks<const N: usize>(
-        &self,
+        self,
     ) -> (
-        VolatilePtr<[[T; N]], Access<R, NoAccess>>,
-        VolatilePtr<[T], Access<R, NoAccess>>,
+        VolatilePtr<'a, [[T; N]], Access<R, NoAccess>>,
+        VolatilePtr<'a, [T], Access<R, NoAccess>>,
     ) {
         assert_ne!(N, 0);
         let len = self.pointer.len() / N;
         let (multiple_of_n, remainder) = self.split_at(len * N);
         // SAFETY: We already panicked for zero, and ensured by construction
         // that the length of the subslice is a multiple of N.
-        let array_slice = unsafe { multiple_of_n.as_chunks_unchecked_by_val() };
+        let array_slice = unsafe { multiple_of_n.as_chunks_unchecked() };
         (array_slice, remainder)
     }

     pub unsafe fn as_chunks_unchecked<const N: usize>(
-        &self,
-    ) -> VolatilePtr<[[T; N]], Access<R, NoAccess>> {
+        self,
+    ) -> VolatilePtr<'a, [[T; N]], Access<R, NoAccess>> {
         debug_assert_ne!(N, 0);
         debug_assert_eq!(self.pointer.len() % N, 0);
         let new_len =
@@ -744,39 +812,21 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
     }

     pub fn as_chunks_mut<const N: usize>(
-        &mut self,
+        self,
     ) -> (
-        VolatilePtr<[[T; N]], Access<R, W>>,
-        VolatilePtr<[T], Access<R, W>>,
+        VolatilePtr<'a, [[T; N]], Access<R, W>>,
+        VolatilePtr<'a, [T], Access<R, W>>,
     ) {
         assert_ne!(N, 0);
         let len = self.pointer.len() / N;
         let (multiple_of_n, remainder) = self.split_at_mut(len * N);
         // SAFETY: We already panicked for zero, and ensured by construction
         // that the length of the subslice is a multiple of N.
-        let array_slice = unsafe { multiple_of_n.as_chunks_unchecked_by_val() };
+        let array_slice = unsafe { multiple_of_n.as_chunks_unchecked_mut() };
         (array_slice, remainder)
     }

     pub unsafe fn as_chunks_unchecked_mut<const N: usize>(
-        &mut self,
-    ) -> VolatilePtr<[[T; N]], Access<R, W>> {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.pointer.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { core::intrinsics::exact_div(self.pointer.len(), N) };
-        // SAFETY: We cast a slice of `new_len * N` elements into
-        // a slice of `new_len` many `N` elements chunks.
-        let pointer = NonNull::new(ptr::slice_from_raw_parts_mut(
-            self.pointer.as_mut_ptr().cast(),
-            new_len,
-        ))
-        .unwrap();
-        unsafe { VolatilePtr::new_generic(pointer) }
-    }
-
-    pub unsafe fn as_chunks_unchecked_by_val<const N: usize>(
         self,
     ) -> VolatilePtr<'a, [[T; N]], Access<R, W>> {
         debug_assert_ne!(N, 0);
@@ -797,7 +847,7 @@ impl<'a, T, R, W> VolatilePtr<'a, [T], Access<R, W>> {
 }

 /// Methods for volatile byte slices
 #[cfg(feature = "unstable")]
-impl<A> VolatilePtr<'_, [u8], A> {
+impl<R, W> VolatilePtr<'_, [u8], Access<R, W>> {
     /// Sets all elements of the byte slice to the given `value` using a volatile `memset`.
     ///
     /// This method is similar to the `slice::fill` method of the standard library, with the
@@ -815,11 +865,15 @@ impl<A> VolatilePtr<'_, [u8], A> {
     /// use volatile::VolatilePtr;
     /// use core::ptr::NonNull;
     ///
-    /// let mut buf = unsafe { VolatilePtr::new_read_write(NonNull::from(vec![0; 10].as_mut_slice())) };
+    /// let mut vec = vec![0; 10];
+    /// let mut buf = unsafe { VolatilePtr::new_read_write(NonNull::from(vec.as_mut_slice())) };
     /// buf.fill(1);
     /// assert_eq!(unsafe { buf.as_ptr().as_mut() }, &mut vec![1; 10]);
     /// ```
-    pub fn fill(&mut self, value: u8) {
+    pub fn fill(&mut self, value: u8)
+    where
+        W: access::Safe,
+    {
         unsafe {
             intrinsics::volatile_set_memory(self.pointer.as_mut_ptr(), value, self.pointer.len());
         }
@@ -831,7 +885,7 @@ impl<A> VolatilePtr<'_, [u8], A> {
 /// These methods are only available with the `unstable` feature enabled (requires a nightly
 /// Rust compiler).
 #[cfg(feature = "unstable")]
-impl<T, R, W, const N: usize> VolatilePtr<'_, [T; N], Access<R, W>> {
+impl<'a, T, R, W, const N: usize> VolatilePtr<'a, [T; N], Access<R, W>> {
     /// Converts an array reference to a shared slice.
     ///
     /// This makes it possible to use the methods defined on slices.
@@ -856,13 +910,45 @@ impl<T, R, W, const N: usize> VolatilePtr<'_, [T; N], Access<R, W>> {
     ///
     /// assert_eq!(dst, [1, 2]);
     /// ```
-    pub fn as_slice(&self) -> VolatilePtr<[T], Access<R, NoAccess>> {
+    pub fn as_slice(self) -> VolatilePtr<'a, [T], Access<R, NoAccess>> {
         unsafe {
             self.map(|array| {
                 NonNull::new(ptr::slice_from_raw_parts_mut(array.as_ptr() as *mut T, N)).unwrap()
             })
         }
     }
+
+    /// Converts an array reference to a mutable slice.
+    ///
+    /// This makes it possible to use the methods defined on slices.
+    ///
+    /// ## Example
+    ///
+    /// Copying two elements into a volatile array reference using `copy_from_slice`:
+    ///
+    /// ```
+    /// # extern crate core;
+    /// use volatile::VolatilePtr;
+    /// use core::ptr::NonNull;
+    ///
+    /// let src = [1, 2];
+    /// let mut dst = [0, 0];
+    /// let mut volatile = unsafe { VolatilePtr::new_write_only(NonNull::from(&mut dst)) };
+    ///
+    /// // convert the `Volatile<[i32; 2]>` array reference to a `Volatile<[i32]>` slice
+    /// let mut volatile_slice = volatile.as_slice_mut();
+    /// // we can now use the slice methods
+    /// volatile_slice.copy_from_slice(&src);
+    ///
+    /// assert_eq!(dst, [1, 2]);
+    /// ```
+    pub fn as_slice_mut(self) -> VolatilePtr<'a, [T], Access<R, W>> {
+        unsafe {
+            self.map_mut(|array| {
+                NonNull::new(ptr::slice_from_raw_parts_mut(array.as_ptr() as *mut T, N)).unwrap()
+            })
+        }
+    }
 }

 /// Methods for restricting access.
diff --git a/src/tests.rs b/src/tests.rs
index 840c7ff..bc22063 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -138,7 +138,9 @@ fn test_struct() {
     };
     let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(&mut val)) };
     unsafe {
-        volatile.map_mut(|s| NonNull::new(core::ptr::addr_of_mut!((*s.as_ptr()).field_1)).unwrap())
+        volatile
+            .borrow_mut()
+            .map_mut(|s| NonNull::new(core::ptr::addr_of_mut!((*s.as_ptr()).field_1)).unwrap())
     }
     .update(|v| *v += 1);
     let mut field_2 = unsafe {
@@ -168,7 +170,8 @@ fn test_struct_macro() {
         field_2: true,
     };
     let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(&mut val)) };
-    let mut field_1 = map_field_mut!(volatile.field_1);
+    let volatile_borrowed = volatile.borrow_mut();
+    let mut field_1 = map_field_mut!(volatile_borrowed.field_1);
     field_1.update(|v| *v += 1);
     let mut field_2 = map_field_mut!(volatile.field_2);
     assert!(field_2.read());
@@ -187,7 +190,7 @@ fn test_struct_macro() {
 fn test_slice() {
     let val: &mut [u32] = &mut [1, 2, 3];
     let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
-    volatile.index_mut(0).update(|v| *v += 1);
+    volatile.borrow_mut().index_mut(0).update(|v| *v += 1);

     let mut dst = [0; 3];
     volatile.copy_into_slice(&mut dst);
@@ -199,7 +202,7 @@ fn test_slice() {
 #[should_panic]
 fn test_bounds_check_1() {
     let val: &mut [u32] = &mut [1, 2, 3];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     volatile.index_mut(3);
 }

@@ -208,7 +211,7 @@ fn test_bounds_check_1() {
 #[should_panic]
 fn test_bounds_check_2() {
     let val: &mut [u32] = &mut [1, 2, 3];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     volatile.index_mut(2..1);
 }

@@ -217,7 +220,7 @@ fn test_bounds_check_2() {
 #[should_panic]
 fn test_bounds_check_3() {
     let val: &mut [u32] = &mut [1, 2, 3];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     volatile.index_mut(4..); // `3..` is still ok (see next test)
 }

@@ -225,7 +228,7 @@ fn test_bounds_check_3() {
 #[test]
 fn test_bounds_check_4() {
     let val: &mut [u32] = &mut [1, 2, 3];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     assert_eq!(volatile.index_mut(3..).len(), 0);
 }

@@ -234,7 +237,7 @@ fn test_bounds_check_4() {
 #[should_panic]
 fn test_bounds_check_5() {
     let val: &mut [u32] = &mut [1, 2, 3];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     volatile.index_mut(..4);
 }

@@ -242,10 +245,10 @@ fn test_bounds_check_5() {
 #[test]
 fn test_chunks() {
     let val: &mut [u32] = &mut [1, 2, 3, 4, 5, 6];
-    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
+    let volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(val)) };
     let mut chunks = volatile.as_chunks_mut().0;
-    chunks.index_mut(1).write([10, 11, 12]);
-    assert_eq!(chunks.index(0).read(), [1, 2, 3]);
+    chunks.borrow_mut().index_mut(1).write([10, 11, 12]);
+    assert_eq!(chunks.borrow().index(0).read(), [1, 2, 3]);
     assert_eq!(chunks.index(1).read(), [10, 11, 12]);
 }
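
A small usage sketch of how the reworked by-value API composes. It assumes the in-tree `VolatilePtr`, the new `borrow`/`borrow_mut` methods, and the `map_field!`/`map_field_mut!` macros exactly as added in this diff (not a published `volatile` release); the `Device` struct and its fields are made up for illustration.

```rust
use core::ptr::NonNull;
use volatile::{map_field, map_field_mut, VolatilePtr};

struct Device {
    config: u32,
    ready: bool,
}

fn demo() {
    let mut dev = Device { config: 0, ready: true };
    let mut volatile = unsafe { VolatilePtr::new_read_write(NonNull::from(&mut dev)) };

    // `map_field_mut!` now consumes the pointer it is given (its `map_mut`
    // takes `self`), so take a temporary reborrow first when `volatile` is
    // still needed afterwards, the same pattern `test_struct_macro` uses.
    let borrowed = volatile.borrow_mut();
    let mut config = map_field_mut!(borrowed.config);
    config.update(|v| *v += 1);

    // The last use may consume the original pointer directly.
    let ready = map_field!(volatile.ready);
    assert!(ready.read());
}
```

The `if false` block added to both macros only type-checks an ordinary reference to the field under `#[deny(unaligned_references)]`, so fields of packed structs are rejected at compile time while the reference itself is never created at runtime.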