diff --git a/doc/src/SUMMARY.md b/doc/src/SUMMARY.md
index 3f99b7ffc8985..35377852d0d15 100644
--- a/doc/src/SUMMARY.md
+++ b/doc/src/SUMMARY.md
@@ -14,7 +14,7 @@
 - [Challenges](./challenges.md)
   - [1: Verify core transmuting methods](./challenges/0001-core-transmutation.md)
-  - [2: Verify the memory safery of core intrinsics using raw pointers](./challenges/0002-intrinsics-memory.md)
+  - [2: Verify the memory safety of core intrinsics using raw pointers](./challenges/0002-intrinsics-memory.md)
   - [3: Verifying Raw Pointer Arithmetic Operations](./challenges/0003-pointer-arithmentic.md)
   - [4: Memory safety of BTreeMap's `btree::node` module](./challenges/0004-btree-node.md)
   - [5: Verify functions iterating over inductive data type: `linked_list`](./challenges/0005-linked-list.md)
diff --git a/doc/src/challenges/0003-pointer-arithmentic.md b/doc/src/challenges/0003-pointer-arithmentic.md
index b85c4f45e1a78..65fb3074cf487 100644
--- a/doc/src/challenges/0003-pointer-arithmentic.md
+++ b/doc/src/challenges/0003-pointer-arithmentic.md
@@ -1,10 +1,11 @@
 # Challenge 3: Verifying Raw Pointer Arithmetic Operations
 
-- **Status:** Open
+- **Status:** Resolved
 - **Tracking Issue:** [#76](https://github.com/model-checking/verify-rust-std/issues/76)
 - **Start date:** *2024/06/24*
-- **End date:** *2025/04/10*
+- **End date:** *2024/12/11*
 - **Reward:** *N/A*
+- **Contributors:** [Surya Togaru](https://github.com/stogaru), [Yifei Wang](https://github.com/xsxszab), [Szu-Yu Lee](https://github.com/szlee118), [Mayuresh Joshi](https://github.com/MayureshJoshi25)
 
-------------------
 
diff --git a/doc/src/general-rules.md b/doc/src/general-rules.md
index dfd12722fa7dd..0b884d996ba24 100644
--- a/doc/src/general-rules.md
+++ b/doc/src/general-rules.md
@@ -90,3 +90,21 @@ members = [
+    "rahulku"
 ]
 ```
+
+Committee members are expected to contribute by reviewing pull requests (all
+pull requests require review approvals from at least two committee members
+before they can be merged).
+Reviews of solutions towards challenges should consider at least the following aspects:
+
+1. Does the pull request implement a solution that respects/meets the success
+   criteria of the challenge?
+2. Do the contracts and harnesses incorporate the safety conditions stated in
+   the documentation (from comments in the code and the
+   [standard library documentation](https://doc.rust-lang.org/std/index.html))?
+   Note that we currently focus on safety verification. Pre- and post-conditions
+   towards functional correctness are acceptable as long as they do not
+   negatively impact verification of safety, such as over-constraining input
+   values or causing excessive verification run time.
+3. Is the contributed code of adequate quality and idiomatic, and does it stand
+   a chance of being accepted into the standard library (to the best of the
+   committee member's knowledge)?
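To make review criterion 2 concrete: a harness that assumes away most of the input space verifies quickly but proves far less than a challenge's success criteria demand. A minimal, hypothetical sketch (the function and the commented-out assumption are invented for illustration, not part of the patch):

```rust
#[cfg(kani)]
mod review_example {
    #[kani::proof]
    fn check_abs_diff() {
        let x: u32 = kani::any();
        let y: u32 = kani::any();
        // Over-constraining that a reviewer should flag: an assumption such as
        // `kani::assume(x < 4 && y < 4);` here would restrict the proof to a
        // tiny corner of the input space while still reporting success.
        let diff = if x > y { x - y } else { y - x };
        // Property proven for all inputs: |x - y| never exceeds max(x, y).
        assert!(diff <= x.max(y));
    }
}
```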
/// /// The "default" usage of this type as a queue is to use [`push_back`] to add to @@ -3079,3 +3082,43 @@ impl From<[T; N]> for VecDeque { deq } } + +#[cfg(kani)] +#[unstable(feature = "kani", issue = "none")] +mod verify { + use core::kani; + use crate::collections::VecDeque; + + #[kani::proof] + fn check_vecdeque_swap() { + // The array's length is set to an arbitrary value, which defines its size. + // In this case, implementing a dynamic array is not possible using any_array + // The more elements in the array the longer the veification time. + const ARRAY_LEN: usize = 40; + let mut arr: [u32; ARRAY_LEN] = kani::Arbitrary::any_array(); + let mut deque: VecDeque = VecDeque::from(arr); + let len = deque.len(); + + // Generate valid indices within bounds + let i = kani::any_where(|&x: &usize| x < len); + let j = kani::any_where(|&x: &usize| x < len); + + // Capture the elements at i and j before the swap + let elem_i_before = deque[i]; + let elem_j_before = deque[j]; + + // Perform the swap + deque.swap(i, j); + + // Postcondition: Verify elements have swapped places + assert_eq!(deque[i], elem_j_before); + assert_eq!(deque[j], elem_i_before); + + // Ensure other elements remain unchanged + let k = kani::any_where(|&x: &usize| x < len); + if k != i && k != j { + assert!(deque[k] == arr[k]); + } + } + +} diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs index 990b7e8f76127..370c412af83a2 100644 --- a/library/alloc/src/vec/mod.rs +++ b/library/alloc/src/vec/mod.rs @@ -4032,3 +4032,49 @@ impl TryFrom> for [T; N] { Ok(array) } } + +#[cfg(kani)] +#[unstable(feature = "kani", issue = "none")] +mod verify { + use core::kani; + use crate::vec::Vec; + + // Size chosen for testing the empty vector (0), middle element removal (1) + // and last element removal (2) cases while keeping verification tractable + const ARRAY_LEN: usize = 3; + + #[kani::proof] + pub fn verify_swap_remove() { + + // Creating a vector directly from a fixed length arbitrary array + let mut arr: [i32; ARRAY_LEN] = kani::Arbitrary::any_array(); + let mut vect = Vec::from(&arr); + + // Recording the original length and a copy of the vector for validation + let original_len = vect.len(); + let original_vec = vect.clone(); + + // Generating a nondeterministic index which is guaranteed to be within bounds + let index: usize = kani::any_where(|x| *x < original_len); + + let removed = vect.swap_remove(index); + + // Verifying that the length of the vector decreases by one after the operation is performed + assert!(vect.len() == original_len - 1, "Length should decrease by 1"); + + // Verifying that the removed element matches the original element at the index + assert!(removed == original_vec[index], "Removed element should match original"); + + // Verifying that the removed index now contains the element originally at the vector's last index if applicable + if index < original_len - 1 { + assert!(vect[index] == original_vec[original_len - 1], "Index should contain last element"); + } + + // Check that all other unaffected elements remain unchanged + let k = kani::any_where(|&x: &usize| x < original_len - 1); + if k != index { + assert!(vect[k] == arr[k]); + } + + } +} diff --git a/library/core/src/option.rs b/library/core/src/option.rs index 29d1956af9559..052cff05faf80 100644 --- a/library/core/src/option.rs +++ b/library/core/src/option.rs @@ -2572,3 +2572,30 @@ impl [Option; N] { self.try_map(core::convert::identity) } } + +#[cfg(kani)] +#[unstable(feature = "kani", issue = "none")] +mod verify { + use 
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 990b7e8f76127..370c412af83a2 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -4032,3 +4032,49 @@ impl<T, const N: usize> TryFrom<Vec<T>> for [T; N] {
         Ok(array)
     }
 }
+
+#[cfg(kani)]
+#[unstable(feature = "kani", issue = "none")]
+mod verify {
+    use core::kani;
+    use crate::vec::Vec;
+
+    // Size chosen so the harness covers removal at the first (0), middle (1),
+    // and last (2) indices while keeping verification tractable.
+    const ARRAY_LEN: usize = 3;
+
+    #[kani::proof]
+    pub fn verify_swap_remove() {
+        // Create a vector directly from a fixed-length arbitrary array
+        let arr: [i32; ARRAY_LEN] = kani::Arbitrary::any_array();
+        let mut vect = Vec::from(&arr);
+
+        // Record the original length and a copy of the vector for validation
+        let original_len = vect.len();
+        let original_vec = vect.clone();
+
+        // Generate a nondeterministic index that is guaranteed to be within bounds
+        let index: usize = kani::any_where(|x| *x < original_len);
+
+        let removed = vect.swap_remove(index);
+
+        // The length of the vector decreases by one after the operation
+        assert!(vect.len() == original_len - 1, "Length should decrease by 1");
+
+        // The removed element matches the original element at the index
+        assert!(removed == original_vec[index], "Removed element should match original");
+
+        // If applicable, the removed index now contains the element that was
+        // originally at the vector's last index
+        if index < original_len - 1 {
+            assert!(vect[index] == original_vec[original_len - 1], "Index should contain last element");
+        }
+
+        // Check that all other unaffected elements remain unchanged
+        let k = kani::any_where(|&x: &usize| x < original_len - 1);
+        if k != index {
+            assert!(vect[k] == arr[k]);
+        }
+    }
+}
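Concretely, `swap_remove` removes in O(1) by moving the last element into the vacated slot, which is exactly the postcondition the harness checks for arbitrary contents and indices. A deterministic illustration (not part of the patch):

```rust
fn main() {
    let mut v = vec![1, 2, 3, 4];
    // Removes v[1] and moves the last element into its slot.
    assert_eq!(v.swap_remove(1), 2);
    assert_eq!(v, [1, 4, 3]);
}
```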
diff --git a/library/core/src/option.rs b/library/core/src/option.rs
index 29d1956af9559..052cff05faf80 100644
--- a/library/core/src/option.rs
+++ b/library/core/src/option.rs
@@ -2572,3 +2572,30 @@ impl<T, const N: usize> [Option<T>; N] {
         self.try_map(core::convert::identity)
     }
 }
+
+#[cfg(kani)]
+#[unstable(feature = "kani", issue = "none")]
+mod verify {
+    use crate::kani;
+    use crate::option::Option;
+
+    #[kani::proof]
+    fn verify_as_slice() {
+        if kani::any() {
+            // Case 1: Option is Some
+            let value: u32 = kani::any();
+            let some_option: Option<u32> = Some(value);
+
+            let slice = some_option.as_slice();
+            assert_eq!(slice.len(), 1); // The slice should have exactly one element
+            assert_eq!(slice[0], value); // The value in the slice should match
+        } else {
+            // Case 2: Option is None
+            let none_option: Option<u32> = None;
+
+            let empty_slice = none_option.as_slice();
+            assert_eq!(empty_slice.len(), 0); // The slice should be empty
+            assert!(empty_slice.is_empty()); // Explicit check for emptiness
+        }
+    }
+}
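The behavior under proof here is small enough to state deterministically (`Option::as_slice` has been stable since Rust 1.75; this snippet is an illustration, not part of the patch):

```rust
fn main() {
    assert_eq!(Some(7u32).as_slice(), &[7]);
    assert!(None::<u32>.as_slice().is_empty());
}
```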
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index 68bcd0c1a3d84..714104930dd5a 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -1035,8 +1035,14 @@ impl<T: ?Sized> NonNull<T> {
     /// [`ptr::copy`]: crate::ptr::copy()
     #[inline(always)]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[cfg_attr(kani, kani::modifies(NonNull::slice_from_raw_parts(dest, count).as_ptr()))]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[requires(count.checked_mul(core::mem::size_of::<T>()).map_or_else(|| false, |size| size <= isize::MAX as usize)
+        && ub_checks::can_dereference(NonNull::slice_from_raw_parts(self, count).as_ptr())
+        && ub_checks::can_write(NonNull::slice_from_raw_parts(dest, count).as_ptr()))]
+    #[ensures(|result: &()| ub_checks::can_dereference(self.as_ptr() as *const u8)
+        && ub_checks::can_dereference(dest.as_ptr() as *const u8))]
     pub const unsafe fn copy_to(self, dest: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1055,8 +1061,15 @@ impl<T: ?Sized> NonNull<T> {
     /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
     #[inline(always)]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[cfg_attr(kani, kani::modifies(NonNull::slice_from_raw_parts(dest, count).as_ptr()))]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[requires(count.checked_mul(core::mem::size_of::<T>()).map_or_else(|| false, |size| size <= isize::MAX as usize)
+        && ub_checks::can_dereference(NonNull::slice_from_raw_parts(self, count).as_ptr())
+        && ub_checks::can_write(NonNull::slice_from_raw_parts(dest, count).as_ptr())
+        && ub_checks::maybe_is_nonoverlapping(self.as_ptr() as *const (), dest.as_ptr() as *const (), count, core::mem::size_of::<T>()))]
+    #[ensures(|result: &()| ub_checks::can_dereference(self.as_ptr() as *const u8)
+        && ub_checks::can_dereference(dest.as_ptr() as *const u8))]
     pub const unsafe fn copy_to_nonoverlapping(self, dest: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1075,8 +1088,14 @@ impl<T: ?Sized> NonNull<T> {
     /// [`ptr::copy`]: crate::ptr::copy()
     #[inline(always)]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[cfg_attr(kani, kani::modifies(NonNull::slice_from_raw_parts(self, count).as_ptr()))]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[requires(count.checked_mul(core::mem::size_of::<T>()).map_or_else(|| false, |size| size <= isize::MAX as usize)
+        && ub_checks::can_dereference(NonNull::slice_from_raw_parts(src, count).as_ptr())
+        && ub_checks::can_write(NonNull::slice_from_raw_parts(self, count).as_ptr()))]
+    #[ensures(|result: &()| ub_checks::can_dereference(src.as_ptr() as *const u8)
+        && ub_checks::can_dereference(self.as_ptr() as *const u8))]
     pub const unsafe fn copy_from(self, src: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -1095,8 +1114,15 @@ impl<T: ?Sized> NonNull<T> {
     /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
     #[inline(always)]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[cfg_attr(kani, kani::modifies(NonNull::slice_from_raw_parts(self, count).as_ptr()))]
     #[stable(feature = "non_null_convenience", since = "1.80.0")]
     #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
+    #[requires(count.checked_mul(core::mem::size_of::<T>()).map_or_else(|| false, |size| size <= isize::MAX as usize)
+        && ub_checks::can_dereference(NonNull::slice_from_raw_parts(src, count).as_ptr())
+        && ub_checks::can_write(NonNull::slice_from_raw_parts(self, count).as_ptr())
+        && ub_checks::maybe_is_nonoverlapping(src.as_ptr() as *const (), self.as_ptr() as *const (), count, core::mem::size_of::<T>()))]
+    #[ensures(|result: &()| ub_checks::can_dereference(src.as_ptr() as *const u8)
+        && ub_checks::can_dereference(self.as_ptr() as *const u8))]
     pub const unsafe fn copy_from_nonoverlapping(self, src: NonNull<T>, count: usize)
     where
         T: Sized,
@@ -2538,4 +2564,62 @@ mod verify {
             let _ = ptr.byte_offset_from(origin);
         }
     }
+
+    #[kani::proof_for_contract(NonNull::<i32>::copy_to)]
+    pub fn non_null_check_copy_to() {
+        // PointerGenerator instance
+        let mut generator = PointerGenerator::<16>::new();
+        // Generate arbitrary pointers for src and dest
+        let src_ptr = generator.any_in_bounds::<i32>().ptr;
+        let dest_ptr = generator.any_in_bounds::<i32>().ptr;
+        // Wrap the raw pointers in NonNull
+        let src = NonNull::new(src_ptr).unwrap();
+        let dest = NonNull::new(dest_ptr).unwrap();
+        // Generate an arbitrary count using kani::any
+        let count: usize = kani::any();
+        unsafe { src.copy_to(dest, count); }
+    }
+
+    #[kani::proof_for_contract(NonNull::<i32>::copy_from)]
+    pub fn non_null_check_copy_from() {
+        // PointerGenerator instance
+        let mut generator = PointerGenerator::<16>::new();
+        // Generate arbitrary pointers for src and dest
+        let src_ptr = generator.any_in_bounds::<i32>().ptr;
+        let dest_ptr = generator.any_in_bounds::<i32>().ptr;
+        // Wrap the raw pointers in NonNull
+        let src = NonNull::new(src_ptr).unwrap();
+        let dest = NonNull::new(dest_ptr).unwrap();
+        // Generate an arbitrary count using kani::any
+        let count: usize = kani::any();
+        unsafe { src.copy_from(dest, count); }
+    }
+
+    #[kani::proof_for_contract(NonNull::<i32>::copy_to_nonoverlapping)]
+    pub fn non_null_check_copy_to_nonoverlapping() {
+        // PointerGenerator instance
+        let mut generator = PointerGenerator::<16>::new();
+        // Generate arbitrary pointers for src and dest
+        let src_ptr = generator.any_in_bounds::<i32>().ptr;
+        let dest_ptr = generator.any_in_bounds::<i32>().ptr;
+        // Wrap the raw pointers in NonNull
+        let src = NonNull::new(src_ptr).unwrap();
+        let dest = NonNull::new(dest_ptr).unwrap();
+        // Generate an arbitrary count using kani::any
+        let count: usize = kani::any();
+        unsafe { src.copy_to_nonoverlapping(dest, count); }
+    }
+
+    #[kani::proof_for_contract(NonNull::<i32>::copy_from_nonoverlapping)]
+    pub fn non_null_check_copy_from_nonoverlapping() {
+        // PointerGenerator instance
+        let mut generator = PointerGenerator::<16>::new();
+        // Generate arbitrary pointers for src and dest
+        let src_ptr = generator.any_in_bounds::<i32>().ptr;
+        let dest_ptr = generator.any_in_bounds::<i32>().ptr;
+        // Wrap the raw pointers in NonNull
+        let src = NonNull::new(src_ptr).unwrap();
+        let dest = NonNull::new(dest_ptr).unwrap();
+        // Generate an arbitrary count using kani::any
+        let count: usize = kani::any();
+        unsafe { src.copy_from_nonoverlapping(dest, count); }
+    }
 }
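The contracts above encode the documented safety conditions for the four copy methods: the total byte count `count * size_of::<T>()` must fit in `isize`, the source range must be readable, the destination range writable, and, for the `_nonoverlapping` variants, the two ranges must be disjoint. A minimal concrete use that satisfies all of these preconditions (an illustration under the stated assumptions, not part of the patch):

```rust
use std::ptr::NonNull;

fn main() {
    let src_data = [1u8, 2, 3, 4];
    let mut dst_data = [0u8; 4];
    // Derive pointers that are valid for the whole arrays.
    let src = NonNull::new(src_data.as_ptr().cast_mut()).unwrap();
    let dst = NonNull::new(dst_data.as_mut_ptr()).unwrap();
    // SAFETY: `src` is readable and `dst` writable for 4 elements, the byte
    // count cannot overflow `isize`, and the two buffers do not overlap.
    unsafe { src.copy_to_nonoverlapping(dst, 4) };
    assert_eq!(dst_data, src_data);
}
```

Each `proof_for_contract` harness can then be checked individually with Kani's cargo integration, e.g. `cargo kani --harness non_null_check_copy_to`, assuming the repository's usual Kani setup.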