From eae6c8bc2a4393c0c6704603038b00253c83a118 Mon Sep 17 00:00:00 2001 From: Reinier Maas Date: Fri, 6 Dec 2024 20:57:54 +0100 Subject: [PATCH 1/4] Update tools.md (#207) --- doc/src/tools.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/src/tools.md b/doc/src/tools.md index 1e86d661ccb2e..0eaba7c40d4d8 100644 --- a/doc/src/tools.md +++ b/doc/src/tools.md @@ -4,7 +4,7 @@ The verification tool ecosystem for Rust is rapidly growing, and we welcome the In this chapter, you can find a list of tools that have already been approved for new solutions, what is their CI current status, as well as more details on how to use them. -If the tool you would like to add a new tool to the list of tool applications, +If you would like to add a new tool to the list of tool applications, please see the [Tool Application](general-rules.md#tool-applications) section. ## Approved tools: From ee9b7c311ab494329382b483e35cec622d7e565e Mon Sep 17 00:00:00 2001 From: "Celina G. Val" Date: Fri, 6 Dec 2024 14:30:39 -0800 Subject: [PATCH 2/4] Enable harnesses that were blocked by Kani's spurious CEX (#211) In #148 and #122, we had to comment out a few harnesses due to the issue https://github.com/model-checking/kani/issues/3670. But now that the fix has been pushed, we can enable them. --- library/core/src/slice/iter.rs | 22 ++++++++++++++-------- library/core/src/str/pattern.rs | 5 ----- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs index 5005563233d2d..5ea9204a28fd0 100644 --- a/library/core/src/slice/iter.rs +++ b/library/core/src/slice/iter.rs @@ -3604,18 +3604,24 @@ mod verify { } check_unsafe_contracts!(check_next_back_unchecked, $ty, next_back_unchecked()); - //check_unsafe_contracts!(check_post_inc_start, $ty, post_inc_start(kani::any())); - //check_unsafe_contracts!(check_pre_dec_end, $ty, pre_dec_end(kani::any())); + check_unsafe_contracts!(check_post_inc_start, $ty, post_inc_start(kani::any())); + check_unsafe_contracts!(check_pre_dec_end, $ty, pre_dec_end(kani::any())); // FIXME: The functions that are commented out are currently failing verification. // Debugging the issue is currently blocked by: // https://github.com/model-checking/kani/issues/3670 // // Public functions that call safe abstraction `make_slice`. 
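            // A rough sketch of what one of these safe-abstraction checks amounts to for
            // `as_slice`, written out standalone. This assumes the `check_safe_abstraction!`
            // macro (defined earlier in this file) builds an arbitrary `Iter` from a
            // kani-generated array and applies the given closure; the harness name and the
            // array length below are illustrative only.
            #[kani::proof]
            fn check_as_slice_u8_sketch() {
                let array: [u8; 8] = kani::any();
                let mut iter = array.iter();
                // The safe method wraps unsafe pointer arithmetic; Kani checks that here.
                let _ = iter.as_slice();
            }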
- // check_safe_abstraction!(check_as_slice, $ty, as_slice); - // check_safe_abstraction!(check_as_ref, $ty, as_ref); + check_safe_abstraction!(check_as_slice, $ty, |iter: &mut Iter<'_, $ty>| { + iter.as_slice(); + }); + check_safe_abstraction!(check_as_ref, $ty, |iter: &mut Iter<'_, $ty>| { + iter.as_ref(); + }); - // check_safe_abstraction!(check_advance_back_by, $ty, advance_back_by, kani::any()); + check_safe_abstraction!(check_advance_back_by, $ty, |iter: &mut Iter<'_, $ty>| { + iter.advance_back_by(kani::any()); + }); check_safe_abstraction!(check_is_empty, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.is_empty(); @@ -3626,12 +3632,12 @@ mod verify { check_safe_abstraction!(check_size_hint, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.size_hint(); }); - //check_safe_abstraction!(check_nth, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.nth(kani::any()); }); - //check_safe_abstraction!(check_advance_by, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.advance_by(kani::any()); }); + check_safe_abstraction!(check_nth, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.nth(kani::any()); }); + check_safe_abstraction!(check_advance_by, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.advance_by(kani::any()); }); check_safe_abstraction!(check_next_back, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.next_back(); }); - //check_safe_abstraction!(check_nth_back, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.nth_back(kani::any()); }); + check_safe_abstraction!(check_nth_back, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.nth_back(kani::any()); }); check_safe_abstraction!(check_next, $ty, |iter: &mut Iter<'_, $ty>| { let _ = iter.next(); }); diff --git a/library/core/src/str/pattern.rs b/library/core/src/str/pattern.rs index 7792bfbbac718..6540608344fa1 100644 --- a/library/core/src/str/pattern.rs +++ b/library/core/src/str/pattern.rs @@ -2000,10 +2000,6 @@ pub mod verify { } } - /* This harness check `small_slice_eq` with dangling pointer to slice - with zero size. Kani finds safety issue of `small_slice_eq` in this - harness and hence the proof will fail. - #[cfg(all(kani, target_arch = "x86_64"))] // only called on x86 #[kani::proof] #[kani::unwind(4)] @@ -2022,5 +2018,4 @@ pub mod verify { true ); } - */ } From d92a7ea57f9096171354b47b45bd4f6e31baba2a Mon Sep 17 00:00:00 2001 From: "Celina G. Val" Date: Fri, 6 Dec 2024 17:44:00 -0800 Subject: [PATCH 3/4] Add a few intrinsics contracts (#37) Here are a few limitations: 1. Harness for`write_bytes` was disabled due to: - Issue https://github.com/model-checking/kani/issues/90. 2. The harnesses explicitly disable cases where a pointer is dangling. - Kani cannot make assumptions on pointer allocation for dead or dangling pointers (https://github.com/model-checking/kani/issues/2300). 3. Actual intrinsics are very hard to verify with Kani. The cases we can verify are those that have wrappers around the actual intrinsic. - Issue https://github.com/model-checking/kani/issues/3345 By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 and MIT licenses. 
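The dangling-pointer restriction described in item 2 is what the new `supported_status` predicate in this patch encodes, and the `typed_swap` harnesses funnel through a shared `run_with_arbitrary_ptrs` helper. A condensed sketch of that pattern follows (assuming it sits inside `core::intrinsics`'s `mod verify`, as in this patch, so `typed_swap` and `kani` are in scope; the smaller generator size and harness name are illustrative):

```rust
use kani::{AllocationStatus, ArbitraryPointer, PointerGenerator};

/// Kani's memory predicates cannot reason about dangling or dead allocations,
/// so those allocation states are excluded before calling the intrinsic.
fn supported_status(status: AllocationStatus) -> bool {
    status != AllocationStatus::Dangling && status != AllocationStatus::DeadObject
}

#[kani::proof_for_contract(typed_swap)]
fn check_typed_swap_u8_sketch() {
    let mut generator1 = PointerGenerator::<16>::new();
    let mut generator2 = PointerGenerator::<16>::new();
    let ArbitraryPointer { ptr: x, status: x_status, .. } = generator1.any_alloc_status::<u8>();
    let ArbitraryPointer { ptr: y, status: y_status, .. } = generator2.any_alloc_status::<u8>();
    kani::assume(supported_status(x_status));
    kani::assume(supported_status(y_status));
    // Kani checks `typed_swap`'s #[requires]/#[ensures] contract against these
    // arbitrary (but supported) pointer states.
    unsafe { typed_swap(x, y) };
}
```

The full version in this patch additionally lets the second pointer come from either generator, so overlapping and non-overlapping layouts are both explored.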
--------- Co-authored-by: Michael Tautschnig Co-authored-by: Michael Tautschnig --- doc/src/challenges/0002-intrinsics-memory.md | 46 +++--- library/core/src/intrinsics/mod.rs | 149 +++++++++++++++++-- 2 files changed, 157 insertions(+), 38 deletions(-) diff --git a/doc/src/challenges/0002-intrinsics-memory.md b/doc/src/challenges/0002-intrinsics-memory.md index 31c1c43225250..adb4176254804 100644 --- a/doc/src/challenges/0002-intrinsics-memory.md +++ b/doc/src/challenges/0002-intrinsics-memory.md @@ -24,29 +24,29 @@ Annotate Rust core::intrinsics functions that manipulate raw pointers with their Intrinsic functions to be annotated with safety contracts -| Function | Location | -|---------|---------| -|typed_swap | core::intrisics | -|vtable_size| core::intrisics | -|vtable_align| core::intrisics | -|copy_nonoverlapping| core::intrisics | -|copy| core::intrisics | -|write_bytes| core::intrisics | -|size_of_val| core::intrisics | -|arith_offset| core::intrisics | -|volatile_copy_nonoverlapping_memory| core::intrisics | -|volatile_copy_memory| core::intrisics | -|volatile_set_memory| core::intrisics | -|volatile_load| core::intrisics | -|volatile_store| core::intrisics | -|unaligned_volatile_load| core::intrisics | -|unaligned_volatile_store| core::intrisics | -|compare_bytes| core::intrisics | -|min_align_of_val| core::intrisics | -|ptr_offset_from| core::intrisics | -|ptr_offset_from_unsigned| core::intrisics | -|read_via_copy| core::intrisics | -|write_via_move| core::intrisics | +| Function | Location | +|-------------------------------------|-----------------| +| typed_swap | core::intrisics | +| vtable_size | core::intrisics | +| vtable_align | core::intrisics | +| copy_nonoverlapping | core::intrisics | +| copy | core::intrisics | +| write_bytes | core::intrisics | +| size_of_val | core::intrisics | +| arith_offset | core::intrisics | +| volatile_copy_nonoverlapping_memory | core::intrisics | +| volatile_copy_memory | core::intrisics | +| volatile_set_memory | core::intrisics | +| volatile_load | core::intrisics | +| volatile_store | core::intrisics | +| unaligned_volatile_load | core::intrisics | +| unaligned_volatile_store | core::intrisics | +| compare_bytes | core::intrisics | +| min_align_of_val | core::intrisics | +| ptr_offset_from | core::intrisics | +| ptr_offset_from_unsigned | core::intrisics | +| read_via_copy | core::intrisics | +| write_via_move | core::intrisics | All the following usages of intrinsics were proven safe: diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 38dbe91bea64a..29ef19daaf679 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -64,10 +64,10 @@ )] #![allow(missing_docs)] -use safety::requires; use crate::marker::{DiscriminantKind, Tuple}; use crate::mem::SizedTypeProperties; use crate::{ptr, ub_checks}; +use safety::{ensures, requires}; #[cfg(kani)] use crate::kani; @@ -3663,7 +3663,8 @@ pub const fn is_val_statically_known(_arg: T) -> bool { #[requires(ub_checks::can_dereference(x) && ub_checks::can_write(x))] #[requires(ub_checks::can_dereference(y) && ub_checks::can_write(y))] #[requires(x.addr() != y.addr() || core::mem::size_of::() == 0)] -#[requires((x.addr() >= y.addr() + core::mem::size_of::()) || (y.addr() >= x.addr() + core::mem::size_of::()))] +#[requires(ub_checks::maybe_is_nonoverlapping(x as *const (), y as *const (), size_of::(), 1))] +#[ensures(|_| ub_checks::can_dereference(x) && ub_checks::can_dereference(y))] pub const unsafe fn typed_swap(x: *mut T, 
y: *mut T) { // SAFETY: The caller provided single non-overlapping items behind // pointers, so swapping them with `count: 1` is fine. @@ -3737,6 +3738,9 @@ pub const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_intrinsic] #[rustc_intrinsic_must_be_overridden] +// VTable pointers must be valid for dereferencing at least 3 `usize` (size, alignment and drop): +// +#[requires(ub_checks::can_dereference(_ptr as *const [usize; 3]))] pub unsafe fn vtable_size(_ptr: *const ()) -> usize { unreachable!() } @@ -3750,6 +3754,9 @@ pub unsafe fn vtable_size(_ptr: *const ()) -> usize { #[unstable(feature = "core_intrinsics", issue = "none")] #[rustc_intrinsic] #[rustc_intrinsic_must_be_overridden] +// VTable pointers must be valid for dereferencing at least 3 `usize` (size, alignment and drop): +// +#[requires(ub_checks::can_dereference(_ptr as *const [usize; 3]))] pub unsafe fn vtable_align(_ptr: *const ()) -> usize { unreachable!() } @@ -4034,6 +4041,13 @@ pub const fn ptr_metadata + ?Sized, M>(_ptr: *cons #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_copy_nonoverlapping"] +// Copy is "untyped". +#[cfg_attr(kani, kani::modifies(crate::ptr::slice_from_raw_parts(dst, count)))] +#[requires(!count.overflowing_mul(size_of::()).1 + && ub_checks::can_dereference(core::ptr::slice_from_raw_parts(src as *const crate::mem::MaybeUninit, count)) + && ub_checks::can_write(core::ptr::slice_from_raw_parts_mut(dst, count)) + && ub_checks::maybe_is_nonoverlapping(src as *const (), dst as *const (), size_of::(), count))] +#[ensures(|_| { check_copy_untyped(src, dst, count)})] pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize) { #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0"))] #[cfg_attr(not(bootstrap), rustc_intrinsic_const_stable_indirect)] @@ -4141,6 +4155,11 @@ pub const unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: us #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_copy"] +#[requires(!count.overflowing_mul(size_of::()).1 + && ub_checks::can_dereference(core::ptr::slice_from_raw_parts(src as *const crate::mem::MaybeUninit, count)) + && ub_checks::can_write(core::ptr::slice_from_raw_parts_mut(dst, count)))] +#[ensures(|_| { check_copy_untyped(src, dst, count) })] +#[cfg_attr(kani, kani::modifies(crate::ptr::slice_from_raw_parts(dst, count)))] pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0"))] #[cfg_attr(not(bootstrap), rustc_intrinsic_const_stable_indirect)] @@ -4225,6 +4244,12 @@ pub const unsafe fn copy(src: *const T, dst: *mut T, count: usize) { #[inline(always)] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces #[rustc_diagnostic_item = "ptr_write_bytes"] +#[requires(!count.overflowing_mul(size_of::()).1 + && ub_checks::can_write(core::ptr::slice_from_raw_parts_mut(dst, count)))] +#[requires(ub_checks::maybe_is_aligned_and_not_null(dst as *const (), align_of::(), T::IS_ZST || count == 0))] +#[ensures(|_| + ub_checks::can_dereference(crate::ptr::slice_from_raw_parts(dst as *const u8, count * size_of::())))] +#[cfg_attr(kani, kani::modifies(crate::ptr::slice_from_raw_parts(dst, count)))] pub const unsafe fn 
write_bytes(dst: *mut T, val: u8, count: usize) { #[cfg_attr(bootstrap, rustc_const_stable(feature = "const_ptr_write", since = "1.83.0"))] #[cfg_attr(not(bootstrap), rustc_intrinsic_const_stable_indirect)] @@ -4513,6 +4538,36 @@ pub const unsafe fn copysignf128(_x: f128, _y: f128) -> f128 { unimplemented!(); } +/// Return whether the initialization state is preserved. +/// +/// For untyped copy, done via `copy` and `copy_nonoverlapping`, the copies of non-initialized +/// bytes (such as padding bytes) should result in a non-initialized copy, while copies of +/// initialized bytes result in initialized bytes. +/// +/// It is UB to read the uninitialized bytes, so we cannot compare their values only their +/// initialization state. +/// +/// This is used for contracts only. +/// +/// FIXME: Change this once we add support to quantifiers. +#[allow(dead_code)] +#[allow(unused_variables)] +fn check_copy_untyped(src: *const T, dst: *mut T, count: usize) -> bool { + #[cfg(kani)] + if count > 0 { + let byte = kani::any_where(|sz: &usize| *sz < size_of::()); + let elem = kani::any_where(|val: &usize| *val < count); + let src_data = src as *const u8; + let dst_data = unsafe { dst.add(elem) } as *const u8; + ub_checks::can_dereference(unsafe { src_data.add(byte) }) + == ub_checks::can_dereference(unsafe { dst_data.add(byte) }) + } else { + true + } + #[cfg(not(kani))] + false +} + /// Inform Miri that a given pointer definitely has a certain alignment. #[cfg(miri)] #[rustc_allow_const_fn_unstable(const_eval_select)] @@ -4538,35 +4593,99 @@ pub(crate) const fn miri_promise_symbolic_alignment(ptr: *const (), align: usize } #[cfg(kani)] -#[unstable(feature="kani", issue="none")] +#[unstable(feature = "kani", issue = "none")] mod verify { - use core::{cmp, fmt}; use super::*; use crate::kani; + use core::mem::MaybeUninit; + use kani::{AllocationStatus, Arbitrary, ArbitraryPointer, PointerGenerator}; #[kani::proof_for_contract(typed_swap)] pub fn check_typed_swap_u8() { - check_swap::() + run_with_arbitrary_ptrs::(|x, y| unsafe { typed_swap(x, y) }); } #[kani::proof_for_contract(typed_swap)] pub fn check_typed_swap_char() { - check_swap::() + run_with_arbitrary_ptrs::(|x, y| unsafe { typed_swap(x, y) }); } #[kani::proof_for_contract(typed_swap)] pub fn check_typed_swap_non_zero() { - check_swap::() + run_with_arbitrary_ptrs::(|x, y| unsafe { typed_swap(x, y) }); } - pub fn check_swap() { - let mut x = kani::any::(); - let old_x = x; - let mut y = kani::any::(); - let old_y = y; + #[kani::proof_for_contract(copy)] + fn check_copy() { + run_with_arbitrary_ptrs::(|src, dst| unsafe { copy(src, dst, kani::any()) }); + } - unsafe { typed_swap(&mut x, &mut y) }; - assert_eq!(y, old_x); - assert_eq!(x, old_y); + #[kani::proof_for_contract(copy_nonoverlapping)] + fn check_copy_nonoverlapping() { + // Note: cannot use `ArbitraryPointer` here. + // The `ArbitraryPtr` will arbitrarily initialize memory by indirectly invoking + // `copy_nonoverlapping`. + // Kani contract checking would fail due to existing restriction on calls to + // the function under verification. 
+ let gen_any_ptr = |buf: &mut [MaybeUninit; 100]| -> *mut char { + let base = buf.as_mut_ptr() as *mut u8; + base.wrapping_add(kani::any_where(|offset: &usize| *offset < 400)) as *mut char + }; + let mut buffer1 = [MaybeUninit::::uninit(); 100]; + for i in 0..100 { + if kani::any() { + buffer1[i] = MaybeUninit::new(kani::any()); + } + } + let mut buffer2 = [MaybeUninit::::uninit(); 100]; + let src = gen_any_ptr(&mut buffer1); + let dst = if kani::any() { gen_any_ptr(&mut buffer2) } else { gen_any_ptr(&mut buffer1) }; + unsafe { copy_nonoverlapping(src, dst, kani::any()) } + } + + // FIXME: Enable this harness once is fixed. + // Harness triggers a spurious failure when writing 0 bytes to an invalid memory location, + // which is a safe operation. + #[cfg(not(kani))] + #[kani::proof_for_contract(write_bytes)] + fn check_write_bytes() { + let mut generator = PointerGenerator::<100>::new(); + let ArbitraryPointer { + ptr, + status, + .. + } = generator.any_alloc_status::(); + kani::assume(supported_status(status)); + unsafe { write_bytes(ptr, kani::any(), kani::any()) }; + } + + fn run_with_arbitrary_ptrs(harness: impl Fn(*mut T, *mut T)) { + let mut generator1 = PointerGenerator::<100>::new(); + let mut generator2 = PointerGenerator::<100>::new(); + let ArbitraryPointer { + ptr: src, + status: src_status, + .. + } = generator1.any_alloc_status::(); + let ArbitraryPointer { + ptr: dst, + status: dst_status, + .. + } = if kani::any() { + generator1.any_alloc_status::() + } else { + generator2.any_alloc_status::() + }; + kani::assume(supported_status(src_status)); + kani::assume(supported_status(dst_status)); + harness(src, dst); + } + + /// Return whether the current status is supported by Kani's contract. + /// + /// Kani memory predicates currently doesn't support pointers to dangling or dead allocations. + /// Thus, we have to explicitly exclude those cases. 
+ fn supported_status(status: AllocationStatus) -> bool { + status != AllocationStatus::Dangling && status != AllocationStatus::DeadObject } } From 8a0d0ce22d8094799ca302248956b18b36fdc564 Mon Sep 17 00:00:00 2001 From: stogaru <143449212+stogaru@users.noreply.github.com> Date: Sat, 7 Dec 2024 16:51:44 -0800 Subject: [PATCH 4/4] Contracts and Harnesses for `core::ptr::byte_offset_from` (#186) --- library/core/src/ptr/const_ptr.rs | 205 +++++++++++++++++++++++++++++- library/core/src/ptr/mut_ptr.rs | 205 +++++++++++++++++++++++++++++- 2 files changed, 407 insertions(+), 3 deletions(-) diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs index a210821f08d10..32baf1b33941a 100644 --- a/library/core/src/ptr/const_ptr.rs +++ b/library/core/src/ptr/const_ptr.rs @@ -730,6 +730,16 @@ impl *const T { #[stable(feature = "pointer_byte_offsets", since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + (mem::size_of_val_raw(self) != 0) && + // Ensures subtracting `origin` from `self` doesn't overflow + (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && + // Ensure both pointers are in the same allocation or are pointing to the same address + (self.addr() == origin.addr() || + core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) + )] + // The result should equal the distance in terms of bytes + #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. unsafe { self.cast::().offset_from(origin.cast::()) } @@ -1940,7 +1950,7 @@ mod verify { check_const_offset_usize ); - // Generte harnesses for composite types (add, sub, offset) + // Generate harnesses for composite types (add, sub, offset) generate_arithmetic_harnesses!( (i8, i8), check_const_add_tuple_1, @@ -2116,6 +2126,17 @@ mod verify { // PointerGenerator does not support dynamic sized arrays. const ARRAY_LEN: usize = 40; + #[kani::proof] + pub fn check_const_byte_offset_from_fixed_offset() { + let arr: [u32; ARRAY_LEN] = kani::Arbitrary::any_array(); + let offset: usize = kani::any_where(|&x| x <= ARRAY_LEN); + let origin_ptr: *const u32 = arr.as_ptr(); + let self_ptr: *const u32 = unsafe { origin_ptr.byte_offset(offset as isize) }; + let result: isize = unsafe { self_ptr.byte_offset_from(origin_ptr) }; + assert_eq!(result, offset as isize); + assert_eq!(result, (self_ptr.addr() as isize - origin_ptr.addr() as isize)); + } + macro_rules! 
generate_offset_from_harness { ($type: ty, $proof_name1: ident, $proof_name2: ident) => { // Proof for a single element @@ -2156,7 +2177,7 @@ mod verify { }; } - // Proof for unit size will panic as offset_from needs the pointee size to be greater then 0 + // Proof for unit size will panic as offset_from needs the pointee size to be greater than 0 #[kani::proof_for_contract(<*const ()>::offset_from)] #[kani::should_panic] pub fn check_const_offset_from_unit() { @@ -2252,6 +2273,186 @@ mod verify { check_const_offset_from_tuple_4_arr ); + // Proof for contact of byte_offset_from to verify unit type + #[kani::proof_for_contract(<*const ()>::byte_offset_from)] + pub fn check_const_byte_offset_from_unit() { + let val: () = (); + let src_ptr: *const () = &val; + let dest_ptr: *const () = &val; + unsafe { + dest_ptr.byte_offset_from(src_ptr); + } + } + + // generate proofs for contracts for byte_offset_from to verify int and composite + // types + // - `$type`: pointee type + // - `$proof_name1`: name of the harness for single element + // - `$proof_name2`: name of the harness for array of elements + macro_rules! generate_const_byte_offset_from_harness { + ($type: ty, $proof_name1: ident, $proof_name2: ident) => { + // Proof for a single element + #[kani::proof_for_contract(<*const $type>::byte_offset_from)] + pub fn $proof_name1() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::::new(); + let mut generator2 = PointerGenerator::::new(); + let ptr1: *const $type = generator1.any_in_bounds().ptr; + let ptr2: *const $type = if kani::any() { + generator1.any_alloc_status().ptr + } else { + generator2.any_alloc_status().ptr + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + + // Proof for large arrays + #[kani::proof_for_contract(<*const $type>::byte_offset_from)] + pub fn $proof_name2() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let mut generator2 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let ptr1: *const $type = generator1.any_in_bounds().ptr; + let ptr2: *const $type = if kani::any() { + generator1.any_alloc_status().ptr + } else { + generator2.any_alloc_status().ptr + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + }; + } + + generate_const_byte_offset_from_harness!( + u8, + check_const_byte_offset_from_u8, + check_const_byte_offset_from_u8_arr + ); + generate_const_byte_offset_from_harness!( + u16, + check_const_byte_offset_from_u16, + check_const_byte_offset_from_u16_arr + ); + generate_const_byte_offset_from_harness!( + u32, + check_const_byte_offset_from_u32, + check_const_byte_offset_from_u32_arr + ); + generate_const_byte_offset_from_harness!( + u64, + check_const_byte_offset_from_u64, + check_const_byte_offset_from_u64_arr + ); + generate_const_byte_offset_from_harness!( + u128, + check_const_byte_offset_from_u128, + check_const_byte_offset_from_u128_arr + ); + generate_const_byte_offset_from_harness!( + usize, + check_const_byte_offset_from_usize, + check_const_byte_offset_from_usize_arr + ); + + generate_const_byte_offset_from_harness!( + i8, + check_const_byte_offset_from_i8, + check_const_byte_offset_from_i8_arr + ); + generate_const_byte_offset_from_harness!( + i16, + check_const_byte_offset_from_i16, + check_const_byte_offset_from_i16_arr + ); + generate_const_byte_offset_from_harness!( + i32, + check_const_byte_offset_from_i32, + check_const_byte_offset_from_i32_arr + ); + generate_const_byte_offset_from_harness!( + 
i64, + check_const_byte_offset_from_i64, + check_const_byte_offset_from_i64_arr + ); + generate_const_byte_offset_from_harness!( + i128, + check_const_byte_offset_from_i128, + check_const_byte_offset_from_i128_arr + ); + generate_const_byte_offset_from_harness!( + isize, + check_const_byte_offset_from_isize, + check_const_byte_offset_from_isize_arr + ); + + generate_const_byte_offset_from_harness!( + (i8, i8), + check_const_byte_offset_from_tuple_1, + check_const_byte_offset_from_tuple_1_arr + ); + generate_const_byte_offset_from_harness!( + (f64, bool), + check_const_byte_offset_from_tuple_2, + check_const_byte_offset_from_tuple_2_arr + ); + generate_const_byte_offset_from_harness!( + (u32, i16, f32), + check_const_byte_offset_from_tuple_3, + check_const_byte_offset_from_tuple_3_arr + ); + generate_const_byte_offset_from_harness!( + ((), bool, u8, u16, i32, f64, i128, usize), + check_const_byte_offset_from_tuple_4, + check_const_byte_offset_from_tuple_4_arr + ); + + // length of the slice generated from PointerGenerator + const SLICE_LEN: usize = 10; + + // generate proofs for contracts for byte_offset_from to verify slices + // - `$type`: type of the underlyign element within the slice pointer + // - `$proof_name`: name of the harness + macro_rules! generate_const_byte_offset_from_slice_harness { + ($type: ty, $proof_name: ident) => { + #[kani::proof_for_contract(<*const [$type]>::byte_offset_from)] + pub fn $proof_name() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let mut generator2 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let ptr1: *const [$type] = + generator1.any_in_bounds().ptr as *const [$type; SLICE_LEN]; + let ptr2: *const [$type] = if kani::any() { + generator1.any_alloc_status().ptr as *const [$type; SLICE_LEN] + } else { + generator2.any_alloc_status().ptr as *const [$type; SLICE_LEN] + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + }; + } + + generate_const_byte_offset_from_slice_harness!(u8, check_const_byte_offset_from_u8_slice); + generate_const_byte_offset_from_slice_harness!(u16, check_const_byte_offset_from_u16_slice); + generate_const_byte_offset_from_slice_harness!(u32, check_const_byte_offset_from_u32_slice); + generate_const_byte_offset_from_slice_harness!(u64, check_const_byte_offset_from_u64_slice); + generate_const_byte_offset_from_slice_harness!(u128, check_const_byte_offset_from_u128_slice); + generate_const_byte_offset_from_slice_harness!(usize, check_const_byte_offset_from_usize_slice); + generate_const_byte_offset_from_slice_harness!(i8, check_const_byte_offset_from_i8_slice); + generate_const_byte_offset_from_slice_harness!(i16, check_const_byte_offset_from_i16_slice); + generate_const_byte_offset_from_slice_harness!(i32, check_const_byte_offset_from_i32_slice); + generate_const_byte_offset_from_slice_harness!(i64, check_const_byte_offset_from_i64_slice); + generate_const_byte_offset_from_slice_harness!(i128, check_const_byte_offset_from_i128_slice); + generate_const_byte_offset_from_slice_harness!(isize, check_const_byte_offset_from_isize_slice); + #[kani::proof_for_contract(<*const ()>::byte_offset)] #[kani::should_panic] pub fn check_const_byte_offset_unit_invalid_count() { diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs index fd7e4d80e676d..ef291233ae336 100644 --- a/library/core/src/ptr/mut_ptr.rs +++ b/library/core/src/ptr/mut_ptr.rs @@ -894,6 +894,16 @@ impl *mut T { #[stable(feature = "pointer_byte_offsets", 
since = "1.75.0")] #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")] #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces + #[requires( + (mem::size_of_val_raw(self) != 0) && + // Ensures subtracting `origin` from `self` doesn't overflow + (self.addr() as isize).checked_sub(origin.addr() as isize).is_some() && + // Ensure both pointers are in the same allocation or are pointing to the same address + (self.addr() == origin.addr() || + core::ub_checks::same_allocation(self as *const u8, origin as *const u8)) + )] + // The result should equal the distance in terms of bytes + #[ensures(|result| *result == (self.addr() as isize - origin.addr() as isize))] pub const unsafe fn byte_offset_from(self, origin: *const U) -> isize { // SAFETY: the caller must uphold the safety contract for `offset_from`. unsafe { self.cast::().offset_from(origin.cast::()) } @@ -2236,9 +2246,202 @@ mod verify { // The array's length is set to an arbitrary value, which defines its size. // In this case, implementing a dynamic array is not possible, because - // PointerGenerator does not support dynamic sized arrays. + // PointerGenerator or any_array() do not support dynamic sized arrays. const ARRAY_LEN: usize = 40; + #[kani::proof] + pub fn check_mut_byte_offset_from_fixed_offset() { + let mut arr: [u32; ARRAY_LEN] = kani::Arbitrary::any_array(); + let offset: usize = kani::any_where(|&x| x <= ARRAY_LEN); + let origin_ptr: *mut u32 = arr.as_mut_ptr(); + let self_ptr: *mut u32 = unsafe { origin_ptr.byte_offset(offset as isize) }; + let result: isize = unsafe { self_ptr.byte_offset_from(origin_ptr) }; + assert_eq!(result, offset as isize); + assert_eq!(result, (self_ptr.addr() as isize - origin_ptr.addr() as isize)); + } + + // Proof for unit size + #[kani::proof_for_contract(<*mut ()>::byte_offset_from)] + pub fn check_mut_byte_offset_from_unit() { + let mut val: () = (); + let src_ptr: *mut () = &mut val; + let dest_ptr: *mut () = &mut val; + unsafe { + dest_ptr.byte_offset_from(src_ptr); + } + } + + // generate proofs for contracts for byte_offset_from to verify int and composite + // types + // - `$type`: pointee type + // - `$proof_name1`: name of the harness for single element + // - `$proof_name2`: name of the harness for array of elements + macro_rules! 
generate_mut_byte_offset_from_harness { + ($type: ty, $proof_name1: ident, $proof_name2: ident) => { + // Proof for a single element + #[kani::proof_for_contract(<*mut $type>::byte_offset_from)] + pub fn $proof_name1() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::::new(); + let mut generator2 = PointerGenerator::::new(); + let ptr1: *mut $type = generator1.any_in_bounds().ptr; + let ptr2: *mut $type = if kani::any() { + generator1.any_alloc_status().ptr + } else { + generator2.any_alloc_status().ptr + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + + // Proof for large arrays + #[kani::proof_for_contract(<*mut $type>::byte_offset_from)] + pub fn $proof_name2() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let mut generator2 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let ptr1: *mut $type = generator1.any_in_bounds().ptr; + let ptr2: *mut $type = if kani::any() { + generator1.any_alloc_status().ptr + } else { + generator2.any_alloc_status().ptr + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + }; + } + + generate_mut_byte_offset_from_harness!( + u8, + check_mut_byte_offset_from_u8, + check_mut_byte_offset_from_u8_arr + ); + generate_mut_byte_offset_from_harness!( + u16, + check_mut_byte_offset_from_u16, + check_mut_byte_offset_from_u16_arr + ); + generate_mut_byte_offset_from_harness!( + u32, + check_mut_byte_offset_from_u32, + check_mut_byte_offset_from_u32_arr + ); + generate_mut_byte_offset_from_harness!( + u64, + check_mut_byte_offset_from_u64, + check_mut_byte_offset_from_u64_arr + ); + generate_mut_byte_offset_from_harness!( + u128, + check_mut_byte_offset_from_u128, + check_mut_byte_offset_from_u128_arr + ); + generate_mut_byte_offset_from_harness!( + usize, + check_mut_byte_offset_from_usize, + check_mut_byte_offset_from_usize_arr + ); + + generate_mut_byte_offset_from_harness!( + i8, + check_mut_byte_offset_from_i8, + check_mut_byte_offset_from_i8_arr + ); + generate_mut_byte_offset_from_harness!( + i16, + check_mut_byte_offset_from_i16, + check_mut_byte_offset_from_i16_arr + ); + generate_mut_byte_offset_from_harness!( + i32, + check_mut_byte_offset_from_i32, + check_mut_byte_offset_from_i32_arr + ); + generate_mut_byte_offset_from_harness!( + i64, + check_mut_byte_offset_from_i64, + check_mut_byte_offset_from_i64_arr + ); + generate_mut_byte_offset_from_harness!( + i128, + check_mut_byte_offset_from_i128, + check_mut_byte_offset_from_i128_arr + ); + generate_mut_byte_offset_from_harness!( + isize, + check_mut_byte_offset_from_isize, + check_mut_byte_offset_from_isize_arr + ); + + generate_mut_byte_offset_from_harness!( + (i8, i8), + check_mut_byte_offset_from_tuple_1, + check_mut_byte_offset_from_tuple_1_arr + ); + generate_mut_byte_offset_from_harness!( + (f64, bool), + check_mut_byte_offset_from_tuple_2, + check_mut_byte_offset_from_tuple_2_arr + ); + generate_mut_byte_offset_from_harness!( + (u32, i16, f32), + check_mut_byte_offset_from_tuple_3, + check_mut_byte_offset_from_tuple_3_arr + ); + generate_mut_byte_offset_from_harness!( + ((), bool, u8, u16, i32, f64, i128, usize), + check_mut_byte_offset_from_tuple_4, + check_mut_byte_offset_from_tuple_4_arr + ); + + // The length of a slice is set to an arbitrary value, which defines its size. 
+ // In this case, implementing a slice with a dynamic size set using kani::any() + // is not possible, because PointerGenerator does not support non-deterministic + // slice pointers. + const SLICE_LEN: usize = 10; + + // generate proofs for contracts for byte_offset_from to verify slices + // - `$type`: type of the underlyign element within the slice pointer + // - `$proof_name`: name of the harness + macro_rules! generate_mut_byte_offset_from_slice_harness { + ($type: ty, $proof_name: ident) => { + #[kani::proof_for_contract(<*mut [$type]>::byte_offset_from)] + pub fn $proof_name() { + const gen_size: usize = mem::size_of::<$type>(); + let mut generator1 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let mut generator2 = PointerGenerator::<{ gen_size * ARRAY_LEN }>::new(); + let ptr1: *mut [$type] = generator1.any_in_bounds().ptr as *mut [$type; SLICE_LEN]; + let ptr2: *mut [$type] = if kani::any() { + generator1.any_alloc_status().ptr as *mut [$type; SLICE_LEN] + } else { + generator2.any_alloc_status().ptr as *mut [$type; SLICE_LEN] + }; + + unsafe { + ptr1.byte_offset_from(ptr2); + } + } + }; + } + + generate_mut_byte_offset_from_slice_harness!(u8, check_mut_byte_offset_from_u8_slice); + generate_mut_byte_offset_from_slice_harness!(u16, check_mut_byte_offset_from_u16_slice); + generate_mut_byte_offset_from_slice_harness!(u32, check_mut_byte_offset_from_u32_slice); + generate_mut_byte_offset_from_slice_harness!(u64, check_mut_byte_offset_from_u64_slice); + generate_mut_byte_offset_from_slice_harness!(u128, check_mut_byte_offset_from_u128_slice); + generate_mut_byte_offset_from_slice_harness!(usize, check_mut_byte_offset_from_usize_slice); + generate_mut_byte_offset_from_slice_harness!(i8, check_mut_byte_offset_from_i8_slice); + generate_mut_byte_offset_from_slice_harness!(i16, check_mut_byte_offset_from_i16_slice); + generate_mut_byte_offset_from_slice_harness!(i32, check_mut_byte_offset_from_i32_slice); + generate_mut_byte_offset_from_slice_harness!(i64, check_mut_byte_offset_from_i64_slice); + generate_mut_byte_offset_from_slice_harness!(i128, check_mut_byte_offset_from_i128_slice); + generate_mut_byte_offset_from_slice_harness!(isize, check_mut_byte_offset_from_isize_slice); + #[kani::proof_for_contract(<*mut ()>::byte_offset)] #[kani::should_panic] pub fn check_mut_byte_offset_unit_invalid_count() {