
Commit 6e6d0cb

Add debug assertions to some unsafe functions
These debug assertions are all implemented to run only at runtime, via `const_eval_select`, and in the error path they execute `intrinsics::abort` instead of a normal panicking debug assertion, to minimize the impact on code size when the assertions are enabled. Of all these changes, the bounds checks for unchecked indexing are expected to be the most impactful (case in point: they found a problem in rustc).
1 parent: ba14a83 · commit: 6e6d0cb
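
The headline effect (an illustration, not part of the commit): with a standard library built with debug assertions, for example via cargo's unstable -Zbuild-std with debug assertions enabled for the std crates, a violated precondition now aborts the process instead of proceeding into undefined behavior:

    fn main() {
        let v = [1u32, 2, 3];
        let i = 7; // out of bounds: violates the new `i < v.len()` precondition
        // UB in any build; with debug assertions in core this now aborts here
        // instead of silently reading out of bounds.
        let x = unsafe { *v.get_unchecked(i) };
        println!("{x}");
    }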

8 files changed: +123 -128 lines changed


compiler/rustc_data_structures/src/map_in_place.rs (+4 -4)

@@ -30,13 +30,13 @@ impl<T> MapInPlace<T> for Vec<T> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;

                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space
@@ -76,13 +76,13 @@ impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;

                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space

library/alloc/benches/vec.rs (+2 -2)

@@ -627,10 +627,10 @@ fn bench_map_regular(b: &mut Bencher) {
 fn bench_map_fast(b: &mut Bencher) {
     let data = black_box([(0, 0); LEN]);
     b.iter(|| {
-        let mut result = Vec::with_capacity(data.len());
+        let mut result: Vec<u32> = Vec::with_capacity(data.len());
         for i in 0..data.len() {
             unsafe {
-                *result.get_unchecked_mut(i) = data[i].0;
+                *result.as_mut_ptr().add(i) = data[i].0;
                 result.set_len(i);
             }
         }
library/core/src/intrinsics.rs (+48 -53)

@@ -1969,6 +1969,40 @@ extern "rust-intrinsic" {
 // (`transmute` also falls into this category, but it cannot be wrapped due to the
 // check that `T` and `U` have the same size.)

+/// Check that the preconditions of an unsafe function are followed, if debug_assertions are on,
+/// and only at runtime.
+///
+/// # Safety
+///
+/// Invoking this macro is only sound if the following code is already UB when the passed
+/// expression evaluates to false.
+///
+/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
+/// compile time, but the semantics of the contained `const_eval_select` must be the same at
+/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
+/// different behavior at compile time and at runtime, and invoking it is incorrect.
+///
+/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
+/// the occasional mistake, and this check should help them figure things out.
+#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
+macro_rules! assert_unsafe_precondition {
+    ($e:expr) => {
+        if cfg!(debug_assertions) {
+            // Use a closure so that we can capture arbitrary expressions from the invocation
+            let runtime = || {
+                if !$e {
+                    // abort instead of panicking to reduce impact on code size
+                    ::core::intrinsics::abort();
+                }
+            };
+            const fn comptime() {}
+
+            ::core::intrinsics::const_eval_select((), comptime, runtime);
+        }
+    };
+}
+pub(crate) use assert_unsafe_precondition;
+
 /// Checks whether `ptr` is properly aligned with respect to
 /// `align_of::<T>()`.
 pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
@@ -1977,7 +2011,6 @@ pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {

 /// Checks whether the regions of memory starting at `src` and `dst` of size
 /// `count * size_of::<T>()` do *not* overlap.
-#[cfg(debug_assertions)]
 pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
     let src_usize = src as usize;
     let dst_usize = dst as usize;
@@ -2079,28 +2112,16 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
         pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
     }

-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
-        if !is_aligned_and_not_null(src)
-            || !is_aligned_and_not_null(dst)
-            || !is_nonoverlapping(src, dst, count)
-        {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
-    unsafe {
-        const_eval_select((src, dst, count), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `copy_nonoverlapping` must be
     // upheld by the caller.
-    unsafe { copy_nonoverlapping(src, dst, count) }
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(src)
+                && is_aligned_and_not_null(dst)
+                && is_nonoverlapping(src, dst, count)
+        );
+        copy_nonoverlapping(src, dst, count)
+    }
 }

 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -2173,24 +2194,11 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
         fn copy<T>(src: *const T, dst: *mut T, count: usize);
     }

-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T) {
-        if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
+    // SAFETY: the safety contract for `copy` must be upheld by the caller.
     unsafe {
-        const_eval_select((src, dst), compiletime_check, runtime_check);
+        assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
+        copy(src, dst, count)
     }
-
-    // SAFETY: the safety contract for `copy` must be upheld by the caller.
-    unsafe { copy(src, dst, count) }
 }

 /// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -2274,24 +2282,11 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
         fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     }

-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(ptr: *mut T) {
-        debug_assert!(
-            is_aligned_and_not_null(ptr),
-            "attempt to write to unaligned or null pointer"
-        );
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_ptr: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
-    // not do them during compile time
+    // SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
     unsafe {
-        const_eval_select((dst,), compiletime_check, runtime_check);
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
+        write_bytes(dst, val, count)
     }
-
-    // SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
-    unsafe { write_bytes(dst, val, count) }
 }

 /// Selects which function to call depending on the context.
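
Since `assert_unsafe_precondition!` is `pub(crate)` to core and `const_eval_select` is an unstable internal intrinsic, the pattern can only be approximated outside the standard library. Below is a rough, self-contained sketch of the runtime arm only (the compile-time arm is a no-op); the names `assert_unsafe_precondition` and `copy_checked` here are illustrative, not real APIs:

    // Approximation of the macro's runtime behavior using stable Rust.
    macro_rules! assert_unsafe_precondition {
        ($e:expr) => {
            if cfg!(debug_assertions) && !$e {
                // abort instead of panicking to keep the code-size impact small
                std::process::abort();
            }
        };
    }

    // Hypothetical wrapper showing how a caller uses the check.
    unsafe fn copy_checked(src: *const u8, dst: *mut u8, count: usize) {
        assert_unsafe_precondition!(!src.is_null() && !dst.is_null());
        // SAFETY: deferred to our caller, as in the real intrinsics wrappers.
        unsafe { std::ptr::copy_nonoverlapping(src, dst, count) }
    }

    fn main() {
        let src = [1u8, 2, 3];
        let mut dst = [0u8; 3];
        unsafe { copy_checked(src.as_ptr(), dst.as_mut_ptr(), src.len()) };
        assert_eq!(dst, [1, 2, 3]);
    }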

library/core/src/num/nonzero.rs (+5 -1)

@@ -52,9 +52,13 @@ macro_rules! nonzero_integers {
                 #[$const_new_unchecked_stability]
                 #[must_use]
                 #[inline]
+                #[rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics)] // required by assert_unsafe_precondition
                 pub const unsafe fn new_unchecked(n: $Int) -> Self {
                     // SAFETY: this is guaranteed to be safe by the caller.
-                    unsafe { Self(n) }
+                    unsafe {
+                        core::intrinsics::assert_unsafe_precondition!(n != 0);
+                        Self(n)
+                    }
                 }

                 /// Creates a non-zero if the given value is not zero.
library/core/src/ptr/mod.rs (+20 -10)

@@ -75,7 +75,10 @@
 use crate::cmp::Ordering;
 use crate::fmt;
 use crate::hash;
-use crate::intrinsics::{self, abort, is_aligned_and_not_null};
+use crate::intrinsics::{
+    self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
+};
+
 use crate::mem::{self, MaybeUninit};

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -438,6 +441,16 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
         };
     }

+    // SAFETY: the caller must guarantee that `x` and `y` are
+    // valid for writes and properly aligned.
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(x)
+                && is_aligned_and_not_null(y)
+                && is_nonoverlapping(x, y, count)
+        );
+    }
+
     // NOTE(scottmcm) MIRI is disabled here as reading in smaller units is a
     // pessimization for it. Also, if the type contains any unaligned pointers,
     // copying those over multiple reads is difficult to support.
@@ -528,6 +541,7 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
     // and cannot overlap `src` since `dst` must point to a distinct
     // allocated object.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
         mem::swap(&mut *dst, &mut src); // cannot overlap
     }
     src
@@ -1007,12 +1021,11 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn read_volatile<T>(src: *const T) -> T {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(src) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_load`.
-    unsafe { intrinsics::volatile_load(src) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(src));
+        intrinsics::volatile_load(src)
+    }
 }

 /// Performs a volatile write of a memory location with the given value without
@@ -1078,12 +1091,9 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_store`.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
         intrinsics::volatile_store(dst, src);
     }
 }
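
A hedged illustration of what the volatile-access checks catch (not part of the diff; again assuming a core built with debug assertions):

    fn main() {
        let x: u32 = 0xDEAD_BEEF;
        let p: *const u32 = &x;

        // Sound: `p` is non-null and aligned for u32, so the
        // `is_aligned_and_not_null` precondition holds.
        let v = unsafe { std::ptr::read_volatile(p) };
        assert_eq!(v, 0xDEAD_BEEF);

        // Unsound: a misaligned pointer violates the precondition; the new
        // assertion aborts instead of performing a misaligned (UB) load.
        // let bad = p.cast::<u8>().wrapping_add(1).cast::<u32>();
        // let _ = unsafe { std::ptr::read_volatile(bad) };
    }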

library/core/src/slice/index.rs (+15 -3)

@@ -1,5 +1,6 @@
 //! Indexing implementations for `[T]`.

+use crate::intrinsics::assert_unsafe_precondition;
 use crate::intrinsics::const_eval_select;
 use crate::ops;
 use crate::ptr;
@@ -219,13 +220,19 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { slice.as_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_ptr().add(self)
+        }
     }

     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
         // SAFETY: see comments for `get_unchecked` above.
-        unsafe { slice.as_mut_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_mut_ptr().add(self)
+        }
     }

     #[inline]
@@ -272,13 +279,18 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }
+
+        unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
+            ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
+        }
     }

     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
         // SAFETY: see comments for `get_unchecked` above.
         unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
             ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
         }
     }
library/core/src/slice/mod.rs (+16 -17)

@@ -7,6 +7,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]

 use crate::cmp::Ordering::{self, Greater, Less};
+use crate::intrinsics::{assert_unsafe_precondition, exact_div};
 use crate::marker::Copy;
 use crate::mem;
 use crate::num::NonZeroUsize;
@@ -637,15 +638,10 @@
     #[unstable(feature = "slice_swap_unchecked", issue = "88539")]
    #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
     pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
-        #[cfg(debug_assertions)]
-        {
-            let _ = &self[a];
-            let _ = &self[b];
-        }
-
         let ptr = self.as_mut_ptr();
         // SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
         unsafe {
+            assert_unsafe_precondition!(a < self.len() && b < self.len());
             ptr::swap(ptr.add(a), ptr.add(b));
         }
     }
@@ -950,11 +946,11 @@
     #[unstable(feature = "slice_as_chunks", issue = "74985")]
     #[inline]
     pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts(self.as_ptr().cast(), new_len) }
@@ -1086,11 +1082,11 @@
     #[unstable(feature = "slice_as_chunks", issue = "74985")]
     #[inline]
     pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts_mut(self.as_mut_ptr().cast(), new_len) }
@@ -1646,7 +1642,10 @@
         //
         // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
         // is fine.
-        unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
+        unsafe {
+            assert_unsafe_precondition!(mid <= len);
+            (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
+        }
     }

     /// Divides one slice into an array and a remainder slice at an index.
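
A final illustration for the chunking APIs (example only; `as_chunks_unchecked` was nightly-only behind the `slice_as_chunks` feature at the time of this commit, and the usual debug-assertions caveat applies):

    #![feature(slice_as_chunks)]

    fn main() {
        let s = [1u8, 2, 3, 4, 5, 6];

        // Sound: N = 2 is nonzero and divides s.len() == 6 exactly.
        let chunks: &[[u8; 2]] = unsafe { s.as_chunks_unchecked::<2>() };
        assert_eq!(chunks, &[[1, 2], [3, 4], [5, 6]]);

        // Unsound: 4 does not divide 6, violating `N != 0 && len % N == 0`;
        // the new assertion aborts instead of handing `exact_div` a
        // non-exact division (which is UB).
        // let _ = unsafe { s.as_chunks_unchecked::<4>() };
    }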
