Skip to content

Commit e46b790

Browse files
authored
Rollup merge of rust-lang#87089 - RalfJung:ctfe-memory-cleanup, r=oli-obk
CTFE engine: small cleanups I noticed these while preparing a large PR, and figured I'd better send them ahead to not muddy the diff unnecessarily. - remove remaining use of Pointer in Allocation API (I missed those in rust-lang#85472) - remove unnecessary deallocate_local hack (this logic does not seem necessary any more) r? `@oli-obk`
2 parents 15af98d + 6b26640 commit e46b790

File tree

3 files changed

+18
-26
lines changed

3 files changed

+18
-26
lines changed

compiler/rustc_middle/src/mir/interpret/allocation.rs

+9-10
Original file line numberDiff line numberDiff line change
@@ -512,7 +512,7 @@ impl InitMaskCompressed {
512512
/// Transferring the initialization mask to other allocations.
513513
impl<Tag, Extra> Allocation<Tag, Extra> {
514514
/// Creates a run-length encoding of the initialization mask.
515-
pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
515+
pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
516516
// Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
517517
// a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
518518
// the source and write it to the destination. Even if we optimized the memory accesses,
@@ -526,13 +526,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
526526
// where each element toggles the state.
527527

528528
let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
529-
let initial = self.init_mask.get(src.offset);
529+
let initial = self.init_mask.get(range.start);
530530
let mut cur_len = 1;
531531
let mut cur = initial;
532532

533-
for i in 1..size.bytes() {
533+
for i in 1..range.size.bytes() {
534534
// FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
535-
if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
535+
if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
536536
cur_len += 1;
537537
} else {
538538
ranges.push(cur_len);
@@ -550,24 +550,23 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
550550
pub fn mark_compressed_init_range(
551551
&mut self,
552552
defined: &InitMaskCompressed,
553-
dest: Pointer<Tag>,
554-
size: Size,
553+
range: AllocRange,
555554
repeat: u64,
556555
) {
557556
// An optimization where we can just overwrite an entire range of initialization
558557
// bits if they are going to be uniformly `1` or `0`.
559558
if defined.ranges.len() <= 1 {
560559
self.init_mask.set_range_inbounds(
561-
dest.offset,
562-
dest.offset + size * repeat, // `Size` operations
560+
range.start,
561+
range.start + range.size * repeat, // `Size` operations
563562
defined.initial,
564563
);
565564
return;
566565
}
567566

568567
for mut j in 0..repeat {
569-
j *= size.bytes();
570-
j += dest.offset.bytes();
568+
j *= range.size.bytes();
569+
j += range.start.bytes();
571570
let mut cur = defined.initial;
572571
for range in &defined.ranges {
573572
let old_j = j;

compiler/rustc_mir/src/interpret/eval_context.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@ use rustc_span::{Pos, Span};
1818
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};
1919

2020
use super::{
21-
Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, Operand, Place, PlaceTy,
22-
ScalarMaybeUninit, StackPopJump,
21+
Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place,
22+
PlaceTy, ScalarMaybeUninit, StackPopJump,
2323
};
2424
use crate::transform::validate::equal_up_to_regions;
2525
use crate::util::storage::AlwaysLiveLocals;
@@ -900,7 +900,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
900900
// due to the local having ZST type.
901901
let ptr = ptr.assert_ptr();
902902
trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
903-
self.memory.deallocate_local(ptr)?;
903+
self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
904904
};
905905
Ok(())
906906
}

compiler/rustc_mir/src/interpret/memory.rs

+6-13
Original file line numberDiff line numberDiff line change
@@ -276,17 +276,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
276276
Ok(new_ptr)
277277
}
278278

279-
/// Deallocate a local, or do nothing if that local has been made into a global.
280-
pub fn deallocate_local(&mut self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx> {
281-
// The allocation might be already removed by global interning.
282-
// This can only really happen in the CTFE instance, not in miri.
283-
if self.alloc_map.contains_key(&ptr.alloc_id) {
284-
self.deallocate(ptr, None, MemoryKind::Stack)
285-
} else {
286-
Ok(())
287-
}
288-
}
289-
290279
pub fn deallocate(
291280
&mut self,
292281
ptr: Pointer<M::PointerTag>,
@@ -1049,7 +1038,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
10491038
num_copies,
10501039
);
10511040
// Prepare a copy of the initialization mask.
1052-
let compressed = src_alloc.compress_uninit_range(src, size);
1041+
let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
10531042
// This checks relocation edges on the src.
10541043
let src_bytes = src_alloc
10551044
.get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
@@ -1110,7 +1099,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
11101099
}
11111100

11121101
// now fill in all the "init" data
1113-
dest_alloc.mark_compressed_init_range(&compressed, dest, size, num_copies);
1102+
dest_alloc.mark_compressed_init_range(
1103+
&compressed,
1104+
alloc_range(dest.offset, size),
1105+
num_copies,
1106+
);
11141107
// copy the relocations to the destination
11151108
dest_alloc.mark_relocation_range(relocations);
11161109

0 commit comments

Comments (0)