CTFE engine: small cleanups #87089

Merged: 2 commits, Jul 13, 2021
compiler/rustc_middle/src/mir/interpret/allocation.rs (9 additions, 10 deletions)
@@ -512,7 +512,7 @@ impl InitMaskCompressed {
/// Transferring the initialization mask to other allocations.
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a run-length encoding of the initialization mask.
- pub fn compress_uninit_range(&self, src: Pointer<Tag>, size: Size) -> InitMaskCompressed {
+ pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed {
// Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`),
// a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from
// the source and write it to the destination. Even if we optimized the memory accesses,
@@ -526,13 +526,13 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
// where each element toggles the state.

let mut ranges = smallvec::SmallVec::<[u64; 1]>::new();
- let initial = self.init_mask.get(src.offset);
+ let initial = self.init_mask.get(range.start);
let mut cur_len = 1;
let mut cur = initial;

- for i in 1..size.bytes() {
+ for i in 1..range.size.bytes() {
// FIXME: optimize to bitshift the current uninitialized block's bits and read the top bit.
- if self.init_mask.get(src.offset + Size::from_bytes(i)) == cur {
+ if self.init_mask.get(range.start + Size::from_bytes(i)) == cur {
cur_len += 1;
} else {
ranges.push(cur_len);
@@ -550,24 +550,23 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
pub fn mark_compressed_init_range(
&mut self,
defined: &InitMaskCompressed,
- dest: Pointer<Tag>,
- size: Size,
+ range: AllocRange,
repeat: u64,
) {
// An optimization where we can just overwrite an entire range of initialization
// bits if they are going to be uniformly `1` or `0`.
if defined.ranges.len() <= 1 {
self.init_mask.set_range_inbounds(
- dest.offset,
- dest.offset + size * repeat, // `Size` operations
+ range.start,
+ range.start + range.size * repeat, // `Size` operations
defined.initial,
);
return;
}

for mut j in 0..repeat {
- j *= size.bytes();
- j += dest.offset.bytes();
+ j *= range.size.bytes();
+ j += range.start.bytes();
let mut cur = defined.initial;
for range in &defined.ranges {
let old_j = j;
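The comment above `compress_uninit_range` describes the scheme: record the first bit's value, then store run lengths, with the value toggling after each run. Below is a minimal self-contained sketch of that toggling run-length encoding, with a plain `Vec<bool>` standing in for the bit-packed `InitMask` and `Vec<u64>` in place of `SmallVec` (toy code under those assumptions, not the rustc types):

```rust
/// Run-length encoding of an init mask: `initial` is the first bit's value,
/// and each entry in `ranges` is a run length, toggling the value afterwards.
struct Compressed {
    initial: bool,
    ranges: Vec<u64>,
}

fn compress(mask: &[bool], start: usize, size: usize) -> Compressed {
    let initial = mask[start];
    let mut ranges = Vec::new();
    let (mut cur, mut cur_len) = (initial, 1u64);
    for i in 1..size {
        if mask[start + i] == cur {
            cur_len += 1;
        } else {
            ranges.push(cur_len);
            cur = !cur;
            cur_len = 1;
        }
    }
    ranges.push(cur_len);
    Compressed { initial, ranges }
}

/// Apply the compressed mask to `repeat` back-to-back copies at `dest`,
/// mirroring `mark_compressed_init_range`.
fn mark(mask: &mut [bool], compressed: &Compressed, dest: usize, size: usize, repeat: usize) {
    // Fast path from the diff: a single run means the whole destination
    // range is uniformly initialized or uninitialized.
    if compressed.ranges.len() <= 1 {
        mask[dest..dest + size * repeat].fill(compressed.initial);
        return;
    }
    for rep in 0..repeat {
        let mut j = dest + rep * size;
        let mut cur = compressed.initial;
        for &len in &compressed.ranges {
            mask[j..j + len as usize].fill(cur);
            j += len as usize;
            cur = !cur;
        }
    }
}

fn main() {
    let src = [true, true, false, false, false, true];
    let c = compress(&src, 0, src.len());
    assert_eq!((c.initial, c.ranges.clone()), (true, vec![2, 3, 1]));

    // Writing the mask for two copies touches the source encoding only once.
    let mut dest = vec![false; 12];
    mark(&mut dest, &c, 0, src.len(), 2);
    assert_eq!(&dest[..6], &src[..]);
    assert_eq!(&dest[6..], &src[..]);
}
```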
compiler/rustc_mir/src/interpret/eval_context.rs (3 additions, 3 deletions)
@@ -18,8 +18,8 @@ use rustc_span::{Pos, Span};
use rustc_target::abi::{Align, HasDataLayout, LayoutOf, Size, TargetDataLayout};

use super::{
- Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, Operand, Place, PlaceTy,
- ScalarMaybeUninit, StackPopJump,
+ Immediate, MPlaceTy, Machine, MemPlace, MemPlaceMeta, Memory, MemoryKind, Operand, Place,
+ PlaceTy, ScalarMaybeUninit, StackPopJump,
};
use crate::transform::validate::equal_up_to_regions;
use crate::util::storage::AlwaysLiveLocals;
@@ -900,7 +900,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// due to the local having ZST type.
let ptr = ptr.assert_ptr();
trace!("deallocating local: {:?}", self.memory.dump_alloc(ptr.alloc_id));
- self.memory.deallocate_local(ptr)?;
+ self.memory.deallocate(ptr, None, MemoryKind::Stack)?;
};
Ok(())
}
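The inlined call now states the allocation kind explicitly; `deallocate`'s kind argument lets the engine reject frees of the wrong kind of memory, and a dangling id fails rather than being silently ignored the way the removed `deallocate_local` wrapper (next file) allowed. A toy sketch of that check, with hypothetical `Memory`/`MemoryKind` stand-ins rather than the real rustc_mir types:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for illustration; the real types are far richer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MemoryKind {
    Stack,
    Heap,
}

struct Memory {
    // Allocation id -> (kind it was allocated as, its bytes).
    alloc_map: HashMap<u64, (MemoryKind, Vec<u8>)>,
}

impl Memory {
    /// Free `alloc_id`, failing on a dangling id or a kind mismatch.
    fn deallocate(&mut self, alloc_id: u64, kind: MemoryKind) -> Result<(), String> {
        match self.alloc_map.get(&alloc_id) {
            Some((k, _)) if *k == kind => {
                self.alloc_map.remove(&alloc_id);
                Ok(())
            }
            Some((k, _)) => Err(format!("deallocating {:?} memory as {:?}", k, kind)),
            None => Err(format!("no live allocation with id {}", alloc_id)),
        }
    }
}

fn main() {
    let mut mem = Memory {
        alloc_map: HashMap::from([(0, (MemoryKind::Stack, vec![0u8; 4]))]),
    };
    // The wrong kind is rejected before anything is freed.
    assert!(mem.deallocate(0, MemoryKind::Heap).is_err());
    assert!(mem.deallocate(0, MemoryKind::Stack).is_ok());
    // A second free of the same local fails instead of silently succeeding.
    assert!(mem.deallocate(0, MemoryKind::Stack).is_err());
}
```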
compiler/rustc_mir/src/interpret/memory.rs (6 additions, 13 deletions)
@@ -276,17 +276,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
Ok(new_ptr)
}

- /// Deallocate a local, or do nothing if that local has been made into a global.
- pub fn deallocate_local(&mut self, ptr: Pointer<M::PointerTag>) -> InterpResult<'tcx> {
- // The allocation might be already removed by global interning.
- // This can only really happen in the CTFE instance, not in miri.
- if self.alloc_map.contains_key(&ptr.alloc_id) {
- self.deallocate(ptr, None, MemoryKind::Stack)
- } else {
- Ok(())
- }
- }

pub fn deallocate(
&mut self,
ptr: Pointer<M::PointerTag>,
@@ -1049,7 +1038,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
num_copies,
);
// Prepare a copy of the initialization mask.
- let compressed = src_alloc.compress_uninit_range(src, size);
+ let compressed = src_alloc.compress_uninit_range(alloc_range(src.offset, size));
// This checks relocation edges on the src.
let src_bytes = src_alloc
.get_bytes_with_uninit_and_ptr(&tcx, alloc_range(src.offset, size))
@@ -1110,7 +1099,11 @@ }
}

// now fill in all the "init" data
- dest_alloc.mark_compressed_init_range(&compressed, dest, size, num_copies);
+ dest_alloc.mark_compressed_init_range(
+ &compressed,
+ alloc_range(dest.offset, size),
+ num_copies,
+ );
// copy the relocations to the destination
dest_alloc.mark_relocation_range(relocations);

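The thread running through all three files is the same refactor: `(Pointer, Size)` argument pairs become a single `AllocRange`, built with `alloc_range(offset, size)` and read back as `range.start`/`range.size`. A minimal sketch of that pattern, with `u64` standing in for `rustc_target::abi::Size` (the real `AllocRange` lives in rustc_middle and may differ in detail):

```rust
// Toy version of the `AllocRange` pattern: bundle offset and length into one
// value so helpers take a single argument. Field names match the diff's
// `range.start` / `range.size` accesses.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct AllocRange {
    start: u64,
    size: u64,
}

// Shorthand constructor, mirroring the `alloc_range(offset, size)` calls above.
fn alloc_range(start: u64, size: u64) -> AllocRange {
    AllocRange { start, size }
}

impl AllocRange {
    // One-past-the-end offset, convenient for bounds checks.
    fn end(self) -> u64 {
        self.start + self.size
    }
}

fn main() {
    // `compress_uninit_range(alloc_range(src.offset, size))` now passes one
    // value where the old signature took a pointer and a size separately.
    let range = alloc_range(8, 4); // bytes 8..12 of an allocation
    assert_eq!(range.end(), 12);
    assert_eq!((range.start, range.size), (8, 4));
}
```

Bundling the pair also keeps call sites uniform: every mask and byte-access helper in the diff now speaks in terms of one range value instead of two loose arguments that must be kept in sync.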