Make Allocation::bytes private #63561

Merged
merged 12 commits on Sep 3, 2019
71 changes: 63 additions & 8 deletions src/librustc/mir/interpret/allocation.rs
@@ -35,7 +35,7 @@ pub struct Allocation<Tag=(),Extra=()> {
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
/// at the given offset.
pub relocations: Relocations<Tag>,
relocations: Relocations<Tag>,
/// Denotes which part of this allocation is initialized.
undef_mask: UndefMask,
/// The size of the allocation. Currently, must always equal `bytes.len()`.
@@ -136,18 +136,23 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
self.size.bytes() as usize
}

/// Look at a slice which may describe undefined bytes or describe a relocation. This differs
/// Looks at a slice which may describe undefined bytes or describe a relocation. This differs
/// from `get_bytes_with_undef_and_ptr` in that it does no relocation checks (even on the
/// edges) at all. It further ignores `AllocationExtra` callbacks.
/// This must not be used for reads affecting the interpreter execution.
pub fn inspect_with_undef_and_ptr_outside_interpreter(&self, range: Range<usize>) -> &[u8] {
&self.bytes[range]
}

/// View the undef mask.
/// Returns the undef mask.
pub fn undef_mask(&self) -> &UndefMask {
&self.undef_mask
}

/// Returns the relocation list.
pub fn relocations(&self) -> &Relocations<Tag> {
&self.relocations
}
}

impl<'tcx> rustc_serialize::UseSpecializedDecodable for &'tcx Allocation {}
@@ -459,7 +464,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// Relocations
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Returns all relocations overlapping with the given ptr-offset pair.
pub fn relocations(
pub fn get_relocations(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
@@ -480,7 +485,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
ptr: Pointer<Tag>,
size: Size,
) -> InterpResult<'tcx> {
if self.relocations(cx, ptr, size).is_empty() {
if self.get_relocations(cx, ptr, size).is_empty() {
Ok(())
} else {
throw_unsup!(ReadPointerAsBytes)
@@ -502,7 +507,7 @@ impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
// Find the start and end of the given range and its outermost relocations.
let (first, last) = {
// Find all relocations overlapping the given range.
let relocations = self.relocations(cx, ptr, size);
let relocations = self.get_relocations(cx, ptr, size);
if relocations.is_empty() {
return Ok(());
}
@@ -583,7 +588,7 @@ pub struct AllocationDefinedness {
/// Transferring the definedness mask to other allocations.
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a run-length encoding of the undef_mask.
pub fn compress_defined_range(
pub fn compress_undef_range(
&self,
src: Pointer<Tag>,
size: Size,
@@ -622,7 +627,7 @@ impl<Tag, Extra> Allocation<Tag, Extra> {
}

/// Apply multiple instances of the run-length encoding to the undef_mask.
pub fn mark_compressed_range(
pub fn mark_compressed_undef_range(
&mut self,
defined: &AllocationDefinedness,
dest: Pointer<Tag>,
@@ -688,6 +693,56 @@ impl<Tag> DerefMut for Relocations<Tag> {
}
}

/// A partial, owned list of relocations to transfer into another allocation.
pub struct AllocationRelocations<Tag> {
relative_relocations: Vec<(Size, (Tag, AllocId))>,
}
Review comment (Member):
Interesting idea. Long-term (does not have to happen in this PR), it might make sense to combine this with the run-length encoded undef mask into one struct? They are AFAIK only used together, and basically together opaquely represent "metadata" (on top of the raw bytes) that memcpy needs to copy. Then there would be two operations, tentatively named get_metadata and apply_metadata or so.

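A minimal sketch of what that combined struct could look like, built on the helpers this PR introduces; the names `AllocationMetadata`, `get_metadata`, and `apply_metadata` are only the tentative ones floated in this comment, not part of the actual change:

```rust
// Hypothetical "metadata" bundle: everything memcpy must carry besides the
// raw bytes. Reuses the PR's AllocationDefinedness and AllocationRelocations.
pub struct AllocationMetadata<Tag> {
    /// Run-length encoded definedness, as produced by `compress_undef_range`.
    defined: AllocationDefinedness,
    /// Relocations already shifted to destination offsets, as produced by
    /// `prepare_relocation_copy`.
    relocations: AllocationRelocations<Tag>,
}

impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
    /// Gathers the non-byte state of the source range in one call.
    pub fn get_metadata(
        &self,
        cx: &impl HasDataLayout,
        src: Pointer<Tag>,
        size: Size,
        dest: Pointer<Tag>,
        length: u64,
    ) -> AllocationMetadata<Tag> {
        AllocationMetadata {
            defined: self.compress_undef_range(src, size),
            relocations: self.prepare_relocation_copy(cx, src, size, dest, length),
        }
    }

    /// Applies the gathered metadata to the destination range of `self`.
    pub fn apply_metadata(
        &mut self,
        metadata: AllocationMetadata<Tag>,
        dest: Pointer<Tag>,
        size: Size,
        repeat: u64,
    ) {
        self.mark_compressed_undef_range(&metadata.defined, dest, size, repeat);
        self.mark_relocation_range(metadata.relocations);
    }
}
```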

impl<Tag: Copy, Extra> Allocation<Tag, Extra> {
pub fn prepare_relocation_copy(
&self,
cx: &impl HasDataLayout,
src: Pointer<Tag>,
size: Size,
dest: Pointer<Tag>,
length: u64,
) -> AllocationRelocations<Tag> {
let relocations = self.get_relocations(cx, src, size);
if relocations.is_empty() {
return AllocationRelocations { relative_relocations: Vec::new() };
}

let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));

for i in 0..length {
new_relocations.extend(
relocations
.iter()
.map(|&(offset, reloc)| {
// compute offset for current repetition
let dest_offset = dest.offset + (i * size);
(
// shift offsets from source allocation to destination allocation
offset + dest_offset - src.offset,
reloc,
)
})
);
}

AllocationRelocations {
relative_relocations: new_relocations,
}
}

pub fn mark_relocation_range(
&mut self,
relocations: AllocationRelocations<Tag>,
) {
self.relocations.insert_presorted(relocations.relative_relocations);
}
}

////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////
14 changes: 7 additions & 7 deletions src/librustc_codegen_llvm/consts.rs
@@ -25,19 +25,19 @@ use rustc::hir::{self, CodegenFnAttrs, CodegenFnAttrFlags};
use std::ffi::{CStr, CString};

pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll Value {
let mut llvals = Vec::with_capacity(alloc.relocations.len() + 1);
let mut llvals = Vec::with_capacity(alloc.relocations().len() + 1);
let dl = cx.data_layout();
let pointer_size = dl.pointer_size.bytes() as usize;

let mut next_offset = 0;
for &(offset, ((), alloc_id)) in alloc.relocations.iter() {
for &(offset, ((), alloc_id)) in alloc.relocations().iter() {
let offset = offset.bytes();
assert_eq!(offset as usize as u64, offset);
let offset = offset as usize;
if offset > next_offset {
// This `inspect` is okay since we have check that it is not within a relocation, it is
// within the bounds of the allocation, and it doesn't affect interpreter execution (we
// inspect the result after interpreter execution). Any undef byte is replaced with
// This `inspect` is okay since we have checked that it is not within a relocation, it
// is within the bounds of the allocation, and it doesn't affect interpreter execution
// (we inspect the result after interpreter execution). Any undef byte is replaced with
// some arbitrary byte value.
//
// FIXME: relay undef bytes to codegen as undef const bytes
@@ -455,7 +455,7 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> {
//
// We could remove this hack whenever we decide to drop macOS 10.10 support.
if self.tcx.sess.target.target.options.is_like_osx {
assert_eq!(alloc.relocations.len(), 0);
assert_eq!(alloc.relocations().len(), 0);

let is_zeroed = {
// Treats undefined bytes as if they were defined with the byte value that
@@ -490,7 +490,7 @@ impl StaticMethods for CodegenCx<'ll, 'tcx> {
section.as_str().as_ptr() as *const _,
section.as_str().len() as c_uint,
);
assert!(alloc.relocations.is_empty());
assert!(alloc.relocations().is_empty());

// The `inspect` method is okay here because we checked relocations, and
// because we are doing this access to inspect the final interpreter state (not
4 changes: 2 additions & 2 deletions src/librustc_mir/interpret/intern.rs
@@ -94,7 +94,7 @@ impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> {
alloc.mutability = mutability;
// link the alloc id to the actual allocation
let alloc = tcx.intern_const_alloc(alloc);
self.leftover_relocations.extend(alloc.relocations.iter().map(|&(_, ((), reloc))| reloc));
self.leftover_relocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
Ok(None)
}
@@ -316,7 +316,7 @@ pub fn intern_const_alloc_recursive(
// So we hand-roll the interning logic here again
let alloc = tcx.intern_const_alloc(alloc);
tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
for &(_, ((), reloc)) in alloc.relocations.iter() {
for &(_, ((), reloc)) in alloc.relocations().iter() {
if leftover_relocations.insert(reloc) {
todo.push(reloc);
}
36 changes: 6 additions & 30 deletions src/librustc_mir/interpret/memory.rs
@@ -647,7 +647,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {

for i in 0..alloc.size.bytes() {
let i = Size::from_bytes(i);
if let Some(&(_, target_id)) = alloc.relocations.get(&i) {
if let Some(&(_, target_id)) = alloc.relocations().get(&i) {
if allocs_seen.insert(target_id) {
allocs_to_print.push_back(target_id);
}
@@ -808,32 +808,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// since we don't want to keep any relocations at the target.
// (`get_bytes_with_undef_and_ptr` below checks that there are no
// relocations overlapping the edges; those would not be handled correctly).
let relocations = {
let relocations = self.get(src.alloc_id)?.relocations(self, src, size);
if relocations.is_empty() {
// nothing to copy, ignore even the `length` loop
Vec::new()
} else {
let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
for i in 0..length {
new_relocations.extend(
relocations
.iter()
.map(|&(offset, reloc)| {
// compute offset for current repetition
let dest_offset = dest.offset + (i * size);
(
// shift offsets from source allocation to destination allocation
offset + dest_offset - src.offset,
reloc,
)
})
);
}

new_relocations
}
};
let relocations = self.get(src.alloc_id)?
.prepare_relocation_copy(self, src, size, dest, length);

let tcx = self.tcx.tcx;

@@ -880,7 +856,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// copy definedness to the destination
self.copy_undef_mask(src, dest, size, length)?;
// copy the relocations to the destination
self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
self.get_mut(dest.alloc_id)?.mark_relocation_range(relocations);

Ok(())
}
@@ -900,11 +876,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
assert_eq!(size.bytes() as usize as u64, size.bytes());

let src_alloc = self.get(src.alloc_id)?;
let compressed = src_alloc.compress_defined_range(src, size);
let compressed = src_alloc.compress_undef_range(src, size);

// now fill in all the data
let dest_allocation = self.get_mut(dest.alloc_id)?;
dest_allocation.mark_compressed_range(&compressed, dest, size, repeat);
dest_allocation.mark_compressed_undef_range(&compressed, dest, size, repeat);

Ok(())
}
3 changes: 2 additions & 1 deletion src/librustc_mir/interpret/snapshot.rs
@@ -287,7 +287,6 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation

fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
let Allocation {
relocations,
size,
align,
mutability,
@@ -300,7 +299,9 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation
// influence interpreter execution, but only to detect the error of cycles in evaluation
// dependencies.
let bytes = self.inspect_with_undef_and_ptr_outside_interpreter(all_bytes);

let undef_mask = self.undef_mask();
let relocations = self.relocations();

AllocationSnapshot {
bytes,
4 changes: 2 additions & 2 deletions src/librustc_mir/monomorphize/collector.rs
@@ -1202,7 +1202,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon
}
Some(GlobalAlloc::Memory(alloc)) => {
trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &((), inner) in alloc.relocations.values() {
for &((), inner) in alloc.relocations().values() {
collect_miri(tcx, inner, output);
}
},
@@ -1268,7 +1268,7 @@ fn collect_const<'tcx>(
collect_miri(tcx, ptr.alloc_id, output),
ConstValue::Slice { data: alloc, start: _, end: _ } |
ConstValue::ByRef { alloc, .. } => {
for &((), id) in alloc.relocations.values() {
for &((), id) in alloc.relocations().values() {
collect_miri(tcx, id, output);
}
}
2 changes: 1 addition & 1 deletion src/librustc_typeck/check/mod.rs
@@ -1570,7 +1570,7 @@ fn maybe_check_static_with_link_section(tcx: TyCtxt<'_>, id: DefId, span: Span)
} else {
bug!("Matching on non-ByRef static")
};
if alloc.relocations.len() != 0 {
if alloc.relocations().len() != 0 {
let msg = "statics with a custom `#[link_section]` must be a \
simple list of bytes on the wasm target with no \
extra levels of indirection such as references";
Expand Down