diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs
index 6bd5814799ae0..37580aa4efdb3 100644
--- a/src/librustc/mir/interpret/mod.rs
+++ b/src/librustc/mir/interpret/mod.rs
@@ -635,11 +635,13 @@ impl UndefMask {
         }
     }
 
+    #[inline]
     pub fn get(&self, i: Size) -> bool {
         let (block, bit) = bit_index(i);
         (self.blocks[block] & 1 << bit) != 0
     }
 
+    #[inline]
     pub fn set(&mut self, i: Size, new_state: bool) {
         let (block, bit) = bit_index(i);
         if new_state {
@@ -664,6 +666,7 @@ impl UndefMask {
     }
 }
 
+#[inline]
 fn bit_index(bits: Size) -> (usize, usize) {
     let bits = bits.bytes();
     let a = bits / BLOCK_SIZE;
diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs
index ea667273ecead..1faa23637a5ed 100644
--- a/src/librustc_mir/interpret/eval_context.rs
+++ b/src/librustc_mir/interpret/eval_context.rs
@@ -591,10 +591,14 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M
 
                 let (dest, dest_align) = self.force_allocation(dest)?.to_ptr_align();
 
-                // FIXME: speed up repeat filling
-                for i in 0..length {
-                    let elem_dest = dest.ptr_offset(elem_size * i as u64, &self)?;
-                    self.write_value_to_ptr(value, elem_dest, dest_align, elem_ty)?;
+                if length > 0 {
+                    // write the first element, then memcpy it over the remaining `length - 1` slots
+                    self.write_value_to_ptr(value, dest, dest_align, elem_ty)?;
+
+                    if length > 1 {
+                        let rest = dest.ptr_offset(elem_size, &self)?;
+                        self.memory.copy_repeatedly(dest, dest_align, rest, dest_align, elem_size, length - 1, false)?;
+                    }
                 }
             }
 
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index ad571fbe90d5d..bf720540bdcd4 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -594,6 +594,19 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         dest_align: Align,
         size: Size,
         nonoverlapping: bool,
+    ) -> EvalResult<'tcx> {
+        self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
+    }
+
+    pub fn copy_repeatedly(
+        &mut self,
+        src: Scalar,
+        src_align: Align,
+        dest: Scalar,
+        dest_align: Align,
+        size: Size,
+        length: u64,
+        nonoverlapping: bool,
     ) -> EvalResult<'tcx> {
         // Empty accesses don't need to be valid pointers, but they should still be aligned
         self.check_align(src, src_align)?;
@@ -608,16 +621,24 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         // first copy the relocations to a temporary buffer, because
         // `get_bytes_mut` will clear the relocations, which is correct,
         // since we don't want to keep any relocations at the target.
-        let relocations: Vec<_> = self.relocations(src, size)?
-            .iter()
-            .map(|&(offset, alloc_id)| {
-                // Update relocation offsets for the new positions in the destination allocation.
-                (offset + dest.offset - src.offset, alloc_id)
-            })
-            .collect();
+        let relocations = {
+            let relocations = self.relocations(src, size)?;
+            let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
+            for i in 0..length {
+                new_relocations.extend(
+                    relocations
+                    .iter()
+                    .map(|&(offset, alloc_id)| {
+                        (offset + dest.offset - src.offset + (size * i), alloc_id) // the i-th repetition starts `i * size` bytes into dest
+                    })
+                );
+            }
+
+            new_relocations
+        };
 
         let src_bytes = self.get_bytes_unchecked(src, size, src_align)?.as_ptr();
-        let dest_bytes = self.get_bytes_mut(dest, size, dest_align)?.as_mut_ptr();
+        let dest_bytes = self.get_bytes_mut(dest, size * length, dest_align)?.as_mut_ptr();
 
         // SAFE: The above indexing would have panicked if there weren't at least `size` bytes
         // behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
@@ -634,13 +655,18 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
                     ));
                 }
             }
-            ptr::copy(src_bytes, dest_bytes, size.bytes() as usize);
+
+            for i in 0..length {
+                ptr::copy(src_bytes, dest_bytes.offset((size.bytes() * i) as isize), size.bytes() as usize);
+            }
         } else {
-            ptr::copy_nonoverlapping(src_bytes, dest_bytes, size.bytes() as usize);
+            for i in 0..length {
+                ptr::copy_nonoverlapping(src_bytes, dest_bytes.offset((size.bytes() * i) as isize), size.bytes() as usize);
+            }
         }
     }
 
-        self.copy_undef_mask(src, dest, size)?;
+        self.copy_undef_mask(src, dest, size, length)?;
 
         // copy back the relocations
         self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
@@ -861,21 +887,25 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         src: Pointer,
         dest: Pointer,
         size: Size,
+        repeat: u64,
     ) -> EvalResult<'tcx> {
         // The bits have to be saved locally before writing to dest in case src and dest overlap.
         assert_eq!(size.bytes() as usize as u64, size.bytes());
-        let mut v = Vec::with_capacity(size.bytes() as usize);
+
+        let undef_mask = self.get(src.alloc_id)?.undef_mask.clone();
+        let dest_allocation = self.get_mut(dest.alloc_id)?;
+
         for i in 0..size.bytes() {
-            let defined = self.get(src.alloc_id)?.undef_mask.get(src.offset + Size::from_bytes(i));
-            v.push(defined);
-        }
-        for (i, defined) in v.into_iter().enumerate() {
-            self.get_mut(dest.alloc_id)?.undef_mask.set(
-                dest.offset +
-                Size::from_bytes(i as u64),
-                defined,
-            );
+            let defined = undef_mask.get(src.offset + Size::from_bytes(i));
+
+            for j in 0..repeat {
+                dest_allocation.undef_mask.set(
+                    dest.offset + Size::from_bytes(i + (size.bytes() * j)),
+                    defined
+                );
+            }
         }
+
         Ok(())
     }
 
diff --git a/src/librustc_target/abi/mod.rs b/src/librustc_target/abi/mod.rs
index 9003e30357cbd..5762269242663 100644
--- a/src/librustc_target/abi/mod.rs
+++ b/src/librustc_target/abi/mod.rs
@@ -229,37 +229,44 @@ pub struct Size {
 impl Size {
     pub const ZERO: Size = Self::from_bytes(0);
 
+    #[inline]
     pub fn from_bits(bits: u64) -> Size {
         // Avoid potential overflow from `bits + 7`.
         Size::from_bytes(bits / 8 + ((bits % 8) + 7) / 8)
     }
 
+    #[inline]
     pub const fn from_bytes(bytes: u64) -> Size {
         Size {
             raw: bytes
         }
     }
 
+    #[inline]
     pub fn bytes(self) -> u64 {
         self.raw
     }
 
+    #[inline]
     pub fn bits(self) -> u64 {
         self.bytes().checked_mul(8).unwrap_or_else(|| {
             panic!("Size::bits: {} bytes in bits doesn't fit in u64", self.bytes())
         })
     }
 
+    #[inline]
     pub fn abi_align(self, align: Align) -> Size {
         let mask = align.abi() - 1;
         Size::from_bytes((self.bytes() + mask) & !mask)
     }
 
+    #[inline]
     pub fn is_abi_aligned(self, align: Align) -> bool {
         let mask = align.abi() - 1;
         self.bytes() & mask == 0
     }
 
+    #[inline]
     pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: C) -> Option<Size> {
         let dl = cx.data_layout();
 
@@ -272,6 +279,7 @@ impl Size {
         }
     }
 
+    #[inline]
     pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: C) -> Option<Size> {
         let dl = cx.data_layout();
 
@@ -289,6 +297,7 @@ impl Size {
 impl Add for Size {
     type Output = Size;
 
+    #[inline]
     fn add(self, other: Size) -> Size {
         Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
             panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
@@ -298,6 +307,7 @@ impl Add for Size {
 impl Sub for Size {
     type Output = Size;
 
+    #[inline]
     fn sub(self, other: Size) -> Size {
         Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
             panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
@@ -307,6 +317,7 @@ impl Sub for Size {
 impl Mul<Size> for u64 {
     type Output = Size;
 
+    #[inline]
     fn mul(self, size: Size) -> Size {
         size * self
     }
@@ -314,6 +325,7 @@ impl Mul<Size> for u64 {
 impl Mul<u64> for Size {
     type Output = Size;
 
+    #[inline]
     fn mul(self, count: u64) -> Size {
         match self.bytes().checked_mul(count) {
             Some(bytes) => Size::from_bytes(bytes),
@@ -325,6 +337,7 @@ impl Mul<u64> for Size {
 }
 
 impl AddAssign for Size {
+    #[inline]
     fn add_assign(&mut self, other: Size) {
         *self = *self + other;
     }
diff --git a/src/test/compile-fail/const-err4.rs b/src/test/compile-fail/const-err4.rs
new file mode 100644
index 0000000000000..09ebf1681c5e0
--- /dev/null
+++ b/src/test/compile-fail/const-err4.rs
@@ -0,0 +1,24 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(Copy, Clone)]
+union Foo {
+    a: isize,
+    b: (),
+}
+
+enum Bar {
+    Boo = [unsafe { Foo { b: () }.a }; 4][3],
+    //~^ ERROR constant evaluation of enum discriminant resulted in non-integer
+}
+
+fn main() {
+    assert_ne!(Bar::Boo as isize, 0);
+}
diff --git a/src/test/run-pass/const-repeated-values.rs b/src/test/run-pass/const-repeated-values.rs
new file mode 100644
index 0000000000000..1d749a2626e9f
--- /dev/null
+++ b/src/test/run-pass/const-repeated-values.rs
@@ -0,0 +1,19 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const FOO: isize = 42;
+
+enum Bar {
+    Boo = *[&FOO; 4][3],
+}
+
+fn main() {
+    assert_eq!(Bar::Boo as isize, 42);
+}