From c0c3327ade8b0caa3043425ee45842c86fbd4f76 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?=
Date: Thu, 11 Jun 2020 00:00:00 +0000
Subject: [PATCH 1/2] Check for overflow in DroplessArena and return aligned
 pointer

* Check for overflow when calculating the slice start & end positions.
* Align the pointer obtained from the allocator, ensuring that it
  satisfies the user-requested alignment (the allocator is only asked
  for a layout compatible with a u8 slice).
* Remove an incorrect assertion from DroplessArena::align.
---
 src/librustc_arena/lib.rs | 53 ++++++++++++++++++++++++---------------
 1 file changed, 33 insertions(+), 20 deletions(-)

diff --git a/src/librustc_arena/lib.rs b/src/librustc_arena/lib.rs
index 4da336f8e288d..c8400a953a6e8 100644
--- a/src/librustc_arena/lib.rs
+++ b/src/librustc_arena/lib.rs
@@ -333,13 +333,6 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline]
-    fn align(&self, align: usize) {
-        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
-        self.ptr.set(final_address as *mut u8);
-        assert!(self.ptr <= self.end);
-    }
-
     #[inline(never)]
     #[cold]
     fn grow(&self, additional: usize) {
@@ -370,22 +363,42 @@ impl DroplessArena {
         }
     }
 
+    /// Allocates a byte slice with specified size and alignment from the
+    /// current memory chunk. Returns `None` if there is no free space left to
+    /// satisfy the request.
     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        unsafe {
-            assert!(bytes != 0);
-
-            self.align(align);
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
+        let ptr = self.ptr.get() as usize;
+        let end = self.end.get() as usize;
+        // The allocation request fits into the current chunk iff:
+        //
+        // let aligned = align_to(ptr, align);
+        // ptr <= aligned && aligned + bytes <= end
+        //
+        // Except that we work with fixed width integers and need to be careful
+        // about potential overflow in the calculation. If the overflow does
+        // happen, then we definitely don't have enough free space and need to
+        // grow the arena.
+        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
+        let new_ptr = aligned.checked_add(bytes)?;
+        if new_ptr <= end {
+            self.ptr.set(new_ptr as *mut u8);
+            unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
+        } else {
+            None
+        }
+    }
 
-            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
-            if (future_end as *mut u8) > self.end.get() {
-                self.grow(bytes);
+    #[inline]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+        assert!(bytes != 0);
+        loop {
+            if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
+                break a;
             }
-
-            let ptr = self.ptr.get();
-            // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
-            slice::from_raw_parts_mut(ptr, bytes)
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(bytes);
         }
     }
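The core of the first patch is the checked align-up arithmetic. Below is a
minimal standalone sketch of that computation, assuming `align` is a power
of two; the name `bump` and its integer-address signature are illustrative
only, not part of the patch.

    /// Bump-allocate `bytes` bytes from the region `ptr..end`, rounding
    /// `ptr` up to `align` first. Returns the aligned start address, or
    /// `None` if the arithmetic overflows or the region is too small.
    fn bump(ptr: usize, end: usize, bytes: usize, align: usize) -> Option<usize> {
        debug_assert!(align.is_power_of_two());
        // `ptr + align - 1` can wrap around when `ptr` is close to
        // usize::MAX, which the removed `align` method did not guard
        // against; `checked_add` turns that case into `None`.
        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
        // The end of the allocation can overflow as well.
        let new_ptr = aligned.checked_add(bytes)?;
        if new_ptr <= end { Some(aligned) } else { None }
    }

    // For example: bump(0x1001, 0x2000, 16, 8) == Some(0x1008), while
    // bump(usize::MAX - 2, usize::MAX, 8, 8) == None instead of wrapping.
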
From 1f0895162ba5a783d4d73d5c263552eaca9343b3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?=
Date: Sun, 14 Jun 2020 00:00:00 +0000
Subject: [PATCH 2/2] Avoid forming references to uninitialized memory in
 DroplessArena

Return a pointer from `alloc_raw` instead of a slice. There is no
practical use for a slice as a return type, and changing it to a pointer
avoids forming references to uninitialized memory.
---
 src/librustc_arena/lib.rs      | 25 +++++++++++--------------
 src/librustc_middle/ty/list.rs |  2 +-
 2 files changed, 12 insertions(+), 15 deletions(-)

diff --git a/src/librustc_arena/lib.rs b/src/librustc_arena/lib.rs
index c8400a953a6e8..4b837a28434e9 100644
--- a/src/librustc_arena/lib.rs
+++ b/src/librustc_arena/lib.rs
@@ -367,7 +367,7 @@ impl DroplessArena {
     /// current memory chunk. Returns `None` if there is no free space left to
     /// satisfy the request.
     #[inline]
-    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
         let ptr = self.ptr.get() as usize;
         let end = self.end.get() as usize;
         // The allocation request fits into the current chunk iff:
@@ -383,14 +383,14 @@ impl DroplessArena {
         let new_ptr = aligned.checked_add(bytes)?;
         if new_ptr <= end {
             self.ptr.set(new_ptr as *mut u8);
-            unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
+            Some(aligned as *mut u8)
         } else {
             None
         }
     }
 
     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
         assert!(bytes != 0);
         loop {
             if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
@@ -406,7 +406,7 @@ impl DroplessArena {
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
 
-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
@@ -431,13 +431,11 @@
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());
 
-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }
@@ -480,7 +478,7 @@
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
@@ -495,7 +493,7 @@
                 let len = vec.len();
                 let start_ptr = self
                     .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                    as *mut _ as *mut T;
+                    as *mut T;
                 vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                 vec.set_len(0);
                 slice::from_raw_parts_mut(start_ptr, len)
@@ -539,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
@@ -563,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut _ as *mut T;
+            as *mut T;
 
         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them
diff --git a/src/librustc_middle/ty/list.rs b/src/librustc_middle/ty/list.rs
index 161783bb370d4..76c72e4c2603d 100644
--- a/src/librustc_middle/ty/list.rs
+++ b/src/librustc_middle/ty/list.rs
@@ -55,7 +55,7 @@ impl<T: Copy> List<T> {
             .dropless
             .alloc_raw(size, cmp::max(mem::align_of::<T>(), mem::align_of::<usize>()));
         unsafe {
-            let result = &mut *(mem.as_mut_ptr() as *mut List<T>);
+            let result = &mut *(mem as *mut List<T>);
             // Write the length
             result.len = slice.len();
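
With `alloc_raw` returning a raw pointer, callers initialize the memory
through the pointer first and only then form a reference, so no reference
to uninitialized bytes is ever created. Below is a usage sketch mirroring
the updated `alloc_slice`; the wrapper `copy_into_arena` is a hypothetical
caller, and `DroplessArena` is the arena type from the patched crate.

    use std::{mem, slice};

    /// Hypothetical caller of the new `alloc_raw` API.
    fn copy_into_arena<'a, T: Copy>(arena: &'a DroplessArena, src: &[T]) -> &'a mut [T] {
        assert!(mem::size_of::<T>() != 0 && !src.is_empty());
        let mem =
            arena.alloc_raw(src.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
        unsafe {
            // Initialize through the raw pointer; no reference to the
            // uninitialized bytes is materialized at any point.
            mem.copy_from_nonoverlapping(src.as_ptr(), src.len());
            // Only now, with the memory initialized, is it sound to
            // form a reference to it.
            slice::from_raw_parts_mut(mem, src.len())
        }
    }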