diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index cb9daaea0001b..438d2d1a921e1 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -16,6 +16,8 @@ pub use core::alloc::*;
 
 #[cfg(test)]
 mod tests;
 
+#[macro_use]
+pub(crate) mod struct_alloc;
 extern "Rust" {
     // These are the magic symbols to call the global allocator. rustc generates
diff --git a/library/alloc/src/alloc/struct_alloc.rs b/library/alloc/src/alloc/struct_alloc.rs
new file mode 100644
index 0000000000000..dff9393b6328b
--- /dev/null
+++ b/library/alloc/src/alloc/struct_alloc.rs
@@ -0,0 +1,221 @@
+use crate::alloc::Global;
+use crate::fmt;
+use core::alloc::{AllocError, Allocator, Layout};
+use core::fmt::{Debug, Formatter};
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::NonNull;
+
+#[cfg(test)]
+mod tests;
+
+/// Allocator that adds appropriate padding for a `repr(C)` struct.
+///
+/// This allocator takes as type arguments the type of a field `T` and an allocator `A`.
+///
+/// Consider
+///
+/// ```rust,ignore (not real code)
+/// #[repr(C)]
+/// struct Struct<T, Data> {
+///     t: T,
+///     data: Data,
+/// }
+/// ```
+///
+/// where `Data` is a type with layout `layout`.
+///
+/// When this allocator creates an allocation for layout `layout`, the pointer can be
+/// offset by `-offsetof(Struct, data)` and the resulting pointer points to an allocation
+/// of `A` for `Layout::new::<Struct<T, Data>>()`.
+pub(crate) struct StructAlloc<T, A = Global>(A, PhantomData<*const T>);
+
+impl<T, A> StructAlloc<T, A> {
+    #[allow(dead_code)]
+    /// Creates a new allocator.
+    pub(crate) fn new(allocator: A) -> Self {
+        Self(allocator, PhantomData)
+    }
+
+    /// Computes the layout of `Struct`.
+    fn struct_layout(data_layout: Layout) -> Result<Layout, AllocError> {
+        let t_align = mem::align_of::<T>();
+        let t_size = mem::size_of::<T>();
+        if t_size == 0 && t_align == 1 {
+            // Skip the checks below
+            return Ok(data_layout);
+        }
+        let data_align = data_layout.align();
+        // The contract of `Layout` guarantees that `data_align > 0`.
+        let data_align_minus_1 = data_align.wrapping_sub(1);
+        let data_size = data_layout.size();
+        let align = data_align.max(t_align);
+        let align_minus_1 = align.wrapping_sub(1);
+        // `size` is
+        //     t_size rounded up to `data_align`
+        // plus
+        //     `data_size` rounded up to `align`
+        // Note that the result is a multiple of `align`.
+        let (t_size_aligned, t_overflow) =
+            t_size.overflowing_add(t_size.wrapping_neg() & data_align_minus_1);
+        let (data_size_aligned, data_overflow) = match data_size.overflowing_add(align_minus_1) {
+            (sum, req_overflow) => (sum & !align_minus_1, req_overflow),
+        };
+        let (size, sum_overflow) = t_size_aligned.overflowing_add(data_size_aligned);
+        if t_overflow || data_overflow || sum_overflow {
+            return Err(AllocError);
+        }
+        unsafe { Ok(Layout::from_size_align_unchecked(size, align)) }
+    }
+
+    /// Returns the offset of `data` in `Struct`.
+    #[inline]
+    pub(crate) fn offset_of_data(data_layout: Layout) -> usize {
+        let t_size = mem::size_of::<T>();
+        // The contract of `Layout` guarantees `.align() > 0`
+        let data_align_minus_1 = data_layout.align().wrapping_sub(1);
+        t_size.wrapping_add(t_size.wrapping_neg() & data_align_minus_1)
+    }
+
+    /// Given a pointer to `data`, returns a pointer to `Struct`.
+    ///
+    /// # Safety
+    ///
+    /// The data pointer must have been allocated by `Self` with the same `data_layout`.
+ #[inline] + unsafe fn data_ptr_to_struct_ptr(data: NonNull, data_layout: Layout) -> NonNull { + unsafe { + let offset_of_data = Self::offset_of_data(data_layout); + NonNull::new_unchecked(data.as_ptr().sub(offset_of_data)) + } + } + + /// Given a pointer to `Struct`, returns a pointer to `data`. + /// + /// # Safety + /// + /// The struct pointer must have been allocated by `A` with the layout + /// `Self::struct_layout(data_layout)`. + #[inline] + unsafe fn struct_ptr_to_data_ptr( + struct_ptr: NonNull<[u8]>, + data_layout: Layout, + ) -> NonNull<[u8]> { + let offset_of_data = Self::offset_of_data(data_layout); + let data_ptr = + unsafe { NonNull::new_unchecked(struct_ptr.as_mut_ptr().add(offset_of_data)) }; + // Note that the size is the exact size requested in the layout. Let me explain + // why this is necessary: + // + // Assume the original requested layout was `size=1, align=1`. Assume `T=u16` + // Then the struct layout is `size=4, align=2`. Assume that the allocator returns + // a slice with `size=5`. Then the space available for `data` is `3`. + // However, if we returned a slice with `len=3`, then the user would be allowed + // to call `dealloc` with `size=3, align=1`. In this case the struct layout + // would be computed as `size=6, align=2`. This size would be larger than what + // the allocator returned. + NonNull::slice_from_raw_parts(data_ptr, data_layout.size()) + } +} + +impl Debug for StructAlloc { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("StructAlloc").field("0", &self.0).finish() + } +} + +/// Delegates `Self::allocate{,_zereod}` to the allocator after computing the struct +/// layout. Then transforms the new struct pointer to the new data pointer and returns it. +macro delegate_alloc($id:ident) { + fn $id(&self, data_layout: Layout) -> Result, AllocError> { + let struct_layout = Self::struct_layout(data_layout)?; + let struct_ptr = self.0.$id(struct_layout)?; + unsafe { Ok(Self::struct_ptr_to_data_ptr(struct_ptr, data_layout)) } + } +} + +/// Delegates `Self::{{grow{,_zeroed},shrink}` to the allocator after computing the struct +/// layout and transforming the data pointer to the struct pointer. Then transforms +/// the new struct pointer to the new data pointer and returns it. +macro delegate_transform($id:ident) { + unsafe fn $id( + &self, + old_data_ptr: NonNull, + old_data_layout: Layout, + new_data_layout: Layout, + ) -> Result, AllocError> { + let old_struct_layout = Self::struct_layout(old_data_layout)?; + let new_struct_layout = Self::struct_layout(new_data_layout)?; + unsafe { + let old_struct_ptr = Self::data_ptr_to_struct_ptr(old_data_ptr, old_data_layout); + let new_struct_ptr = + self.0.$id(old_struct_ptr, old_struct_layout, new_struct_layout)?; + Ok(Self::struct_ptr_to_data_ptr(new_struct_ptr, new_data_layout)) + } + } +} + +unsafe impl Allocator for StructAlloc { + delegate_alloc!(allocate); + delegate_alloc!(allocate_zeroed); + + unsafe fn deallocate(&self, data_ptr: NonNull, data_layout: Layout) { + unsafe { + let struct_ptr = Self::data_ptr_to_struct_ptr(data_ptr, data_layout); + let struct_layout = + Self::struct_layout(data_layout).expect("deallocate called with invalid layout"); + self.0.deallocate(struct_ptr, struct_layout); + } + } + + delegate_transform!(grow); + delegate_transform!(grow_zeroed); + delegate_transform!(shrink); +} + +#[allow(unused_macros)] +macro_rules! 
implement_struct_allocator { + ($id:ident) => { + #[unstable(feature = "struct_alloc", issue = "none")] + unsafe impl Allocator for $id { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + self.0.allocate(layout) + } + + fn allocate_zeroed(&self, layout: Layout) -> Result, AllocError> { + self.0.allocate_zeroed(layout) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + unsafe { self.0.deallocate(ptr, layout) } + } + + unsafe fn grow( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + unsafe { self.0.grow(ptr, old_layout, new_layout) } + } + + unsafe fn grow_zeroed( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + unsafe { self.0.grow_zeroed(ptr, old_layout, new_layout) } + } + + unsafe fn shrink( + &self, + ptr: NonNull, + old_layout: Layout, + new_layout: Layout, + ) -> Result, AllocError> { + unsafe { self.0.shrink(ptr, old_layout, new_layout) } + } + } + }; +} diff --git a/library/alloc/src/alloc/struct_alloc/tests.rs b/library/alloc/src/alloc/struct_alloc/tests.rs new file mode 100644 index 0000000000000..778cadc5284ff --- /dev/null +++ b/library/alloc/src/alloc/struct_alloc/tests.rs @@ -0,0 +1,126 @@ +use super::StructAlloc; +use crate::alloc::Global; +use crate::collections::VecDeque; +use core::alloc::{AllocError, Allocator}; +use std::alloc::Layout; +use std::cell::RefCell; +use std::ptr::NonNull; +use std::{any, ptr}; + +fn test_pair() { + if let Err(_) = std::panic::catch_unwind(|| test_pair_::()) { + panic!("test of {} followed by {} failed", any::type_name::(), any::type_name::()); + } +} + +fn test_pair_() { + #[repr(C)] + struct S { + t: T, + data: Data, + } + + let offset = { + let s: *const S = ptr::null(); + unsafe { std::ptr::addr_of!((*s).data) as usize } + }; + + let expected_layout = RefCell::new(VecDeque::new()); + let expected_ptr = RefCell::new(VecDeque::new()); + + let check_layout = |actual| { + let mut e = expected_layout.borrow_mut(); + match e.pop_front() { + Some(expected) if expected == actual => {} + Some(expected) => panic!("expected layout {:?}, actual layout {:?}", expected, actual), + _ => panic!("unexpected allocator invocation with layout {:?}", actual), + } + }; + + let check_ptr = |actual: NonNull| { + let mut e = expected_ptr.borrow_mut(); + match e.pop_front() { + Some(expected) if expected == actual.as_ptr() => {} + Some(expected) => { + panic!("expected pointer {:p}, actual pointer {:p}", expected, actual) + } + _ => panic!("unexpected allocator invocation with pointer {:p}", actual), + } + }; + + struct TestAlloc(F, G); + + unsafe impl Allocator for TestAlloc + where + F: Fn(Layout), + G: Fn(NonNull), + { + fn allocate(&self, layout: Layout) -> Result, AllocError> { + self.0(layout); + Global.allocate(layout) + } + + unsafe fn deallocate(&self, ptr: NonNull, layout: Layout) { + self.1(ptr); + self.0(layout); + unsafe { Global.deallocate(ptr, layout) } + } + } + + let struct_alloc = StructAlloc::::new(TestAlloc(check_layout, check_ptr)); + + fn s_layout() -> Layout { + Layout::new::>() + } + + fn d_layout() -> Layout { + Layout::new::<[Data; N]>() + } + + fn check_slice(ptr: NonNull<[u8]>) { + let expected = d_layout::().size(); + if ptr.len() != expected { + panic!( + "expected allocation size: {:?}, actual allocation size: {:?}", + expected, + ptr.len() + ) + } + } + + expected_layout.borrow_mut().push_back(s_layout::()); + let ptr = struct_alloc.allocate(d_layout::()).unwrap(); + check_slice::(ptr); + unsafe { + 
expected_ptr.borrow_mut().push_back(ptr.as_mut_ptr().sub(offset)); + } + expected_layout.borrow_mut().push_back(s_layout::()); + expected_layout.borrow_mut().push_back(s_layout::()); + let ptr = unsafe { + struct_alloc + .grow(ptr.as_non_null_ptr(), d_layout::(), d_layout::()) + .unwrap() + }; + check_slice::(ptr); + unsafe { + expected_ptr.borrow_mut().push_back(ptr.as_mut_ptr().sub(offset)); + } + expected_layout.borrow_mut().push_back(s_layout::()); + unsafe { + struct_alloc.deallocate(ptr.as_non_null_ptr(), d_layout::()); + } + if !expected_ptr.borrow().is_empty() || !expected_layout.borrow().is_empty() { + panic!("missing allocator calls"); + } +} + +#[test] +fn test() { + macro_rules! test_ty { + ($($ty:ty),*) => { test_ty!(@2 $($ty),*; ($($ty),*)) }; + (@2 $($tyl:ty),*; $tyr:tt) => { $(test_ty!(@3 $tyl; $tyr);)* }; + (@3 $tyl:ty; ($($tyr:ty),*)) => { $(test_pair::<$tyl, $tyr>();)* }; + } + // call test_pair::() for every combination of these types + test_ty!((), u8, u16, u32, u64, u128); +} diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index 99c42a4ba4423..91e4594a2c7c6 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -155,6 +155,7 @@ mod macros; // Heaps provided for low-level allocation strategies +#[macro_use] pub mod alloc; // Primitive types using the heaps above diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index f67f5fc533b49..36368e06bf545 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -257,12 +257,13 @@ use core::hash::{Hash, Hasher}; use core::intrinsics::abort; use core::iter; use core::marker::{self, PhantomData, Unpin, Unsize}; -use core::mem::{self, align_of_val_raw, forget, size_of_val}; +use core::mem::{self, forget, size_of_val, MaybeUninit}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; use core::slice::from_raw_parts_mut; +use crate::alloc::struct_alloc::StructAlloc; use crate::alloc::{ box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw, }; @@ -273,13 +274,31 @@ use crate::vec::Vec; #[cfg(test)] mod tests; -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct RcBox { +struct RcBoxMetadata { strong: Cell, weak: Cell, +} + +impl RcBoxMetadata { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + #[inline] + fn new_strong() -> Self { + Self { strong: Cell::new(1), weak: Cell::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: Cell::new(0), weak: Cell::new(1) } + } +} + +// This is repr(C) to support StructAlloc +#[repr(C)] +struct RcBox { + meta: RcBoxMetadata, value: T, } @@ -340,13 +359,7 @@ impl Rc { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new(value: T) -> Rc { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. 
- Self::from_inner( - Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(), - ) + Self::from_inner(Box::leak(box RcBox { meta: RcBoxMetadata::new_strong(), value }).into()) } /// Constructs a new `Rc` using a weak reference to itself. Attempting @@ -378,8 +391,7 @@ impl Rc { // Construct the inner in the "uninitialized" state with a single // weak reference. let uninit_ptr: NonNull<_> = Box::leak(box RcBox { - strong: Cell::new(0), - weak: Cell::new(1), + meta: RcBoxMetadata::new_weak(), value: mem::MaybeUninit::::uninit(), }) .into(); @@ -400,9 +412,9 @@ impl Rc { let inner = init_ptr.as_ptr(); ptr::write(ptr::addr_of_mut!((*inner).value), data); - let prev_value = (*inner).strong.get(); + let prev_value = (*inner).meta.strong.get(); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); - (*inner).strong.set(1); + (*inner).meta.strong.set(1); } let strong = Rc::from_inner(init_ptr); @@ -489,13 +501,8 @@ impl Rc { /// ``` #[unstable(feature = "allocator_api", issue = "32838")] pub fn try_new(value: T) -> Result, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?) - .into(), + Box::leak(Box::try_new(RcBox { meta: RcBoxMetadata::new_strong(), value })?).into(), )) } @@ -846,13 +853,7 @@ impl Rc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - let offset = unsafe { data_offset(ptr) }; - - // Reverse the offset to find the original RcBox. - let rc_ptr = - unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; - - unsafe { Self::from_ptr(rc_ptr) } + unsafe { Self::from_data_ptr(ptr).assume_init() } } /// Creates a new [`Weak`] pointer to this allocation. @@ -1170,8 +1171,8 @@ impl Rc { unsafe { debug_assert_eq!(Layout::for_value(&*inner), layout); - ptr::write(&mut (*inner).strong, Cell::new(1)); - ptr::write(&mut (*inner).weak, Cell::new(1)); + ptr::write(&mut (*inner).meta.strong, Cell::new(1)); + ptr::write(&mut (*inner).meta.weak, Cell::new(1)); } Ok(inner) @@ -1210,6 +1211,35 @@ impl Rc { Self::from_ptr(ptr) } } + + /// # Safety + /// + /// The caller must ensure that the pointer points to the `value` field of a `Global` + /// allocation of type `RcBox`. Depending on how the pointer was created, the + /// `meta` field might or might not be uninitialized. It's up to the caller to ensure + /// that this field is set to the correct value before the return value is unwrapped. + #[inline] + unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { + let offset = unsafe { data_offset(ptr) }; + + // Reverse the offset to find the original RcBox. + let rc_ptr = + unsafe { (ptr as *mut RcBox).set_ptr_value((ptr as *mut u8).offset(-offset)) }; + + unsafe { MaybeUninit::new(Self::from_ptr(rc_ptr)) } + } + + #[inline] + fn from_rc_alloc_box(v: Box) -> Rc { + unsafe { + // SAFETY: RcAlloc allocations of `T` have the same layout as Global + // allocations of `RcBox`. 
We use `Self::from_raw` as a shorthand + let data_ptr = Box::into_raw(v); + let mut rc = Self::from_data_ptr(data_ptr); + rc.assume_init_mut().ptr.as_mut().meta = RcBoxMetadata::new_strong(); + rc.assume_init() + } + } } impl Rc<[T]> { @@ -2087,7 +2117,7 @@ impl Weak { // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } }) } } @@ -2296,12 +2326,12 @@ trait RcInnerPtr { impl RcInnerPtr for RcBox { #[inline(always)] fn weak_ref(&self) -> &Cell { - &self.weak + &self.meta.weak } #[inline(always)] fn strong_ref(&self) -> &Cell { - &self.strong + &self.meta.strong } } @@ -2334,24 +2364,66 @@ impl AsRef for Rc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Rc {} +type RcStructAlloc = StructAlloc; + /// Get the offset within an `RcBox` for the payload behind a pointer. /// /// # Safety /// /// The pointer must point to (and have valid metadata for) a previously /// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the RcBox. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } +unsafe fn data_offset(data_ptr: *const T) -> isize { + unsafe { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. + let data_layout = Layout::for_value_raw(data_ptr); + RcStructAlloc::offset_of_data(data_layout) as isize + } } -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize +/// A memory allocator for [`Rc`] objects. +/// +/// This allocator behaves like the underlying allocator except that values of type +/// [`Box`] can be converted to [`Rc`] without copying the allocation. +/// +/// # Example +/// +/// ``` +/// #![feature(struct_alloc, allocator_api)] +/// +/// use std::rc::{Rc, RcAlloc}; +/// +/// let mut contents = Vec::new_in(RcAlloc::new()); +/// contents.push(1u32); +/// let contents: Rc<[u32]> = contents.into_boxed_slice().into(); +/// ``` +#[derive(Debug)] +#[unstable(feature = "struct_alloc", issue = "none")] +pub struct RcAlloc(StructAlloc); + +#[unstable(feature = "struct_alloc", issue = "none")] +impl RcAlloc { + /// Constructs a new `RcAlloc`. + pub fn new() -> Self { + Self::new_with(Global) + } +} + +#[unstable(feature = "struct_alloc", issue = "none")] +impl RcAlloc { + /// Constructs a new `RcAlloc`. 
+ pub fn new_with(alloc: A) -> Self { + RcAlloc(StructAlloc::new(alloc)) + } +} + +implement_struct_allocator!(RcAlloc); + +#[unstable(feature = "struct_alloc", issue = "none")] +impl From> for Rc { + fn from(v: Box) -> Self { + Self::from_rc_alloc_box(v) + } } diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs index 843a9b07fa934..fca8cc05efd77 100644 --- a/library/alloc/src/rc/tests.rs +++ b/library/alloc/src/rc/tests.rs @@ -559,3 +559,14 @@ fn test_rc_cyclic_with_two_ref() { assert_eq!(Rc::strong_count(&two_refs), 3); assert_eq!(Rc::weak_count(&two_refs), 2); } + +#[test] +fn test_from_rc_alloc() { + let mut vec = Vec::new_in(RcAlloc::new()); + vec.push(1i32); + let b = vec.into_boxed_slice(); + let b_addr = &*b as *const _; + let rc = Rc::from(b); + let rc_addr = &*rc as *const _; + assert_eq!(b_addr, rc_addr); +} diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 461ca85c0305d..073aa10cafedd 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -14,7 +14,7 @@ use core::hint; use core::intrinsics::abort; use core::iter; use core::marker::{PhantomData, Unpin, Unsize}; -use core::mem::{self, align_of_val_raw, size_of_val}; +use core::mem::{self, size_of_val, MaybeUninit}; use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver}; use core::pin::Pin; use core::ptr::{self, NonNull}; @@ -22,6 +22,7 @@ use core::slice::from_raw_parts_mut; use core::sync::atomic; use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst}; +use crate::alloc::struct_alloc::StructAlloc; use crate::alloc::{ box_free, handle_alloc_error, AllocError, Allocator, Global, Layout, WriteCloneIntoRaw, }; @@ -296,18 +297,33 @@ impl fmt::Debug for Weak { } } -// This is repr(C) to future-proof against possible field-reordering, which -// would interfere with otherwise safe [into|from]_raw() of transmutable -// inner types. -#[repr(C)] -struct ArcInner { +struct ArcInnerMetadata { strong: atomic::AtomicUsize, // the value usize::MAX acts as a sentinel for temporarily "locking" the // ability to upgrade weak pointers or downgrade strong ones; this is used // to avoid races in `make_mut` and `get_mut`. weak: atomic::AtomicUsize, +} + +impl ArcInnerMetadata { + // Start the weak pointer count as 1 which is the weak pointer that's + // held by all the strong pointers (kinda), see std/rc.rs for more info + #[inline] + fn new_strong() -> Self { + Self { strong: atomic::AtomicUsize::new(1), weak: atomic::AtomicUsize::new(1) } + } + + #[inline] + fn new_weak() -> Self { + Self { strong: atomic::AtomicUsize::new(0), weak: atomic::AtomicUsize::new(1) } + } +} +// This is repr(C) to support StructAlloc +#[repr(C)] +struct ArcInner { + meta: ArcInnerMetadata, data: T, } @@ -327,13 +343,7 @@ impl Arc { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new(data: T) -> Arc { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = box ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - }; + let x: Box<_> = box ArcInner { meta: ArcInnerMetadata::new_strong(), data }; Self::from_inner(Box::leak(x).into()) } @@ -363,8 +373,7 @@ impl Arc { // Construct the inner in the "uninitialized" state with a single // weak reference. 
let uninit_ptr: NonNull<_> = Box::leak(box ArcInner { - strong: atomic::AtomicUsize::new(0), - weak: atomic::AtomicUsize::new(1), + meta: ArcInnerMetadata::new_weak(), data: mem::MaybeUninit::::uninit(), }) .into(); @@ -398,7 +407,7 @@ impl Arc { // // These side effects do not impact us in any way, and no other side effects are // possible with safe code alone. - let prev_value = (*inner).strong.fetch_add(1, Release); + let prev_value = (*inner).meta.strong.fetch_add(1, Release); debug_assert_eq!(prev_value, 0, "No prior strong references should exist"); } @@ -494,13 +503,7 @@ impl Arc { #[unstable(feature = "allocator_api", issue = "32838")] #[inline] pub fn try_new(data: T) -> Result, AllocError> { - // Start the weak pointer count as 1 which is the weak pointer that's - // held by all the strong pointers (kinda), see std/rc.rs for more info - let x: Box<_> = Box::try_new(ArcInner { - strong: atomic::AtomicUsize::new(1), - weak: atomic::AtomicUsize::new(1), - data, - })?; + let x: Box<_> = Box::try_new(ArcInner { meta: ArcInnerMetadata::new_strong(), data })?; Ok(Self::from_inner(Box::leak(x).into())) } @@ -593,11 +596,11 @@ impl Arc { #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] pub fn try_unwrap(this: Self) -> Result { - if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { + if this.inner().meta.strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() { return Err(this); } - acquire!(this.inner().strong); + acquire!(this.inner().meta.strong); unsafe { let elem = ptr::read(&this.ptr.as_ref().data); @@ -842,14 +845,7 @@ impl Arc { /// ``` #[stable(feature = "rc_raw", since = "1.17.0")] pub unsafe fn from_raw(ptr: *const T) -> Self { - unsafe { - let offset = data_offset(ptr); - - // Reverse the offset to find the original ArcInner. - let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); - - Self::from_ptr(arc_ptr) - } + unsafe { Self::from_data_ptr(ptr).assume_init() } } /// Creates a new [`Weak`] pointer to this allocation. @@ -867,13 +863,13 @@ impl Arc { pub fn downgrade(this: &Self) -> Weak { // This Relaxed is OK because we're checking the value in the CAS // below. - let mut cur = this.inner().weak.load(Relaxed); + let mut cur = this.inner().meta.weak.load(Relaxed); loop { // check if the weak counter is currently "locked"; if so, spin. if cur == usize::MAX { hint::spin_loop(); - cur = this.inner().weak.load(Relaxed); + cur = this.inner().meta.weak.load(Relaxed); continue; } @@ -884,7 +880,7 @@ impl Arc { // Unlike with Clone(), we need this to be an Acquire read to // synchronize with the write coming from `is_unique`, so that the // events prior to that write happen before this read. - match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { + match this.inner().meta.weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); @@ -918,7 +914,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn weak_count(this: &Self) -> usize { - let cnt = this.inner().weak.load(SeqCst); + let cnt = this.inner().meta.weak.load(SeqCst); // If the weak count is currently locked, the value of the // count was 0 just before taking the lock. 
if cnt == usize::MAX { 0 } else { cnt - 1 } @@ -947,7 +943,7 @@ impl Arc { #[inline] #[stable(feature = "arc_counts", since = "1.15.0")] pub fn strong_count(this: &Self) -> usize { - this.inner().strong.load(SeqCst) + this.inner().meta.strong.load(SeqCst) } /// Increments the strong reference count on the `Arc` associated with the @@ -1112,8 +1108,8 @@ impl Arc { debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout); unsafe { - ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); - ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1)); + ptr::write(&mut (*inner).meta.strong, atomic::AtomicUsize::new(1)); + ptr::write(&mut (*inner).meta.weak, atomic::AtomicUsize::new(1)); } Ok(inner) @@ -1152,6 +1148,34 @@ impl Arc { Self::from_ptr(ptr) } } + + /// # Safety + /// + /// The caller must ensure that the pointer points to the `data` field of a `Global` + /// allocation of type `ArcInner`. Depending on how the pointer was created, the + /// `meta` field might or might not be uninitialized. It's up to the caller to ensure + /// that this field is set to the correct value before the return value is unwrapped. + #[inline] + unsafe fn from_data_ptr(ptr: *const T) -> MaybeUninit { + unsafe { + let offset = data_offset(ptr); + + // Reverse the offset to find the original ArcInner. + let arc_ptr = (ptr as *mut ArcInner).set_ptr_value((ptr as *mut u8).offset(-offset)); + + MaybeUninit::new(Self::from_ptr(arc_ptr)) + } + } + + #[inline] + fn from_arc_alloc_box(v: Box) -> Arc { + unsafe { + let data_ptr = Box::into_raw(v); + let mut rc = Self::from_data_ptr(data_ptr); + rc.assume_init_mut().ptr.as_mut().meta = ArcInnerMetadata::new_strong(); + rc.assume_init() + } + } } impl Arc<[T]> { @@ -1276,7 +1300,7 @@ impl Clone for Arc { // another must already provide any required synchronization. // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) - let old_size = self.inner().strong.fetch_add(1, Relaxed); + let old_size = self.inner().meta.strong.fetch_add(1, Relaxed); // However we need to guard against massive refcounts in case someone // is `mem::forget`ing Arcs. If we don't do this the count can overflow @@ -1352,7 +1376,7 @@ impl Arc { // before release writes (i.e., decrements) to `strong`. Since we hold a // weak count, there's no chance the ArcInner itself could be // deallocated. - if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { + if this.inner().meta.strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. let mut arc = Self::new_uninit(); @@ -1361,7 +1385,7 @@ impl Arc { (**this).write_clone_into_raw(data.as_mut_ptr()); *this = arc.assume_init(); } - } else if this.inner().weak.load(Relaxed) != 1 { + } else if this.inner().meta.weak.load(Relaxed) != 1 { // Relaxed suffices in the above because this is fundamentally an // optimization: we are always racing with weak pointers being // dropped. Worst case, we end up allocated a new Arc unnecessarily. @@ -1388,7 +1412,7 @@ impl Arc { } else { // We were the sole reference of either kind; bump back up the // strong ref count. - this.inner().strong.store(1, Release); + this.inner().meta.strong.store(1, Release); } // As with `get_mut()`, the unsafety is ok because our reference was @@ -1484,16 +1508,16 @@ impl Arc { // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements // of the `weak` count (via `Weak::drop`, which uses release). 
If the upgraded // weak ref was never dropped, the CAS here will fail so we do not care to synchronize. - if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { + if self.inner().meta.weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() { // This needs to be an `Acquire` to synchronize with the decrement of the `strong` // counter in `drop` -- the only access that happens when any but the last reference // is being dropped. - let unique = self.inner().strong.load(Acquire) == 1; + let unique = self.inner().meta.strong.load(Acquire) == 1; // The release write here synchronizes with a read in `downgrade`, // effectively preventing the above read of `strong` from happening // after the write. - self.inner().weak.store(1, Release); // release the lock + self.inner().meta.weak.store(1, Release); // release the lock unique } else { false @@ -1533,7 +1557,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. This // same logic applies to the below `fetch_sub` to the `weak` count. - if self.inner().strong.fetch_sub(1, Release) != 1 { + if self.inner().meta.strong.fetch_sub(1, Release) != 1 { return; } @@ -1565,7 +1589,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc { // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) // [2]: (https://github.com/rust-lang/rust/pull/41714) - acquire!(self.inner().strong); + acquire!(self.inner().meta.strong); unsafe { self.drop_slow(); @@ -1883,7 +1907,7 @@ impl Weak { // is dropped, the data field will be dropped in-place). Some(unsafe { let ptr = self.ptr.as_ptr(); - WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak } + WeakInner { strong: &(*ptr).meta.strong, weak: &(*ptr).meta.weak } }) } } @@ -2455,24 +2479,66 @@ impl AsRef for Arc { #[stable(feature = "pin", since = "1.33.0")] impl Unpin for Arc {} +type ArcStructAlloc = StructAlloc; + /// Get the offset within an `ArcInner` for the payload behind a pointer. /// /// # Safety /// /// The pointer must point to (and have valid metadata for) a previously /// valid instance of T, but the T is allowed to be dropped. -unsafe fn data_offset(ptr: *const T) -> isize { - // Align the unsized value to the end of the ArcInner. - // Because RcBox is repr(C), it will always be the last field in memory. - // SAFETY: since the only unsized types possible are slices, trait objects, - // and extern types, the input safety requirement is currently enough to - // satisfy the requirements of align_of_val_raw; this is an implementation - // detail of the language that may not be relied upon outside of std. - unsafe { data_offset_align(align_of_val_raw(ptr)) } +unsafe fn data_offset(data_ptr: *const T) -> isize { + unsafe { + // SAFETY: since the only unsized types possible are slices, trait objects, + // and extern types, the input safety requirement is currently enough to + // satisfy the requirements of for_value_raw; this is an implementation + // detail of the language that may not be relied upon outside of std. + let data_layout = Layout::for_value_raw(data_ptr); + ArcStructAlloc::offset_of_data(data_layout) as isize + } } -#[inline] -fn data_offset_align(align: usize) -> isize { - let layout = Layout::new::>(); - (layout.size() + layout.padding_needed_for(align)) as isize +/// A memory allocator for [`Arc`] objects. 
+/// +/// This allocator behaves like the underlying allocator except that values of type +/// [`Box`] can be converted to [`Arc`] without copying the allocation. +/// +/// # Example +/// +/// ``` +/// #![feature(struct_alloc, allocator_api)] +/// +/// use alloc::sync::{Arc, ArcAlloc}; +/// +/// let mut contents = Vec::new_in(ArcAlloc::new()); +/// contents.push(1u32); +/// let contents: Arc<[u32]> = contents.into_boxed_slice().into(); +/// ``` +#[derive(Debug)] +#[unstable(feature = "struct_alloc", issue = "none")] +pub struct ArcAlloc(StructAlloc); + +#[unstable(feature = "struct_alloc", issue = "none")] +impl ArcAlloc { + /// Constructs a new `ArcAlloc`. + pub fn new() -> Self { + ArcAlloc(StructAlloc::new(Global)) + } +} + +#[unstable(feature = "struct_alloc", issue = "none")] +impl ArcAlloc { + /// Constructs a new `ArcAlloc`. + pub fn new_with(alloc: A) -> Self { + ArcAlloc(StructAlloc::new(alloc)) + } +} + +implement_struct_allocator!(ArcAlloc); + +#[unstable(feature = "struct_alloc", issue = "none")] +impl From> for Arc { + fn from(v: Box) -> Self { + Self::from_arc_alloc_box(v) + } } diff --git a/library/alloc/src/sync/tests.rs b/library/alloc/src/sync/tests.rs index 4ccb32fbbf63d..819022c1c62dc 100644 --- a/library/alloc/src/sync/tests.rs +++ b/library/alloc/src/sync/tests.rs @@ -618,3 +618,14 @@ fn test_arc_cyclic_two_refs() { assert_eq!(Arc::strong_count(&two_refs), 3); assert_eq!(Arc::weak_count(&two_refs), 2); } + +#[test] +fn test_from_arc_alloc() { + let mut vec = Vec::new_in(ArcAlloc::new()); + vec.push(1i32); + let b = vec.into_boxed_slice(); + let b_addr = &*b as *const _; + let rc = Arc::from(b); + let rc_addr = &*rc as *const _; + assert_eq!(b_addr, rc_addr); +} diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index 2d902a9b6e08e..7c4d8ce9cbd38 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -146,8 +146,8 @@ def __init__(self, valobj, is_atomic=False): self.is_atomic = is_atomic self.ptr = unwrap_unique_or_non_null(valobj["ptr"]) self.value = self.ptr["data" if is_atomic else "value"] - self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"] - self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1 + self.strong = self.ptr["meta"]["strong"]["v" if is_atomic else "value"]["value"] + self.weak = self.ptr["meta"]["weak"]["v" if is_atomic else "value"]["value"] - 1 def to_string(self): if self.is_atomic: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index ca2685ca31ffd..10bb8ce5d1435 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -601,10 +601,10 @@ def __init__(self, valobj, dict, is_atomic=False): self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value") - self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex( - 0).GetChildMemberWithName("value") - self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex( - 0).GetChildMemberWithName("value") + self.strong = self.ptr.GetChildMemberWithName("meta")\ + .GetChildMemberWithName("strong").GetChildAtIndex(0).GetChildMemberWithName("value") + self.weak = self.ptr.GetChildMemberWithName("meta")\ + .GetChildMemberWithName("weak").GetChildAtIndex(0).GetChildMemberWithName("value") self.value_builder = ValueBuilder(valobj)
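The padding arithmetic in struct_alloc.rs above (round `size_of::<T>()` up to the data alignment, then the data size up to the combined alignment) matches what the stable `Layout::extend` and `pad_to_align` APIs compute for the equivalent `#[repr(C)]` pair. A minimal standalone sketch of that correspondence; `offset_of_data` and `check` here are illustrative reimplementations, not items from the patch:

```rust
use std::alloc::Layout;
use std::mem;

// Stand-in for `StructAlloc::<T, _>::offset_of_data`: round `size_of::<T>()`
// up to the alignment of the data layout.
fn offset_of_data<T>(data_layout: Layout) -> usize {
    let t_size = mem::size_of::<T>();
    let mask = data_layout.align().wrapping_sub(1);
    t_size.wrapping_add(t_size.wrapping_neg() & mask)
}

fn check<T>(data_layout: Layout) {
    // `Layout::extend` computes the field offset of `data` in the
    // equivalent `#[repr(C)] struct { t: T, data: Data }`.
    let (pair, offset) = Layout::new::<T>().extend(data_layout).unwrap();
    assert_eq!(offset, offset_of_data::<T>(data_layout));
    // For non-zero-sized `T`, the struct layout is this pair layout
    // rounded up to its alignment.
    println!("struct layout: {:?}", pair.pad_to_align());
}

fn main() {
    // The case from the comment in `struct_ptr_to_data_ptr`:
    // T = u16 and a (size 1, align 1) request give a (size 4, align 2) struct.
    check::<u16>(Layout::from_size_align(1, 1).unwrap());
    check::<u64>(Layout::new::<[u32; 3]>());
}
```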
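End to end, the point of `RcAlloc`/`ArcAlloc` is that a boxed value or slice built with them already has room for the reference-count header in front of the payload, so the `From<Box<...>>` impls only write the counters and reuse the allocation. A sketch of that flow, assuming this patch is applied on a nightly toolchain; the `alloc::sync::ArcAlloc` path and the address check mirror the doc example and `test_from_arc_alloc` above, and the `Rc`/`RcAlloc` version is analogous:

```rust
#![feature(struct_alloc, allocator_api)]
extern crate alloc;

use alloc::sync::{Arc, ArcAlloc};

fn main() {
    // Build the payload in an ArcAlloc-backed Vec; the allocator pads every
    // allocation so that ArcInner's metadata fits in front of the data.
    let mut v = Vec::new_in(ArcAlloc::new());
    v.push(1i32);
    v.push(2);
    let boxed = v.into_boxed_slice();
    let before = &*boxed as *const [i32];

    // The conversion writes the strong/weak counters into the reserved
    // space and keeps the same allocation instead of copying.
    let shared: Arc<[i32]> = boxed.into();
    assert_eq!(before, &*shared as *const [i32]);
}
```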