diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 2b3736019ba40..695642bf92082 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -348,6 +348,17 @@ impl<T: ?Sized> Rc<T> {
     unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
         unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
     }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        // Destroy the data at this time, even though we must not free the box
+        // allocation itself (there might still be weak pointers lying around).
+        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
+
+        // Drop the weak ref collectively held by all strong references.
+        drop(Weak { ptr: self.ptr });
+    }
 }
 
 impl<T> Rc<T> {
@@ -1516,20 +1527,19 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// drop(foo);    // Doesn't print anything
     /// drop(foo2);   // Prints "dropped!"
     /// ```
+    #[inline]
     fn drop(&mut self) {
         unsafe {
             self.inner().dec_strong();
-            if self.inner().strong() == 0 {
-                // destroy the contained object
-                ptr::drop_in_place(Self::get_mut_unchecked(self));
-
-                // remove the implicit "strong weak" pointer now that we've
-                // destroyed the contents.
-                self.inner().dec_weak();
+            if self.inner().strong() != 0 {
+                return;
+            }
 
-                if self.inner().weak() == 0 {
-                    Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
-                }
+            if mem::needs_drop::<T>() {
+                self.drop_slow();
+            } else {
+                // Drop the weak ref collectively held by all strong references.
+                drop(Weak { ptr: self.ptr });
             }
         }
     }
@@ -2457,6 +2467,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
     ///
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
+    #[inline]
     fn drop(&mut self) {
         let inner = if let Some(inner) = self.inner() { inner } else { return };
 
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 55d51e0a3c4cf..62d2258b295e1 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -1105,7 +1105,7 @@ impl<T: ?Sized> Arc<T> {
         // allocation itself (there might still be weak pointers lying around).
         unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
 
-        // Drop the weak ref collectively held by all strong references
+        // Drop the weak ref collectively held by all strong references.
         drop(Weak { ptr: self.ptr });
     }
 
@@ -1699,7 +1699,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         acquire!(self.inner().strong);
 
         unsafe {
-            self.drop_slow();
+            if mem::needs_drop::<T>() {
+                self.drop_slow();
+            } else {
+                // Drop the weak ref collectively held by all strong references.
+                drop(Weak { ptr: self.ptr });
+            }
         }
     }
 }
@@ -2159,6 +2164,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
     ///
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
+    #[inline]
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then its time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
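
Illustrative note (not part of the patch): the new fast path hinges on `mem::needs_drop::<T>()`, a compile-time constant reporting whether `T` has any drop glue. When it is false, the branch folds away and the last strong reference never calls the non-inlined `drop_slow`; it only releases the implicit weak reference held collectively by all strong references. A minimal sketch using only stable std APIs:

use std::mem;
use std::rc::Rc;

fn main() {
    // `u32` has no drop glue, so dropping the last `Rc<u32>` can skip
    // `drop_in_place` (and thus the out-of-line `drop_slow` call) and
    // just release the implicit "strong weak" reference.
    assert!(!mem::needs_drop::<u32>());

    // `String` owns a heap buffer; its destructor must still run, so
    // `Rc<String>` continues to take the `drop_slow` path on last drop.
    assert!(mem::needs_drop::<String>());

    let rc = Rc::new(42u32);
    drop(rc); // eligible for the fast path introduced by this patch
}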