Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Commit f944afe

Browse files
committed
Auto merge of #116113 - kpreid:arcmut, r=dtolnay
Generalize `{Rc,Arc}::make_mut()` to unsized types.

* `{Rc,Arc}::make_mut()` now accept any type implementing the new unstable trait `core::clone::CloneToUninit`.
* `CloneToUninit` is implemented for `T: Clone` and for `[T] where T: Clone`.
* `CloneToUninit` is a generalization of the existing internal trait `alloc::alloc::WriteCloneIntoRaw`.
* New feature gate: `clone_to_uninit`

This allows performing `make_mut()` on `Rc<[T]>` and `Arc<[T]>`, which was not previously possible.

---

Previous PR description, now obsolete:

> Add `{Rc, Arc}::make_mut_slice()`
>
> These functions behave identically to `make_mut()`, but operate on `Arc<[T]>` instead of `Arc<T>`.
>
> This allows performing the operation on slices, which was not previously possible because `make_mut()` requires `T: Clone` (and slices, being `!Sized`, do not and currently cannot implement `Clone`).
>
> Feature gate: `make_mut_slice`

try-job: test-various
2 parents ac47dba + 88c3db5 commit f944afe

File tree

10 files changed

+492
-58
lines changed

10 files changed

+492
-58
lines changed

‎library/alloc/src/alloc.rs‎

Lines changed: 0 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -424,29 +424,3 @@ pub mod __alloc_error_handler {
424424
}
425425
}
426426
}
427-
428-
#[cfg(not(no_global_oom_handling))]
429-
/// Specialize clones into pre-allocated, uninitialized memory.
430-
/// Used by `Box::clone` and `Rc`/`Arc::make_mut`.
431-
pub(crate) trait WriteCloneIntoRaw: Sized {
432-
    unsafe fn write_clone_into_raw(&self, target: *mut Self);
433-
}
434-
435-
#[cfg(not(no_global_oom_handling))]
436-
impl<T: Clone> WriteCloneIntoRaw for T {
437-
    #[inline]
438-
    default unsafe fn write_clone_into_raw(&self, target: *mut Self) {
439-
        // Having allocated *first* may allow the optimizer to create
440-
        // the cloned value in-place, skipping the local and move.
441-
        unsafe { target.write(self.clone()) };
442-
    }
443-
}
444-
445-
#[cfg(not(no_global_oom_handling))]
446-
impl<T: Copy> WriteCloneIntoRaw for T {
447-
    #[inline]
448-
    unsafe fn write_clone_into_raw(&self, target: *mut Self) {
449-
        // We can always copy in-place, without ever involving a local value.
450-
        unsafe { target.copy_from_nonoverlapping(self, 1) };
451-
    }
452-
}

‎library/alloc/src/boxed.rs‎

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -188,6 +188,8 @@
188188
use core::any::Any;
189189
use core::async_iter::AsyncIterator;
190190
use core::borrow;
191+
#[cfg(not(no_global_oom_handling))]
192+
use core::clone::CloneToUninit;
191193
use core::cmp::Ordering;
192194
use core::error::Error;
193195
use core::fmt;
@@ -207,7 +209,7 @@ use core::slice;
207209
use core::task::{Context,Poll};
208210

209211
#[cfg(not(no_global_oom_handling))]
210-
use crate::alloc::{handle_alloc_error, WriteCloneIntoRaw};
212+
use crate::alloc::handle_alloc_error;
211213
use crate::alloc::{AllocError, Allocator, Global, Layout};
212214
#[cfg(not(no_global_oom_handling))]
213215
usecrate::borrow::Cow;
@@ -1346,7 +1348,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Box<T, A> {
13461348
// Pre-allocate memory to allow writing the cloned value directly.
13471349
let mut boxed = Self::new_uninit_in(self.1.clone());
13481350
unsafe{
1349-
(**self).write_clone_into_raw(boxed.as_mut_ptr());
1351+
(**self).clone_to_uninit(boxed.as_mut_ptr());
13501352
boxed.assume_init()
13511353
}
13521354
}

‎library/alloc/src/lib.rs‎

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,6 +103,7 @@
103103
#![feature(assert_matches)]
104104
#![feature(async_fn_traits)]
105105
#![feature(async_iterator)]
106+
#![feature(clone_to_uninit)]
106107
#![feature(coerce_unsized)]
107108
#![feature(const_align_of_val)]
108109
#![feature(const_box)]

‎library/alloc/src/rc.rs‎

Lines changed: 101 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -249,6 +249,8 @@ use std::boxed::Box;
249249
use core::any::Any;
250250
use core::borrow;
251251
use core::cell::Cell;
252+
#[cfg(not(no_global_oom_handling))]
253+
use core::clone::CloneToUninit;
252254
use core::cmp::Ordering;
253255
use core::fmt;
254256
use core::hash::{Hash,Hasher};
@@ -268,8 +270,6 @@ use core::slice::from_raw_parts_mut;
268270

269271
#[cfg(not(no_global_oom_handling))]
270272
usecrate::alloc::handle_alloc_error;
271-
#[cfg(not(no_global_oom_handling))]
272-
use crate::alloc::WriteCloneIntoRaw;
273273
use crate::alloc::{AllocError, Allocator, Global, Layout};
274274
usecrate::borrow::{Cow,ToOwned};
275275
#[cfg(not(no_global_oom_handling))]
@@ -1749,7 +1749,8 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
17491749
}
17501750
}
17511751

1752-
impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
1752+
#[cfg(not(no_global_oom_handling))]
1753+
impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
17531754
/// Makes a mutable reference into the given `Rc`.
17541755
///
17551756
/// If there are other `Rc` pointers to the same allocation, then `make_mut` will
@@ -1800,31 +1801,52 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
18001801
/// assert!(76 == *data);
18011802
/// assert!(weak.upgrade().is_none());
18021803
/// ```
1803-
#[cfg(not(no_global_oom_handling))]
18041804
#[inline]
18051805
#[stable(feature ="rc_unique", since ="1.4.0")]
18061806
pub fn make_mut(this: &mut Self) -> &mut T {
1807+
let size_of_val =size_of_val::<T>(&**this);
1808+
18071809
ifRc::strong_count(this) !=1{
18081810
// Gotta clone the data, there are other Rcs.
1809-
// Pre-allocate memory to allow writing the cloned value directly.
1810-
letmut rc =Self::new_uninit_in(this.alloc.clone());
1811-
unsafe{
1812-
let data =Rc::get_mut_unchecked(&mut rc);
1813-
(**this).write_clone_into_raw(data.as_mut_ptr());
1814-
*this = rc.assume_init();
1815-
}
1811+
1812+
let this_data_ref:&T =&**this;
1813+
// `in_progress` drops the allocation if we panic before finishing initializing it.
1814+
letmut in_progress:UniqueRcUninit<T,A> =
1815+
UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1816+
1817+
// Initialize with clone of this.
1818+
let initialized_clone =unsafe{
1819+
// Clone. If the clone panics, `in_progress` will be dropped and clean up.
1820+
this_data_ref.clone_to_uninit(in_progress.data_ptr());
1821+
// Cast type of pointer, now that it is initialized.
1822+
in_progress.into_rc()
1823+
};
1824+
1825+
// Replace `this` with newly constructed Rc.
1826+
*this = initialized_clone;
18161827
}elseifRc::weak_count(this) !=0{
18171828
// Can just steal the data, all that's left is Weaks
1818-
letmut rc =Self::new_uninit_in(this.alloc.clone());
1829+
1830+
// We don't need panic-protection like the above branch does, but we might as well
1831+
// use the same mechanism.
1832+
letmut in_progress:UniqueRcUninit<T,A> =
1833+
UniqueRcUninit::new(&**this, this.alloc.clone());
18191834
unsafe{
1820-
let data =Rc::get_mut_unchecked(&mut rc);
1821-
data.as_mut_ptr().copy_from_nonoverlapping(&**this,1);
1835+
// Initialize `in_progress` with move of **this.
1836+
// We have to express this in terms of bytes because `T: ?Sized`; there is no
1837+
// operation that just copies a value based on its `size_of_val()`.
1838+
ptr::copy_nonoverlapping(
1839+
ptr::from_ref(&**this).cast::<u8>(),
1840+
in_progress.data_ptr().cast::<u8>(),
1841+
size_of_val,
1842+
);
18221843

18231844
this.inner().dec_strong();
18241845
// Remove implicit strong-weak ref (no need to craft a fake
18251846
// Weak here -- we know other Weaks can clean up for us)
18261847
this.inner().dec_weak();
1827-
ptr::write(this, rc.assume_init());
1848+
// Replace `this` with newly constructed Rc that has the moved data.
1849+
ptr::write(this, in_progress.into_rc());
18281850
}
18291851
}
18301852
// This unsafety is ok because we're guaranteed that the pointer
@@ -3686,3 +3708,67 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
36863708
}
36873709
}
36883710
}
3711+
3712+
/// A unique owning pointer to a [`RcBox`] **that does not imply the contents are initialized,**
3713+
/// but will deallocate it (without dropping the value) when dropped.
3714+
///
3715+
/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
3716+
/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
3717+
/// which `MaybeUninit` does not.
3718+
#[cfg(not(no_global_oom_handling))]
3719+
struct UniqueRcUninit<T: ?Sized, A: Allocator> {
3720+
    ptr: NonNull<RcBox<T>>,
3721+
    layout_for_value: Layout,
3722+
    alloc: Option<A>,
3723+
}
3724+
3725+
#[cfg(not(no_global_oom_handling))]
3726+
impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
3727+
    /// Allocate a RcBox with layout suitable to contain `for_value` or a clone of it.
3728+
    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
3729+
        let layout = Layout::for_value(for_value);
3730+
        let ptr = unsafe {
3731+
            Rc::allocate_for_layout(
3732+
                layout,
3733+
                |layout_for_rcbox| alloc.allocate(layout_for_rcbox),
3734+
                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcBox<T>),
3735+
            )
3736+
        };
3737+
        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
3738+
    }
3739+
3740+
    /// Returns the pointer to be written into to initialize the [`Rc`].
3741+
    fn data_ptr(&mut self) -> *mut T {
3742+
        let offset = data_offset_align(self.layout_for_value.align());
3743+
        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
3744+
    }
3745+
3746+
    /// Upgrade this into a normal [`Rc`].
3747+
    ///
3748+
    /// # Safety
3749+
    ///
3750+
    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
3751+
    unsafe fn into_rc(mut self) -> Rc<T, A> {
3752+
        let ptr = self.ptr;
3753+
        let alloc = self.alloc.take().unwrap();
3754+
        mem::forget(self);
3755+
        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
3756+
        // for having initialized the data.
3757+
        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
3758+
    }
3759+
}
3760+
3761+
#[cfg(not(no_global_oom_handling))]
3762+
impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
3763+
    fn drop(&mut self) {
3764+
        // SAFETY:
3765+
        // * new() produced a pointer safe to deallocate.
3766+
        // * We own the pointer unless into_rc() was called, which forgets us.
3767+
        unsafe {
3768+
            self.alloc
3769+
                .take()
3770+
                .unwrap()
3771+
                .deallocate(self.ptr.cast(), rcbox_layout_for_value_layout(self.layout_for_value));
3772+
        }
3773+
    }
3774+
}

‎library/alloc/src/rc/tests.rs‎

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -316,6 +316,24 @@ fn test_cowrc_clone_weak() {
316316
assert!(cow1_weak.upgrade().is_none());
317317
}
318318

319+
/// This is similar to the doc-test for `Rc::make_mut()`, but on an unsized type (slice).
320+
#[test]
321+
fn test_cowrc_unsized() {
322+
    use std::rc::Rc;
323+
324+
    let mut data: Rc<[i32]> = Rc::new([10, 20, 30]);
325+
326+
    Rc::make_mut(&mut data)[0] += 1; // Won't clone anything
327+
    let mut other_data = Rc::clone(&data); // Won't clone inner data
328+
    Rc::make_mut(&mut data)[1] += 1; // Clones inner data
329+
    Rc::make_mut(&mut data)[2] += 1; // Won't clone anything
330+
    Rc::make_mut(&mut other_data)[0] *= 10; // Won't clone anything
331+
332+
    // Now `data` and `other_data` point to different allocations.
333+
    assert_eq!(*data, [11, 21, 31]);
334+
    assert_eq!(*other_data, [110, 20, 30]);
335+
}
336+
319337
#[test]
320338
fntest_show(){
321339
let foo =Rc::new(75);

0 commit comments

Comments
 (0)

[8]ページ先頭

©2009-2025 Movatter.jp