@@ -82,8 +82,10 @@ use core::intrinsics::drop_in_place;
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
+use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
+use core::usize;
 use heap::deallocate;

 /// An atomically reference counted wrapper for shared state.
@@ -156,7 +158,12 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {

 struct ArcInner<T: ?Sized> {
     strong: atomic::AtomicUsize,
+
+    // the value usize::MAX acts as a sentinel for temporarily "locking" the
+    // ability to upgrade weak pointers or downgrade strong ones; this is used
+    // to avoid races in `make_unique` and `get_mut`.
     weak: atomic::AtomicUsize,
+
     data: T,
 }

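The sentinel trick above deserves a closer look. Below is a minimal free-standing sketch of the scheme (the helper names are hypothetical, not part of this patch): `usize::MAX` can double as a "locked" state precisely because a legitimate count would have to overflow before ever reaching it.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};
use std::usize;

// Lock the count only if we appear to be its sole holder. usize::MAX is
// unreachable by legitimate increments, so it can serve as the marker.
fn try_lock(count: &AtomicUsize) -> bool {
    count.compare_and_swap(1, usize::MAX, Ordering::Acquire) == 1
}

// Unlock by restoring the real count (we held the only reference).
fn unlock(count: &AtomicUsize) {
    count.store(1, Ordering::Release);
}

// Anyone who merely increments must spin while the sentinel is set,
// exactly as `downgrade` does in the next hunk.
fn increment(count: &AtomicUsize) {
    loop {
        let cur = count.load(Ordering::Relaxed);
        if cur == usize::MAX { continue } // locked: spin until released
        if count.compare_and_swap(cur, cur + 1, Ordering::Acquire) == cur {
            return;
        }
    }
}
```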
@@ -203,9 +210,25 @@ impl<T: ?Sized> Arc<T> {
     #[unstable(feature = "arc_weak",
                reason = "Weak pointers may not belong in this module.")]
     pub fn downgrade(&self) -> Weak<T> {
-        // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, Relaxed);
-        Weak { _ptr: self._ptr }
+        loop {
+            // This Relaxed is OK because we're checking the value in the CAS
+            // below.
+            let cur = self.inner().weak.load(Relaxed);
+
+            // check if the weak counter is currently "locked"; if so, spin.
+            if cur == usize::MAX { continue }
+
+            // NOTE: this code currently ignores the possibility of overflow
+            // into usize::MAX; in general both Rc and Arc need to be adjusted
+            // to deal with overflow.
+
+            // Unlike with Clone(), we need this to be an Acquire read to
+            // synchronize with the write coming from `is_unique`, so that the
+            // events prior to that write happen before this read.
+            if self.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
+                return Weak { _ptr: self._ptr }
+            }
+        }
     }

     /// Get the number of weak references to this value.
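The Acquire/Release pairing that the new `downgrade` comment appeals to can be demonstrated in isolation. A minimal sketch (all names hypothetical): a Release store publishes every write sequenced before it, and an Acquire CAS that observes that store is guaranteed to see those writes; this is the same reasoning that lets `downgrade` order itself against the `strong` read in `is_unique`, which appears further below.

```rust
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::thread;

fn main() {
    let flag = Arc::new(AtomicUsize::new(0));
    let data = Arc::new(AtomicUsize::new(0));

    let (f, d) = (flag.clone(), data.clone());
    let t = thread::spawn(move || {
        d.store(42, Ordering::Relaxed); // an "event prior to the write"
        f.store(1, Ordering::Release);  // like `weak.store(1, Release)` in `is_unique`
    });

    // Spin with an Acquire CAS, as `downgrade` does on the weak count.
    while flag.compare_and_swap(1, 2, Ordering::Acquire) != 1 {}

    // The successful CAS synchronized with the Release store, so this
    // read is guaranteed to see 42.
    assert_eq!(data.load(Ordering::Relaxed), 42);
    t.join().unwrap();
}
```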
@@ -260,51 +283,6 @@ pub fn weak_count<T: ?Sized>(this: &Arc<T>) -> usize { Arc::weak_count(this) }
 #[deprecated(since = "1.2.0", reason = "renamed to Arc::strong_count")]
 pub fn strong_count<T: ?Sized>(this: &Arc<T>) -> usize { Arc::strong_count(this) }

-
-/// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
-///
-/// Returns `None` if the `Arc<T>` is not unique.
-///
-/// This function is marked **unsafe** because it is racy if weak pointers
-/// are active.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(arc_unique, alloc)]
-/// extern crate alloc;
-/// # fn main() {
-/// use alloc::arc::{Arc, get_mut};
-///
-/// # unsafe {
-/// let mut x = Arc::new(3);
-/// *get_mut(&mut x).unwrap() = 4;
-/// assert_eq!(*x, 4);
-///
-/// let _y = x.clone();
-/// assert!(get_mut(&mut x).is_none());
-/// # }
-/// # }
-/// ```
-#[inline]
-#[unstable(feature = "arc_unique")]
-#[deprecated(since = "1.2.0",
-             reason = "this function is unsafe with weak pointers")]
-pub unsafe fn get_mut<T: ?Sized>(this: &mut Arc<T>) -> Option<&mut T> {
-    // FIXME(#24880) potential race with upgraded weak pointers here
-    if Arc::strong_count(this) == 1 && Arc::weak_count(this) == 0 {
-        // This unsafety is ok because we're guaranteed that the pointer
-        // returned is the *only* pointer that will ever be returned to T. Our
-        // reference count is guaranteed to be 1 at this point, and we required
-        // the Arc itself to be `mut`, so we're returning the only possible
-        // reference to the inner data.
-        let inner = &mut **this._ptr;
-        Some(&mut inner.data)
-    } else {
-        None
-    }
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Clone for Arc<T> {
     /// Makes a clone of the `Arc<T>`.
@@ -352,44 +330,150 @@ impl<T: Clone> Arc<T> {
     /// Make a mutable reference from the given `Arc<T>`.
     ///
     /// This is also referred to as a copy-on-write operation because the inner
-    /// data is cloned if the reference count is greater than one.
-    ///
-    /// This method is marked **unsafe** because it is racy if weak pointers
-    /// are active.
+    /// data is cloned if the (strong) reference count is greater than one. If
+    /// we hold the only strong reference, any existing weak references will no
+    /// longer be upgradeable.
     ///
     /// # Examples
     ///
     /// ```
     /// # #![feature(arc_unique)]
     /// use std::sync::Arc;
     ///
-    /// # unsafe {
     /// let mut five = Arc::new(5);
     ///
-    /// let mut_five = five.make_unique();
-    /// # }
+    /// let mut_five = Arc::make_unique(&mut five);
     /// ```
     #[inline]
     #[unstable(feature = "arc_unique")]
-    #[deprecated(since = "1.2.0",
-                 reason = "this function is unsafe with weak pointers")]
-    pub unsafe fn make_unique(&mut self) -> &mut T {
-        // FIXME(#24880) potential race with upgraded weak pointers here
+    pub fn make_unique(this: &mut Arc<T>) -> &mut T {
+        // Note that we hold both a strong reference and a weak reference.
+        // Thus, releasing our strong reference only will not, by itself, cause
+        // the memory to be deallocated.
         //
-        // Note that we hold a strong reference, which also counts as a weak
-        // reference, so we only clone if there is an additional reference of
-        // either kind.
-        if self.inner().strong.load(SeqCst) != 1 ||
-           self.inner().weak.load(SeqCst) != 1 {
-            *self = Arc::new((**self).clone())
+        // Use Acquire to ensure that we see any writes to `weak` that happen
+        // before release writes (i.e., decrements) to `strong`. Since we hold a
+        // weak count, there's no chance the ArcInner itself could be
+        // deallocated.
+        if this.inner().strong.compare_and_swap(1, 0, Acquire) != 1 {
+            // Another strong pointer exists; clone
+            *this = Arc::new((**this).clone());
+        } else if this.inner().weak.load(Relaxed) != 1 {
+            // Relaxed suffices in the above because this is fundamentally an
+            // optimization: we are always racing with weak pointers being
+            // dropped. Worst case, we end up allocating a new Arc unnecessarily.
+
+            // We removed the last strong ref, but there are additional weak
+            // refs remaining. We'll move the contents to a new Arc, and
+            // invalidate the other weak refs.
+
+            // Note that it is not possible for the read of `weak` to yield
+            // usize::MAX (i.e., locked), since the weak count can only be
+            // locked by a thread with a strong reference.
+
+            // Materialize our own implicit weak pointer, so that it can clean
+            // up the ArcInner as needed.
+            let weak = Weak { _ptr: this._ptr };
+
+            // mark the data itself as already deallocated
+            unsafe {
+                // there is no data race in the implicit write caused by `read`
+                // here (due to zeroing) because data is no longer accessed by
+                // other threads (due to there being no more strong refs at this
+                // point).
+                let mut swap = Arc::new(ptr::read(&(**weak._ptr).data));
+                mem::swap(this, &mut swap);
+                mem::forget(swap);
+            }
+        } else {
+            // We were the sole reference of either kind; bump back up the
+            // strong ref count.
+            this.inner().strong.store(1, Release);
         }
+
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
-        let inner = &mut **self._ptr;
-        &mut inner.data
+        unsafe {
+            let inner = &mut **this._ptr;
+            &mut inner.data
+        }
     }
 }

+impl<T: ?Sized> Arc<T> {
+    /// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
+    ///
+    /// Returns `None` if the `Arc<T>` is not unique.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![feature(arc_unique, alloc)]
+    /// extern crate alloc;
+    /// # fn main() {
+    /// use alloc::arc::Arc;
+    ///
+    /// let mut x = Arc::new(3);
+    /// *Arc::get_mut(&mut x).unwrap() = 4;
+    /// assert_eq!(*x, 4);
+    ///
+    /// let _y = x.clone();
+    /// assert!(Arc::get_mut(&mut x).is_none());
+    /// # }
+    /// ```
+    #[inline]
+    #[unstable(feature = "arc_unique")]
+    pub fn get_mut(this: &mut Arc<T>) -> Option<&mut T> {
+        if this.is_unique() {
+            // This unsafety is ok because we're guaranteed that the pointer
+            // returned is the *only* pointer that will ever be returned to T. Our
+            // reference count is guaranteed to be 1 at this point, and we required
+            // the Arc itself to be `mut`, so we're returning the only possible
+            // reference to the inner data.
+            unsafe {
+                let inner = &mut **this._ptr;
+                Some(&mut inner.data)
+            }
+        } else {
+            None
+        }
+    }
+
+    /// Determine whether this is the unique reference (including weak refs) to
+    /// the underlying data.
+    ///
+    /// Note that this requires locking the weak ref count.
+    fn is_unique(&mut self) -> bool {
+        // lock the weak pointer count if we appear to be the sole weak pointer
+        // holder.
+        //
+        // The acquire label here ensures a happens-before relationship with any
+        // writes to `strong` prior to decrements of the `weak` count (via drop,
+        // which uses Release).
+        if self.inner().weak.compare_and_swap(1, usize::MAX, Acquire) == 1 {
+            // Due to the previous acquire read, this will observe any writes to
+            // `strong` that were due to upgrading weak pointers; only strong
+            // clones remain, which require that the strong count is > 1 anyway.
+            let unique = self.inner().strong.load(Relaxed) == 1;
+
+            // The release write here synchronizes with a read in `downgrade`,
+            // effectively preventing the above read of `strong` from happening
+            // after the write.
+            self.inner().weak.store(1, Release); // release the lock
+            unique
+        } else {
+            false
+        }
+    }
+}
+
+#[inline]
+#[unstable(feature = "arc_unique")]
+#[deprecated(since = "1.2.0", reason = "use Arc::get_mut instead")]
+pub fn get_mut<T: ?Sized>(this: &mut Arc<T>) -> Option<&mut T> {
+    Arc::get_mut(this)
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Drop for Arc<T> {
     /// Drops the `Arc<T>`.
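The net effect of the reworked `make_unique` is observable from safe code. A sketch of the documented semantics (nightly of this era, behind the `arc_unique` and `arc_weak` feature gates): when we hold the only strong reference, the value is moved into a fresh allocation rather than cloned, and any outstanding weak pointers are left pointing at a dead `ArcInner`.

```rust
#![feature(arc_unique, arc_weak)]
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5);
    let weak = data.downgrade();
    assert!(weak.upgrade().is_some()); // a strong ref exists, upgrade works

    // Sole strong reference: `make_unique` takes the move-don't-clone
    // branch, leaving the old ArcInner with a strong count of zero.
    *Arc::make_unique(&mut data) += 1;

    assert_eq!(*data, 6);
    assert!(weak.upgrade().is_none()); // the weak ref can never upgrade again
}
```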
@@ -485,9 +569,15 @@ impl<T: ?Sized> Weak<T> {
         // fetch_add because once the count hits 0 it must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(SeqCst);
+            // Relaxed load because any write of 0 that we can observe
+            // leaves the field in a permanently zero state (so a
+            // "stale" read of 0 is fine), and any other value is
+            // confirmed via the CAS below.
+            let n = inner.strong.load(Relaxed);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
+
+            // Relaxed is valid for the same reason it is in Arc's Clone impl
+            let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
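Whatever orderings are used internally, the user-visible contract of `upgrade` is unchanged; a quick sketch (again nightly-only, behind `arc_weak`):

```rust
#![feature(arc_weak)]
use std::sync::Arc;

fn main() {
    let strong = Arc::new("hello");
    let weak = strong.downgrade();
    assert!(weak.upgrade().is_some()); // a strong ref still exists

    drop(strong); // the strong count hits 0 and stays there forever
    assert!(weak.upgrade().is_none()); // the CAS loop bails out on n == 0
}
```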
@@ -518,9 +608,12 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
-        // See comments in Arc::clone() for why this is relaxed
+        // See comments in Arc::clone() for why this is relaxed. This can use a
+        // fetch_add (ignoring the lock) because the weak count is only locked
+        // when there are *no other* weak pointers in existence. (So we can't
+        // be running this code in that case.)
         self.inner().weak.fetch_add(1, Relaxed);
-        Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr }
     }
 }

@@ -563,6 +656,11 @@ impl<T: ?Sized> Drop for Weak<T> {
         // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
+        //
+        // It's not necessary to check for the locked state here, because the
+        // weak count can only be locked if there was precisely one weak ref,
+        // meaning that drop could only subsequently run ON that remaining weak
+        // ref, which can only happen after the lock is released.
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8,
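The decrement-then-fence shape used here (and in `Arc::drop`, which the comment points to) is the standard reference-count teardown. A minimal sketch with hypothetical names: the Release decrement publishes each owner's writes, and the Acquire fence in the final decrementer pulls all of them in before the memory is freed.

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct Count(AtomicUsize);

impl Count {
    /// Returns true if the caller was the last owner and may deallocate.
    fn release(&self) -> bool {
        // The Release decrement publishes this owner's prior writes to
        // whichever thread ends up performing the teardown.
        if self.0.fetch_sub(1, Ordering::Release) == 1 {
            // Pairs with every other owner's Release decrement, making
            // all of their writes visible before we free the memory.
            fence(Ordering::Acquire);
            true
        } else {
            false
        }
    }
}
```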
@@ -794,13 +892,13 @@ mod tests {
         let mut cow1 = cow0.clone();
         let mut cow2 = cow1.clone();

-        assert!(75 == *cow0.make_unique());
-        assert!(75 == *cow1.make_unique());
-        assert!(75 == *cow2.make_unique());
+        assert!(75 == *Arc::make_unique(&mut cow0));
+        assert!(75 == *Arc::make_unique(&mut cow1));
+        assert!(75 == *Arc::make_unique(&mut cow2));

-        *cow0.make_unique() += 1;
-        *cow1.make_unique() += 2;
-        *cow2.make_unique() += 3;
+        *Arc::make_unique(&mut cow0) += 1;
+        *Arc::make_unique(&mut cow1) += 2;
+        *Arc::make_unique(&mut cow2) += 3;

         assert!(76 == *cow0);
         assert!(77 == *cow1);
@@ -824,7 +922,7 @@ mod tests {
         assert!(75 == *cow2);

         unsafe {
-            *cow0.make_unique() += 1;
+            *Arc::make_unique(&mut cow0) += 1;
         }

         assert!(76 == *cow0);
@@ -847,7 +945,7 @@ mod tests {
         assert!(75 == *cow1_weak.upgrade().unwrap());

         unsafe {
-            *cow0.make_unique() += 1;
+            *Arc::make_unique(&mut cow0) += 1;
         }

         assert!(76 == *cow0);