@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
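For context on the new constant: saturating at `isize::MAX` leaves the entire upper half of the `usize` range as headroom, so even with an enormous number of threads that have each done their `fetch_add` but not yet reached the check, the counter cannot wrap before some thread observes a value above the limit and aborts. A minimal standalone sketch of that saturate-and-abort pattern, using `std` atomics rather than the `core` internals in the patch (the `Count`/`increment` names are illustrative, not from this commit):

```rust
use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

// Mirrors the patch: saturate at isize::MAX, half of the usize range.
const MAX_REFCOUNT: usize = isize::MAX as usize;

struct Count(AtomicUsize);

impl Count {
    fn increment(&self) {
        let old = self.0.fetch_add(1, Relaxed);
        // Even if ~isize::MAX threads are mid-increment at once, the count
        // is still below usize::MAX, so it cannot have wrapped before at
        // least one thread reaches this check and aborts.
        if old > MAX_REFCOUNT {
            abort();
        }
    }
}

fn main() {
    let c = Count(AtomicUsize::new(0));
    c.increment();
    assert_eq!(c.0.load(Relaxed), 1);
}
```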
@@ -311,7 +313,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this, the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
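A short sketch of the failure mode this hunk guards against: each `mem::forget`-ed clone leaks one strong-count increment with no matching decrement, so a pathological loop could wrap the counter and free the allocation while live `Arc`s still point at it. With the guard, the process aborts once the count passes `MAX_REFCOUNT` instead:

```rust
use std::mem;
use std::sync::Arc;

fn main() {
    let a = Arc::new(0u8);
    // Each forgotten clone bumps the strong count permanently. Repeated
    // ~2^32 times on a 32-bit target (or ~2^64 on a 64-bit one), the count
    // would wrap to zero and drop the value while `a` still points at it;
    // the new guard aborts long before that can happen.
    for _ in 0..10 {
        mem::forget(a.clone());
    }
    assert_eq!(Arc::strong_count(&a), 11);
}
```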
@@ -612,7 +628,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // when there are *no other* weak pointers in existence. (So we can't
         // be running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
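Weak pointers need the same protection, since a forgotten `Weak` clone leaks a weak-count increment in exactly the same way a forgotten `Arc` clone leaks a strong-count increment. A small illustrative demonstration (not part of the patch):

```rust
use std::mem;
use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new("shared");
    let weak: Weak<&str> = Arc::downgrade(&strong);
    // Forgetting a Weak clone leaks a weak-count increment, so Weak::clone
    // carries the same saturate-and-abort guard as Arc::clone.
    mem::forget(weak.clone());
    assert_eq!(Arc::weak_count(&strong), 2);
}
```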