@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
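
Why the cap is `isize::MAX` rather than `usize::MAX`: the check added below runs *after* the `fetch_add`, so the count can overshoot the cap transiently, by at most one per racing thread. Saturating at half the `usize` range therefore leaves ~2^63 increments of headroom on 64-bit targets (~2^31 on 32-bit) before the counter could genuinely wrap. A minimal sketch of that arithmetic (plain Rust, nothing from this module):

    fn main() {
        // The cap chosen above: half the usize range.
        let max_refcount = isize::MAX as usize;
        // Increments still available between the cap and a true wraparound.
        // Every one of them would need its own thread to race past the
        // `old_size > MAX_REFCOUNT` check before any abort fires.
        let headroom = usize::MAX - max_refcount;
        println!("cap = {}, headroom = {}", max_refcount, headroom);
    }
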
@@ -312,7 +314,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
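
The failure mode the new branch guards against can be reproduced in miniature: `mem::forget` consumes a clone without running its destructor, so the strong count rises and never falls. A sketch using today's `std::sync::Arc` (note `Arc::strong_count` postdates this commit and is assumed here purely for observation):

    use std::mem;
    use std::sync::Arc;

    fn main() {
        let a = Arc::new(0u32);
        // Each forgotten clone performs the `fetch_add` in `clone` but
        // never runs the matching decrement in `drop`.
        mem::forget(a.clone());
        mem::forget(a.clone());
        assert_eq!(Arc::strong_count(&a), 3);
        // Repeating this ~isize::MAX times is infeasible to run, but each
        // iteration is cheap; without the guard the counter would wrap to
        // zero and the allocation would be freed while `a` still uses it.
    }
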
@@ -617,7 +633,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
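
The weak count has the same exposure, hence the mirrored guard: a forgotten `Weak` clone leaks a weak-count increment that is never paid back. A small sketch, again leaning on the modern `Arc::downgrade`/`Arc::weak_count` API for observation:

    use std::mem;
    use std::sync::Arc;

    fn main() {
        let a = Arc::new("shared");
        let w = Arc::downgrade(&a);
        assert_eq!(Arc::weak_count(&a), 1);
        // Forgetting a Weak clone bumps the weak count permanently, just
        // as forgetting an Arc clone bumps the strong count.
        mem::forget(w.clone());
        assert_eq!(Arc::weak_count(&a), 2);
    }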