@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
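The cap is `isize::MAX as usize`, i.e. half the `usize` range, so even after racing threads overshoot the saturation point there are still `isize::MAX + 1` increments of headroom before the counter could actually wrap to zero. A minimal sketch of that arithmetic in current Rust (not part of the patch):

use std::{isize, usize};

fn main() {
    const MAX_REFCOUNT: usize = isize::MAX as usize;
    // usize::MAX == 2 * isize::MAX + 1, so the distance from the
    // saturation point to an actual wrap is isize::MAX + 1 increments:
    // far more than any plausible number of simultaneously racing threads.
    assert_eq!(usize::MAX - MAX_REFCOUNT, MAX_REFCOUNT + 1);
}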
@@ -312,7 +314,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
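As a minimal illustration of the hazard this hunk closes (written against today's stable `std::sync::Arc` rather than the patched internals): every `mem::forget` of a clone bumps the strong count without a matching decrement, so enough forgotten clones would eventually wrap the count.

use std::mem;
use std::sync::Arc;

fn main() {
    let a = Arc::new(0u8);
    // Each forgotten clone leaks one strong reference. Repeating this
    // enough times to wrap the count to zero would drop the value while
    // `a` still points at it -- the use-after-free the abort guard
    // above rules out.
    for _ in 0..3 {
        mem::forget(Arc::clone(&a));
    }
    assert_eq!(Arc::strong_count(&a), 4);
}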
@@ -617,7 +633,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where there are *no other* weak pointers in existence. (So we can't
         // be running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
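The same check, extracted into a standalone sketch using the stable `std::process::abort` in place of the unstable `core::intrinsics::abort` the patch has access to (the `inc_count` helper is hypothetical, for illustration only):

use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

const MAX_REFCOUNT: usize = isize::MAX as usize;

// Toy version of the guarded increment shared by Arc::clone and
// Weak::clone above.
fn inc_count(count: &AtomicUsize) {
    // fetch_add returns the *previous* value, so racing threads may
    // briefly push the count past MAX_REFCOUNT ("racy saturation")
    // before each of them reaches the check; the isize::MAX headroom
    // keeps a real wraparound unreachable in practice.
    let old_size = count.fetch_add(1, Relaxed);
    if old_size > MAX_REFCOUNT {
        abort();
    }
}

fn main() {
    let strong = AtomicUsize::new(1);
    inc_count(&strong);
    assert_eq!(strong.load(Relaxed), 2);
}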