@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
@@ -311,7 +313,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
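
The guard above can be reproduced outside of liballoc. Below is a minimal standalone sketch, assuming the stable std::process::abort and AtomicUsize APIs in place of the internal core::intrinsics::abort used in this change; the RefCount type and its increment method are illustrative names and not part of the patch. It shows why checking the value returned by fetch_add is sufficient: even if many threads race past MAX_REFCOUNT before one of them aborts, there is still roughly isize::MAX of headroom before the usize counter could wrap.

use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

// Illustrative constant mirroring MAX_REFCOUNT from the patch.
const MAX_REFCOUNT: usize = isize::MAX as usize;

// Hypothetical standalone counter; not the actual Arc internals.
struct RefCount(AtomicUsize);

impl RefCount {
    fn new() -> RefCount {
        RefCount(AtomicUsize::new(1))
    }

    fn increment(&self) {
        // Relaxed suffices for the same reason as in Arc::clone: a new
        // reference can only be created from an existing one, so any
        // required synchronization has already happened.
        let old_size = self.0.fetch_add(1, Relaxed);

        // Checking the old value after the add is enough: even if ~isize::MAX
        // threads raced past this point before any of them finished aborting,
        // the usize counter still could not wrap around to zero.
        if old_size > MAX_REFCOUNT {
            abort();
        }
    }
}

fn main() {
    let rc = RefCount::new();
    rc.increment();
    assert_eq!(rc.0.load(Relaxed), 2);
}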
@@ -612,7 +628,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
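
For context, here is a small runnable illustration of the leak this guard defends against, written against the public std::sync::Arc API and Arc::strong_count (available in later stable Rust; not part of this diff). mem::forget skips the clone's destructor, so the strong count is incremented but never decremented again.

use std::mem;
use std::sync::Arc;

fn main() {
    let a = Arc::new(0u32);
    assert_eq!(Arc::strong_count(&a), 1);

    // Each forgotten clone bumps the strong count without ever running the
    // destructor that would decrement it, so the count can only grow.
    for _ in 0..3 {
        mem::forget(a.clone());
    }
    assert_eq!(Arc::strong_count(&a), 4);

    // Repeat this enough times and the counter would overflow and wrap,
    // turning later drops into a use-after-free, unless Arc saturates and
    // aborts first, as the change above makes it do.
}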