@@ -203,7 +203,6 @@ impl AtomicBool {
     /// # Examples
     ///
     /// ```
-    /// #![feature(atomic_access)]
     /// use std::sync::atomic::{AtomicBool, Ordering};
     ///
     /// let mut some_bool = AtomicBool::new(true);
@@ -212,7 +211,7 @@ impl AtomicBool {
     /// assert_eq!(some_bool.load(Ordering::SeqCst), false);
     /// ```
     #[inline]
-    #[unstable(feature = "atomic_access", issue = "35603")]
+    #[stable(feature = "atomic_access", since = "1.15.0")]
     pub fn get_mut(&mut self) -> &mut bool {
         unsafe { &mut *(self.v.get() as *mut bool) }
     }
@@ -225,14 +224,13 @@ impl AtomicBool {
     /// # Examples
     ///
     /// ```
-    /// #![feature(atomic_access)]
     /// use std::sync::atomic::AtomicBool;
     ///
     /// let some_bool = AtomicBool::new(true);
     /// assert_eq!(some_bool.into_inner(), true);
     /// ```
     #[inline]
-    #[unstable(feature = "atomic_access", issue = "35603")]
+    #[stable(feature = "atomic_access", since = "1.15.0")]
     pub fn into_inner(self) -> bool {
         unsafe { self.v.into_inner() != 0 }
     }
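Note: with the two hunks above applied, `AtomicBool::get_mut` and `AtomicBool::into_inner` work on stable Rust (1.15.0+) without the `#![feature(atomic_access)]` gate. A minimal usage sketch mirroring the doc examples; the `flag` binding and the `main` wrapper are illustrative, not part of the patch:

```rust
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let mut flag = AtomicBool::new(true);

    // `get_mut` takes `&mut self`, so no other thread can be observing the
    // value; the write below is a plain store with no atomic ordering.
    *flag.get_mut() = false;
    assert_eq!(flag.load(Ordering::SeqCst), false);

    // `into_inner` consumes the atomic and hands back the plain `bool`.
    assert_eq!(flag.into_inner(), false);
}
```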
@@ -588,15 +586,14 @@ impl<T> AtomicPtr<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(atomic_access)]
     /// use std::sync::atomic::{AtomicPtr, Ordering};
     ///
     /// let mut atomic_ptr = AtomicPtr::new(&mut 10);
     /// *atomic_ptr.get_mut() = &mut 5;
     /// assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5);
     /// ```
     #[inline]
-    #[unstable(feature = "atomic_access", issue = "35603")]
+    #[stable(feature = "atomic_access", since = "1.15.0")]
     pub fn get_mut(&mut self) -> &mut *mut T {
         unsafe { &mut *self.p.get() }
     }
@@ -609,14 +606,13 @@ impl<T> AtomicPtr<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(atomic_access)]
     /// use std::sync::atomic::AtomicPtr;
     ///
     /// let atomic_ptr = AtomicPtr::new(&mut 5);
     /// assert_eq!(unsafe { *atomic_ptr.into_inner() }, 5);
     /// ```
     #[inline]
-    #[unstable(feature = "atomic_access", issue = "35603")]
+    #[stable(feature = "atomic_access", since = "1.15.0")]
     pub fn into_inner(self) -> *mut T {
         unsafe { self.p.into_inner() }
     }
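Note: the same stabilization applies to `AtomicPtr<T>`. A small sketch assuming stable Rust 1.15+; the locals `x`, `y`, and `raw` are hypothetical and only avoid the short-lived temporaries of the terser doc examples:

```rust
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut x = 10;
    let mut y = 5;
    let mut atomic_ptr = AtomicPtr::new(&mut x);

    // Exclusive (&mut) access: swap the stored pointer without an atomic op.
    *atomic_ptr.get_mut() = &mut y;
    assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5);

    // Consume the wrapper and get the raw pointer back out.
    let raw: *mut i32 = atomic_ptr.into_inner();
    assert_eq!(unsafe { *raw }, 5);
}
```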
@@ -883,7 +879,6 @@ macro_rules! atomic_int {
             /// # Examples
             ///
             /// ```
-            /// #![feature(atomic_access)]
             /// use std::sync::atomic::{AtomicIsize, Ordering};
             ///
             /// let mut some_isize = AtomicIsize::new(10);
@@ -905,7 +900,6 @@ macro_rules! atomic_int {
             /// # Examples
             ///
             /// ```
-            /// #![feature(atomic_access)]
             /// use std::sync::atomic::AtomicIsize;
             ///
             /// let some_isize = AtomicIsize::new(5);
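Note: neither `get_mut` nor `into_inner` takes an `Ordering` argument, because `&mut self` (or ownership) already guarantees exclusive access, while shared-access methods still require one. A hedged illustration using the newly stabilized `AtomicIsize` methods next to the long-stable `fetch_add`:

```rust
use std::sync::atomic::{AtomicIsize, Ordering};

fn main() {
    let mut some_isize = AtomicIsize::new(10);

    // Through `&mut self` there is no ordering to choose: nothing can race.
    *some_isize.get_mut() = 5;

    // Shared-access methods still take an explicit `Ordering`.
    some_isize.fetch_add(1, Ordering::SeqCst);
    assert_eq!(some_isize.into_inner(), 6);
}
```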
@@ -1261,15 +1255,15 @@ atomic_int!{
     stable(feature = "rust1", since = "1.0.0"),
     stable(feature = "extended_compare_and_swap", since = "1.10.0"),
     stable(feature = "atomic_debug", since = "1.3.0"),
-    unstable(feature = "atomic_access", issue = "35603"),
+    stable(feature = "atomic_access", since = "1.15.0"),
     isize AtomicIsize ATOMIC_ISIZE_INIT
 }
 #[cfg(target_has_atomic = "ptr")]
 atomic_int!{
     stable(feature = "rust1", since = "1.0.0"),
     stable(feature = "extended_compare_and_swap", since = "1.10.0"),
     stable(feature = "atomic_debug", since = "1.3.0"),
-    unstable(feature = "atomic_access", issue = "35603"),
+    stable(feature = "atomic_access", since = "1.15.0"),
     usize AtomicUsize ATOMIC_USIZE_INIT
 }
 
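Note: in these invocations the fourth attribute argument appears to be the stability marker that `atomic_int!` attaches to the generated `get_mut` and `into_inner`, which is why switching it from `unstable` to `stable` is all the integer atomics need. A usage sketch for the generated `AtomicUsize` methods on stable 1.15+; the `counter` binding is illustrative:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let mut counter = AtomicUsize::new(10);

    // Plain, non-atomic update while we hold the only reference.
    *counter.get_mut() += 5;
    assert_eq!(counter.load(Ordering::SeqCst), 15);

    // Unwrap the final value once the atomic is no longer shared.
    assert_eq!(counter.into_inner(), 15);
}
```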