Skip to content

Commit 156cfda

Browse files
authored
Merge pull request #381 from rust-osdev/gdt_atomic
Allow GDT to be loaded with shared reference
2 parents e70b8a3 + 8bb01e8 commit 156cfda

File tree

3 files changed

+50
-73
lines changed

3 files changed

+50
-73
lines changed

src/lib.rs

-60
Original file line number | Diff line number | Diff line change
@@ -11,9 +11,6 @@
1111
#![deny(missing_debug_implementations)]
1212
#![deny(unsafe_op_in_unsafe_fn)]
1313

14-
use core::cell::UnsafeCell;
15-
use core::sync::atomic::{AtomicBool, Ordering};
16-
1714
pub use crate::addr::{align_down, align_up, PhysAddr, VirtAddr};
1815

1916
pub mod addr;
@@ -66,60 +63,3 @@ impl PrivilegeLevel {
6663
}
6764
}
6865
}
69-
70-
/// A wrapper that can be used to safely create one mutable reference `&'static mut T` from a static variable.
71-
///
72-
/// `SingleUseCell` is safe because it ensures that it only ever gives out one reference.
73-
///
74-
/// `SingleUseCell<T>` is a safe alternative to `static mut` or a static `UnsafeCell<T>`.
75-
#[derive(Debug)]
76-
pub struct SingleUseCell<T> {
77-
used: AtomicBool,
78-
value: UnsafeCell<T>,
79-
}
80-
81-
impl<T> SingleUseCell<T> {
82-
/// Construct a new SingleUseCell.
83-
pub const fn new(value: T) -> Self {
84-
Self {
85-
used: AtomicBool::new(false),
86-
value: UnsafeCell::new(value),
87-
}
88-
}
89-
90-
/// Try to acquire a mutable reference to the wrapped value.
91-
/// This will only succeed the first time the function is
92-
/// called and fail on all following calls.
93-
///
94-
/// ```
95-
/// use x86_64::SingleUseCell;
96-
///
97-
/// static FOO: SingleUseCell<i32> = SingleUseCell::new(0);
98-
///
99-
/// // Call `try_get_mut` for the first time and get a reference.
100-
/// let first: &'static mut i32 = FOO.try_get_mut().unwrap();
101-
/// assert_eq!(first, &0);
102-
///
103-
/// // Calling `try_get_mut` again will return `None`.
104-
/// assert_eq!(FOO.try_get_mut(), None);
105-
/// ```
106-
pub fn try_get_mut(&self) -> Option<&mut T> {
107-
let already_used = self.used.swap(true, Ordering::AcqRel);
108-
if already_used {
109-
None
110-
} else {
111-
Some(unsafe {
112-
// SAFETY: no reference has been given out yet and we won't give out another.
113-
&mut *self.value.get()
114-
})
115-
}
116-
}
117-
}
118-
119-
// SAFETY: Sharing a `SingleUseCell<T>` between threads is safe regardless of whether `T` is `Sync`
120-
// because we only expose the inner value once to one thread. The `T: Send` bound makes sure that
121-
// sending a unique reference to another thread is safe.
122-
unsafe impl<T: Send> Sync for SingleUseCell<T> {}
123-
124-
// SAFETY: It's safe to send a `SingleUseCell<T>` to another thread if it's safe to send `T`.
125-
unsafe impl<T: Send> Send for SingleUseCell<T> {}

src/structures/gdt.rs

+33-5
Original file line number | Diff line number | Diff line change
@@ -10,30 +10,55 @@ use core::fmt;
1010
#[cfg(doc)]
1111
use crate::registers::segmentation::{Segment, CS, SS};
1212

13+
#[cfg(feature = "instructions")]
14+
use core::sync::atomic::{AtomicU64 as EntryValue, Ordering};
15+
#[cfg(not(feature = "instructions"))]
16+
use u64 as EntryValue;
17+
1318
/// 8-byte entry in a descriptor table.
1419
///
1520
/// A [`GlobalDescriptorTable`] (or LDT) is an array of these entries, and
1621
/// [`SegmentSelector`]s index into this array. Each [`Descriptor`] in the table
1722
/// uses either 1 Entry (if it is a [`UserSegment`](Descriptor::UserSegment)) or
1823
/// 2 Entries (if it is a [`SystemSegment`](Descriptor::SystemSegment)). This
1924
/// type exists to give users access to the raw entry bits in a GDT.
20-
#[derive(Clone, PartialEq, Eq)]
2125
#[repr(transparent)]
22-
pub struct Entry(u64);
26+
pub struct Entry(EntryValue);
2327

2428
impl Entry {
2529
// Create a new Entry from a raw value.
2630
const fn new(raw: u64) -> Self {
31+
#[cfg(feature = "instructions")]
32+
let raw = EntryValue::new(raw);
2733
Self(raw)
2834
}
2935

3036
/// The raw bits for this entry. Depending on the [`Descriptor`] type, these
3137
/// bits may correspond to those in [`DescriptorFlags`].
3238
pub fn raw(&self) -> u64 {
33-
self.0
39+
// TODO: Make this const fn when AtomicU64::load is const.
40+
#[cfg(feature = "instructions")]
41+
let raw = self.0.load(Ordering::SeqCst);
42+
#[cfg(not(feature = "instructions"))]
43+
let raw = self.0;
44+
raw
3445
}
3546
}
3647

48+
impl Clone for Entry {
49+
fn clone(&self) -> Self {
50+
Self::new(self.raw())
51+
}
52+
}
53+
54+
impl PartialEq for Entry {
55+
fn eq(&self, other: &Self) -> bool {
56+
self.raw() == other.raw()
57+
}
58+
}
59+
60+
impl Eq for Entry {}
61+
3762
impl fmt::Debug for Entry {
3863
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3964
// Display inner value as hex
@@ -99,6 +124,9 @@ impl<const MAX: usize> GlobalDescriptorTable<MAX> {
99124
// TODO: Replace with compiler error when feature(generic_const_exprs) is stable.
100125
assert!(MAX > 0, "A GDT cannot have 0 entries");
101126
assert!(MAX <= (1 << 13), "A GDT can only have at most 2^13 entries");
127+
128+
// TODO: Replace with inline_const when it's stable.
129+
#[allow(clippy::declare_interior_mutable_const)]
102130
const NULL: Entry = Entry::new(0);
103131
Self {
104132
table: [NULL; MAX],
@@ -195,7 +223,7 @@ impl<const MAX: usize> GlobalDescriptorTable<MAX> {
195223
/// [`SS::set_reg()`] and [`CS::set_reg()`].
196224
#[cfg(feature = "instructions")]
197225
#[inline]
198-
pub fn load(&'static mut self) {
226+
pub fn load(&'static self) {
199227
// SAFETY: static lifetime ensures no modification after loading.
200228
unsafe { self.load_unsafe() };
201229
}
@@ -213,7 +241,7 @@ impl<const MAX: usize> GlobalDescriptorTable<MAX> {
213241
///
214242
#[cfg(feature = "instructions")]
215243
#[inline]
216-
pub unsafe fn load_unsafe(&mut self) {
244+
pub unsafe fn load_unsafe(&self) {
217245
use crate::instructions::tables::lgdt;
218246
unsafe {
219247
lgdt(&self.pointer());

testing/src/gdt.rs

+17-8
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,7 @@
11
use lazy_static::lazy_static;
22
use x86_64::structures::gdt::{Descriptor, GlobalDescriptorTable, SegmentSelector};
33
use x86_64::structures::tss::TaskStateSegment;
4-
use x86_64::{SingleUseCell, VirtAddr};
4+
use x86_64::VirtAddr;
55

66
pub const DOUBLE_FAULT_IST_INDEX: u16 = 0;
77

@@ -18,12 +18,14 @@ lazy_static! {
1818
};
1919
tss
2020
};
21-
static ref GDT: (SingleUseCell<GlobalDescriptorTable>, Selectors) = {
21+
static ref GDT: (GlobalDescriptorTable, Selectors) = {
2222
let mut gdt = GlobalDescriptorTable::new();
23+
// Add an unused segment so we get a different value for CS
24+
gdt.append(Descriptor::kernel_data_segment());
2325
let code_selector = gdt.append(Descriptor::kernel_code_segment());
2426
let tss_selector = gdt.append(Descriptor::tss_segment(&TSS));
2527
(
26-
SingleUseCell::new(gdt),
28+
gdt,
2729
Selectors {
2830
code_selector,
2931
tss_selector,
@@ -41,9 +43,16 @@ pub fn init() {
4143
use x86_64::instructions::segmentation::{Segment, CS};
4244
use x86_64::instructions::tables::load_tss;
4345

44-
GDT.0.try_get_mut().unwrap().load();
45-
unsafe {
46-
CS::set_reg(GDT.1.code_selector);
47-
load_tss(GDT.1.tss_selector);
48-
}
46+
// Make sure loading CS actually changes the value
47+
GDT.0.load();
48+
assert_ne!(CS::get_reg(), GDT.1.code_selector);
49+
unsafe { CS::set_reg(GDT.1.code_selector) };
50+
assert_eq!(CS::get_reg(), GDT.1.code_selector);
51+
52+
// Loading the TSS should mark the GDT entry as busy
53+
let tss_idx: usize = GDT.1.tss_selector.index().into();
54+
let old_tss_entry = GDT.0.entries()[tss_idx].clone();
55+
unsafe { load_tss(GDT.1.tss_selector) };
56+
let new_tss_entry = GDT.0.entries()[tss_idx].clone();
57+
assert_ne!(old_tss_entry, new_tss_entry);
4958
}

0 commit comments

Comments (0)