
Commit b7b5988 (parent 5f48a8a)

Refactor ChunkState to encode space index
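The change, visible in the src/util/heap/chunk_map.rs diff below, packs the chunk's allocation flag and the index of the space that owns the chunk into the single chunk-map byte. A minimal standalone sketch of that bit layout (illustrative only, not the mmtk-core source; the encode helper is hypothetical):

    // Hypothetical helper mirroring the encoding described in this commit:
    // high bit = allocated flag, low four bits = space index.
    fn encode(space_index: u8) -> u8 {
        debug_assert!(space_index < 16); // four bits allow at most 16 spaces
        0x80 | space_index
    }

    fn main() {
        let byte = encode(3);
        assert_eq!(byte, 0x83);       // allocated, owned by space 3
        assert!(byte & 0x80 != 0);    // allocated flag is set
        assert_eq!(byte & 0x0F, 3);   // space index recovered from the low bits
        assert!(0u8 & 0x80 == 0);     // an all-zero byte still reads as Free
    }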

File tree: 6 files changed (+103 -24 lines)

src/policy/immix/immixspace.rs (+3 -3)

@@ -524,7 +524,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
         self.defrag.notify_new_clean_block(copy);
         let block = Block::from_aligned_address(block_address);
         block.init(copy);
-        self.chunk_map.set(block.chunk(), ChunkState::Allocated);
+        self.chunk_map.set(block.chunk(), ChunkState::allocated(self.common().descriptor.get_index()));
         self.lines_consumed
             .fetch_add(Block::LINES, Ordering::SeqCst);
         Some(block)

@@ -899,7 +899,7 @@ struct SweepChunk<VM: VMBinding> {

 impl<VM: VMBinding> GCWork<VM> for SweepChunk<VM> {
     fn do_work(&mut self, _worker: &mut GCWorker<VM>, mmtk: &'static MMTK<VM>) {
-        assert_eq!(self.space.chunk_map.get(self.chunk), ChunkState::Allocated);
+        assert!(self.space.chunk_map.get(self.chunk).is_allocated());

         let mut histogram = self.space.defrag.new_histogram();
         let line_mark_state = if super::BLOCK_ONLY {

@@ -950,7 +950,7 @@ impl<VM: VMBinding> GCWork<VM> for SweepChunk<VM> {
         probe!(mmtk, sweep_chunk, allocated_blocks);
         // Set this chunk as free if there is not live blocks.
         if allocated_blocks == 0 {
-            self.space.chunk_map.set(self.chunk, ChunkState::Free)
+            self.space.chunk_map.set(self.chunk, ChunkState::free())
         }
         self.space.defrag.add_completed_mark_histogram(histogram);
         self.epilogue.finish_one_work_packet();

src/policy/marksweepspace/native_ms/global.rs (+5 -5)

@@ -402,7 +402,7 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {

     pub fn record_new_block(&self, block: Block) {
         block.init();
-        self.chunk_map.set(block.chunk(), ChunkState::Allocated);
+        self.chunk_map.set(block.chunk(), ChunkState::allocated(self.common.descriptor.get_index()));
     }

     pub fn prepare(&mut self, full_heap: bool) {

@@ -567,7 +567,7 @@ struct PrepareChunkMap<VM: VMBinding> {

 impl<VM: VMBinding> GCWork<VM> for PrepareChunkMap<VM> {
     fn do_work(&mut self, _worker: &mut GCWorker<VM>, _mmtk: &'static MMTK<VM>) {
-        debug_assert!(self.space.chunk_map.get(self.chunk) == ChunkState::Allocated);
+        debug_assert!(self.space.chunk_map.get(self.chunk).is_allocated());
         // number of allocated blocks.
         let mut n_occupied_blocks = 0;
         self.chunk

@@ -581,7 +581,7 @@ impl<VM: VMBinding> GCWork<VM> for PrepareChunkMap<VM> {
            });
         if n_occupied_blocks == 0 {
             // Set this chunk as free if there is no live blocks.
-            self.space.chunk_map.set(self.chunk, ChunkState::Free)
+            self.space.chunk_map.set(self.chunk, ChunkState::free())
         } else {
             // Otherwise this chunk is occupied, and we reset the mark bit if it is on the side.
             if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::LOCAL_MARK_BIT_SPEC {

@@ -617,7 +617,7 @@ struct SweepChunk<VM: VMBinding> {

 impl<VM: VMBinding> GCWork<VM> for SweepChunk<VM> {
     fn do_work(&mut self, _worker: &mut GCWorker<VM>, _mmtk: &'static MMTK<VM>) {
-        assert_eq!(self.space.chunk_map.get(self.chunk), ChunkState::Allocated);
+        assert!(self.space.chunk_map.get(self.chunk).is_allocated());

         // number of allocated blocks.
         let mut allocated_blocks = 0;

@@ -636,7 +636,7 @@ impl<VM: VMBinding> GCWork<VM> for SweepChunk<VM> {
         probe!(mmtk, sweep_chunk, allocated_blocks);
         // Set this chunk as free if there is not live blocks.
         if allocated_blocks == 0 {
-            self.space.chunk_map.set(self.chunk, ChunkState::Free)
+            self.space.chunk_map.set(self.chunk, ChunkState::free());
         }
         self.epilogue.finish_one_work_packet();
     }

src/util/heap/chunk_map.rs (+49 -15)

@@ -45,13 +45,44 @@ impl Chunk {
 }

 /// Chunk allocation state
-#[repr(u8)]
-#[derive(Debug, PartialEq, Clone, Copy)]
-pub enum ChunkState {
-    /// The chunk is not allocated.
-    Free = 0,
-    /// The chunk is allocated.
-    Allocated = 1,
+/// Highest bit: 0 = free, 1 = allocated
+/// Lower 4 bits: Space index (0-15)
+#[repr(transparent)]
+#[derive(PartialEq, Clone, Copy)]
+pub struct ChunkState(u8);
+
+impl ChunkState {
+    pub fn allocated(space_index: usize) -> ChunkState {
+        debug_assert!(space_index < crate::util::heap::layout::heap_parameters::MAX_SPACES);
+        let mut encode = space_index as u8;
+        encode |= 0x80;
+        ChunkState(encode)
+    }
+    pub fn free() -> ChunkState {
+        ChunkState(0)
+    }
+    pub fn is_free(&self) -> bool {
+        self.0 & 0x80 == 0
+    }
+    pub fn is_allocated(&self) -> bool {
+        !self.is_free()
+    }
+    pub fn get_space_index(&self) -> usize {
+        debug_assert!(self.is_allocated());
+        let index = (self.0 & 0x0F) as usize;
+        debug_assert!(index < crate::util::heap::layout::heap_parameters::MAX_SPACES);
+        index
+    }
+}
+
+impl std::fmt::Debug for ChunkState {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        if self.is_free() {
+            write!(f, "Free")
+        } else {
+            write!(f, "Allocated in space {}", self.get_space_index())
+        }
+    }
 }

 /// A byte-map to record all the allocated chunks.

@@ -78,10 +109,17 @@ impl ChunkMap {
         if self.get(chunk) == state {
             return;
         }
+        #[cfg(debug_assertions)]
+        {
+            let old_state = self.get(chunk);
+            if state.is_allocated() {
+                assert!(old_state.is_free() || old_state.get_space_index() == state.get_space_index(), "Chunk {:?}: old state {:?}, new state {:?}. Cannot set to new state.", chunk, old_state, state);
+            }
+        }
         // Update alloc byte
-        unsafe { Self::ALLOC_TABLE.store::<u8>(chunk.start(), state as u8) };
+        unsafe { Self::ALLOC_TABLE.store::<u8>(chunk.start(), state.0) };
         // If this is a newly allcoated chunk, then expand the chunk range.
-        if state == ChunkState::Allocated {
+        if state.is_allocated() {
             debug_assert!(!chunk.start().is_zero());
             let mut range = self.chunk_range.lock();
             if range.start == Chunk::ZERO {

@@ -99,11 +137,7 @@ impl ChunkMap {
     /// Get chunk state
     pub fn get(&self, chunk: Chunk) -> ChunkState {
         let byte = unsafe { Self::ALLOC_TABLE.load::<u8>(chunk.start()) };
-        match byte {
-            0 => ChunkState::Free,
-            1 => ChunkState::Allocated,
-            _ => unreachable!(),
-        }
+        ChunkState(byte)
     }

     /// A range of all chunks in the heap.

@@ -120,7 +154,7 @@ impl ChunkMap {
         let mut work_packets: Vec<Box<dyn GCWork<VM>>> = vec![];
         for chunk in self
             .all_chunks()
-            .filter(|c| self.get(*c) == ChunkState::Allocated)
+            .filter(|c| self.get(*c).is_allocated())
         {
             work_packets.push(func(chunk));
         }
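
With the new ChunkState in scope, the old equality checks against ChunkState::Allocated and ChunkState::Free become constructor and predicate calls, and the Debug output now names the owning space. A hedged usage sketch (assuming the type is visible from the caller; the space index 5 is arbitrary):

    // Round trip through the new API defined in the hunk above.
    let state = ChunkState::allocated(5);
    assert!(state.is_allocated());
    assert!(!state.is_free());
    assert_eq!(state.get_space_index(), 5);
    assert_eq!(format!("{:?}", state), "Allocated in space 5");
    assert!(ChunkState::free().is_free());

Note that ChunkMap::set keeps the encoded byte consistent: in debug builds it now asserts that an already-allocated chunk is never re-flagged as belonging to a different space.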

src/util/object_enum.rs (+1 -1)

@@ -85,7 +85,7 @@ pub(crate) fn enumerate_blocks_from_chunk_map<B>(
     B: BlockMayHaveObjects,
 {
     for chunk in chunk_map.all_chunks() {
-        if chunk_map.get(chunk) == ChunkState::Allocated {
+        if chunk_map.get(chunk).is_allocated() {
             for block in chunk.iter_region::<B>() {
                 if block.may_have_objects() {
                     enumerator.visit_address_range(block.start(), block.end());
src/vm/tests/mock_tests/mock_test_allocate_nonmoving.rs (new file, +44)

@@ -0,0 +1,44 @@
+// GITHUB-CI: MMTK_PLAN=all
+
+use lazy_static::lazy_static;
+
+use super::mock_test_prelude::*;
+use crate::plan::AllocationSemantics;
+
+#[test]
+pub fn allocate_alignment() {
+    with_mockvm(
+        || -> MockVM {
+            MockVM {
+                is_collection_enabled: MockMethod::new_fixed(Box::new(|_| false)),
+                ..MockVM::default()
+            }
+        },
+        || {
+            // 1MB heap
+            const MB: usize = 1024 * 1024;
+            let mut fixture = MutatorFixture::create_with_heapsize(MB);
+
+            // Normal alloc
+            let addr = memory_manager::alloc(
+                &mut fixture.mutator,
+                16,
+                8,
+                0,
+                AllocationSemantics::Default,
+            );
+            assert!(!addr.is_zero());
+
+            // Non moving alloc
+            let addr = memory_manager::alloc(
+                &mut fixture.mutator,
+                16,
+                8,
+                0,
+                AllocationSemantics::NonMoving,
+            );
+            assert!(!addr.is_zero());
+        },
+        no_cleanup,
+    )
+}

src/vm/tests/mock_tests/mod.rs (+1)

@@ -24,6 +24,7 @@ pub(crate) mod mock_test_prelude {
 }

 mod mock_test_allocate_align_offset;
+mod mock_test_allocate_nonmoving;
 mod mock_test_allocate_with_disable_collection;
 mod mock_test_allocate_with_initialize_collection;
 mod mock_test_allocate_with_re_enable_collection;