Commit 565bc48

rmacnak-google authored and Commit Queue committed
[vm, gc] Use FreeListElement as the filler object for array truncation.
This is consistent with new-space sweeping using FreeListElement as the filler object, and allows for stronger asserts in incremental compaction.

TEST=ci
Change-Id: I43a87a46fb1211a88589cf4a349ada7b556e4c11
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/366885
Reviewed-by: Alexander Aprelev <[email protected]>
Commit-Queue: Ryan Macnak <[email protected]>
1 parent 8b53d26 commit 565bc48

File tree: 2 files changed, +28 -90 lines changed

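Background for the diffs below: when the VM shrinks an object in place (here, the backing Array of a GrowableObjectArray being truncated by MakeFixedLength, as exercised in the test), the leftover tail must still begin with a header that reports its own size, otherwise a linear heap walk could not step over it. The following toy program is a minimal, self-contained sketch of that technique only; it is not Dart VM code, and the header encoding, class ids, and heap layout are invented for the illustration.

// Toy model of the "filler object" technique; illustrative only, not Dart VM
// code. The header encoding, class ids, and heap layout are invented.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <vector>

namespace {

constexpr uintptr_t kFillerCid = 0;  // stand-in for FreeListElement
constexpr uintptr_t kArrayCid = 1;   // stand-in for Array

// Header word: low 8 bits hold the class id, the remaining bits hold the
// object's total size in words (header included).
uintptr_t MakeHeader(uintptr_t cid, uintptr_t size_in_words) {
  return (size_in_words << 8) | cid;
}
uintptr_t HeaderSize(uintptr_t header) { return header >> 8; }
uintptr_t HeaderCid(uintptr_t header) { return header & 0xff; }

}  // namespace

int main() {
  std::vector<uintptr_t> heap(16, 0);

  // "Allocate" an array that fills the whole toy heap.
  heap[0] = MakeHeader(kArrayCid, 16);

  // Truncate it to 8 words. Words 8..15 would now be unparsable garbage, so
  // stamp them with a filler header that records the leftover size, the way
  // the VM stamps the tail of a truncated array.
  heap[0] = MakeHeader(kArrayCid, 8);
  heap[8] = MakeHeader(kFillerCid, 8);

  // A linear heap walk must be able to cover every word by hopping from
  // header to header; the filler's size is what makes the hop land correctly.
  size_t addr = 0;
  while (addr < heap.size()) {
    uintptr_t header = heap[addr];
    printf("object at word %zu: cid=%zu, size=%zu words\n", addr,
           static_cast<size_t>(HeaderCid(header)),
           static_cast<size_t>(HeaderSize(header)));
    assert(HeaderSize(header) > 0);
    addr += HeaderSize(header);
  }
  assert(addr == heap.size());  // the walk covered the heap exactly
  return 0;
}

In the real change, Object::MakeUnusedSpaceTraversable stamps the tail, and FreeListElement (rather than TypedDataInt8Array or Instance, as before) is the filler class that the heap walker and the stronger incremental-compaction asserts recognize.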

runtime/vm/object.cc

Lines changed: 13 additions & 56 deletions
@@ -1605,8 +1605,8 @@ void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
 
 // Make unused space in an object whose type has been transformed safe
 // for traversing during GC.
-// The unused part of the transformed object is marked as an TypedDataInt8Array
-// object.
+// The unused part of the transformed object is marked as a FreeListElement
+// object that is not inserted into the freelist.
 void Object::MakeUnusedSpaceTraversable(const Object& obj,
                                         intptr_t original_size,
                                         intptr_t used_size) {
@@ -1615,62 +1615,19 @@ void Object::MakeUnusedSpaceTraversable(const Object& obj,
   ASSERT(original_size >= used_size);
   if (original_size > used_size) {
     intptr_t leftover_size = original_size - used_size;
-
     uword addr = UntaggedObject::ToAddr(obj.ptr()) + used_size;
-    if (leftover_size >= TypedData::InstanceSize(0)) {
-      // Update the leftover space as a TypedDataInt8Array object.
-      TypedDataPtr raw =
-          static_cast<TypedDataPtr>(UntaggedObject::FromAddr(addr));
-      uword new_tags =
-          UntaggedObject::ClassIdTag::update(kTypedDataInt8ArrayCid, 0);
-      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
-      const bool is_old = obj.ptr()->IsOldObject();
-      new_tags = UntaggedObject::AlwaysSetBit::update(true, new_tags);
-      new_tags = UntaggedObject::NotMarkedBit::update(true, new_tags);
-      new_tags =
-          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
-      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
-      // On architectures with a relaxed memory model, the concurrent marker may
-      // observe the write of the filler object's header before observing the
-      // new array length, and so treat it as a pointer. Ensure it is a Smi so
-      // the marker won't dereference it.
-      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
-
-      intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
-      ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
-      raw->untag()->set_length<std::memory_order_release>(
-          Smi::New(leftover_len));
-      raw->untag()->tags_ = new_tags;
-      raw->untag()->RecomputeDataField();
+    if (obj.ptr()->IsNewObject()) {
+      FreeListElement::AsElementNew(addr, leftover_size);
     } else {
-      // Update the leftover space as a basic object.
-      ASSERT(leftover_size == Object::InstanceSize());
-      ObjectPtr raw = static_cast<ObjectPtr>(UntaggedObject::FromAddr(addr));
-      uword new_tags = UntaggedObject::ClassIdTag::update(kInstanceCid, 0);
-      new_tags = UntaggedObject::SizeTag::update(leftover_size, new_tags);
-      const bool is_old = obj.ptr()->IsOldObject();
-      new_tags = UntaggedObject::AlwaysSetBit::update(true, new_tags);
-      new_tags = UntaggedObject::NotMarkedBit::update(true, new_tags);
-      new_tags =
-          UntaggedObject::OldAndNotRememberedBit::update(is_old, new_tags);
-      new_tags = UntaggedObject::NewBit::update(!is_old, new_tags);
-      // On architectures with a relaxed memory model, the concurrent marker may
-      // observe the write of the filler object's header before observing the
-      // new array length, and so treat it as a pointer. Ensure it is a Smi so
-      // the marker won't dereference it.
-      ASSERT((new_tags & kSmiTagMask) == kSmiTag);
-
-      // The array might have an uninitialized alignment gap since the visitors
-      // for Arrays are precise based on element count, but the visitors for
-      // Instance are based on the size rounded to the allocation unit, so we
-      // need to ensure the alignment gap is initialized.
-      for (intptr_t offset = Instance::UnroundedSize();
-           offset < Instance::InstanceSize(); offset += sizeof(uword)) {
-        reinterpret_cast<std::atomic<uword>*>(addr + offset)
-            ->store(0, std::memory_order_release);
-      }
-      raw->untag()->tags_ = new_tags;
-    }
+      FreeListElement::AsElement(addr, leftover_size);
+    }
+    // On architectures with a relaxed memory model, the concurrent marker may
+    // observe the write of the filler object's header before observing the
+    // new array length, and so treat it as a pointer. Ensure it is a Smi so
+    // the marker won't dereference it.
+    ASSERT((*reinterpret_cast<uword*>(addr) & kSmiTagMask) == kSmiTag);
+    ASSERT((*reinterpret_cast<uword*>(addr + kWordSize) & kSmiTagMask) ==
+           kSmiTag);
   }
 }
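
One detail worth calling out in the hunk above: the old code asserted that the tag word it had just built was Smi-tagged before storing it, while the new code reads back the first two words of the freshly stamped region. Checking the second word as well presumably reflects that a FreeListElement keeps bookkeeping (its free-list link, for instance) immediately after the header, and a concurrent marker racing with the truncation could read either of those words as if it were still an array slot; that reading of FreeListElement's layout is an inference, not something the diff states. The toy snippet below only illustrates the Smi-tag invariant the asserts enforce; it is not Dart VM code and its constant names are invented, though the convention itself (Smis have a clear low tag bit, heap pointers a set one) matches the Dart VM's.

// Toy check of the Smi-tag invariant; illustrative only, not Dart VM code.
#include <cassert>
#include <cstdint>

// Local stand-ins for the tagging convention: a word with a clear low bit is
// a small integer (Smi) and is never dereferenced; a word with the low bit
// set is a heap pointer.
constexpr uintptr_t kToySmiTagMask = 1;
constexpr uintptr_t kToySmiTag = 0;

bool LooksLikeSmi(uintptr_t word) {
  return (word & kToySmiTagMask) == kToySmiTag;
}

int main() {
  // The first two words of a freshly stamped filler: a tags/header word and,
  // in this toy, a null link word. Both are even, so a marker that races with
  // the truncation and still reads them as array elements sees harmless Smis.
  uintptr_t filler_words[2] = {0x150, 0x0};
  assert(LooksLikeSmi(filler_words[0]));
  assert(LooksLikeSmi(filler_words[1]));

  // A real heap pointer would have its low bit set and would be dereferenced
  // by the marker, which is exactly what the new ASSERTs rule out.
  uintptr_t heap_pointer = 0x2001;
  assert(!LooksLikeSmi(heap_pointer));
  return 0;
}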

runtime/vm/object_test.cc

Lines changed: 15 additions & 34 deletions
@@ -2095,9 +2095,8 @@ ISOLATE_UNIT_TEST_CASE(GrowableObjectArray) {
 
   // Test the MakeFixedLength functionality to make sure the resulting array
   // object is properly setup.
-  // 1. Should produce an array of length 2 and a left over int8 array.
+  // 1. Should produce an array of length 2 and a filler of minimal size.
   Array& new_array = Array::Handle();
-  TypedData& left_over_array = TypedData::Handle();
   Object& obj = Object::Handle();
   uword addr = 0;
   intptr_t used_size = 0;
@@ -2117,19 +2116,12 @@
   new_array ^= obj.ptr();
   EXPECT_EQ(2, new_array.Length());
   addr += used_size;
-  obj = UntaggedObject::FromAddr(addr);
-#if defined(DART_COMPRESSED_POINTERS)
-  // In compressed pointer mode, the TypedData doesn't fit.
-  EXPECT(obj.IsInstance());
-#else
-  EXPECT(obj.IsTypedData());
-  left_over_array ^= obj.ptr();
-  EXPECT_EQ(4 * kWordSize - TypedData::InstanceSize(0),
-            left_over_array.Length());
-#endif
+  ObjectPtr filler = UntaggedObject::FromAddr(addr);
+  EXPECT(filler->IsFreeListElement());
+  EXPECT_EQ(filler->untag()->HeapSize(),
+            Array::InstanceSize(kArrayLen + 1) - used_size);
 
-  // 2. Should produce an array of length 3 and a left over int8 array or
-  // instance.
+  // 2. Should produce an array of length 3 and a filler object.
   array = GrowableObjectArray::New(kArrayLen);
   EXPECT_EQ(kArrayLen, array.Capacity());
   EXPECT_EQ(0, array.Length());
@@ -2145,17 +2137,12 @@
   new_array ^= obj.ptr();
   EXPECT_EQ(3, new_array.Length());
   addr += used_size;
-  obj = UntaggedObject::FromAddr(addr);
-  if (TypedData::InstanceSize(0) <= 2 * kCompressedWordSize) {
-    EXPECT(obj.IsTypedData());
-    left_over_array ^= obj.ptr();
-    EXPECT_EQ(2 * kCompressedWordSize - TypedData::InstanceSize(0),
-              left_over_array.Length());
-  } else {
-    EXPECT(obj.IsInstance());
-  }
+  filler = UntaggedObject::FromAddr(addr);
+  EXPECT(filler->IsFreeListElement());
+  EXPECT_EQ(filler->untag()->HeapSize(),
+            Array::InstanceSize(kArrayLen) - used_size);
 
-  // 3. Should produce an array of length 1 and a left over int8 array.
+  // 3. Should produce an array of length 1 and a filler object.
   array = GrowableObjectArray::New(kArrayLen + 3);
   EXPECT_EQ((kArrayLen + 3), array.Capacity());
   EXPECT_EQ(0, array.Length());
@@ -2171,16 +2158,10 @@
   new_array ^= obj.ptr();
   EXPECT_EQ(1, new_array.Length());
   addr += used_size;
-  obj = UntaggedObject::FromAddr(addr);
-#if defined(DART_COMPRESSED_POINTERS)
-  // In compressed pointer mode, the TypedData doesn't fit.
-  EXPECT(obj.IsInstance());
-#else
-  EXPECT(obj.IsTypedData());
-  left_over_array ^= obj.ptr();
-  EXPECT_EQ(8 * kWordSize - TypedData::InstanceSize(0),
-            left_over_array.Length());
-#endif
+  filler = UntaggedObject::FromAddr(addr);
+  EXPECT(filler->IsFreeListElement());
+  EXPECT_EQ(filler->untag()->HeapSize(),
+            Array::InstanceSize(kArrayLen + 3) - used_size);
 
   // 4. Verify that GC can handle the filler object for a large array.
   array = GrowableObjectArray::New((1 * MB) >> kWordSizeLog2);
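
All three rewritten test cases now share one pattern: immediately after the used_size bytes the shrunken array keeps, there must be a single FreeListElement whose HeapSize() equals the original allocation size minus used_size. Because that holds on every configuration, the DART_COMPRESSED_POINTERS and kCompressedWordSize special cases could be deleted. The helper below is a hypothetical sketch of that shared check, written for this note rather than taken from object_test.cc; it uses only calls that appear in the diff above.

// Hypothetical helper illustrating the invariant shared by test cases 1-3;
// not part of object_test.cc.
static void VerifyTruncationFiller(uword array_addr,
                                   intptr_t original_size,
                                   intptr_t used_size) {
  // The filler starts immediately after the bytes the shrunken array keeps.
  uword filler_addr = array_addr + used_size;
  ObjectPtr filler = UntaggedObject::FromAddr(filler_addr);
  // It must be a FreeListElement covering exactly the leftover space, so a
  // heap walk lands on the next real object afterwards.
  EXPECT(filler->IsFreeListElement());
  EXPECT_EQ(original_size - used_size, filler->untag()->HeapSize());
}

In the test itself, addr (before the "addr += used_size" step) holds the array's start address, and Array::InstanceSize(kArrayLen + 1) and the analogous expressions in cases 2 and 3 supply the original allocation size.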
