Skip to content
This repository has been archived by the owner on Apr 3, 2020. It is now read-only.

Commit

Permalink
Revert of Reland "Filter invalid slots out from the SlotsBuffer after marking." (patchset #2 id:2 of https://codereview.chromium.org/1032833002/)
Browse files Browse the repository at this point in the history

Reason for revert:
Reverting risky GC changes that block v8 roll.

Original issue's description:
> Reland "Filter invalid slots out from the SlotsBuffer after marking."
>
> > There are two reasons that could cause invalid slots appearance in SlotsBuffer:
> > 1) If GC trims "tail" of an array for which it has already recorded a slots and then migrate another object to the "tail".
> > 2) Tagged slot could become a double slot after migrating of an object to another map with "shifted" fields (for example as a result of generalizing immutable data property to a data field).
>
> > This CL also adds useful machinery that helps triggering incremental write barriers.
>
> > BUG=chromium:454297
> > LOG=Y
>
> NOTRY=true
>
> Committed: https://crrev.com/f86aadd1d45c756467dff8e08a055b462d7a060b
> Cr-Commit-Position: refs/heads/master@{#27433}

TBR=machenbach@chromium.org,ulan@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true

Review URL: https://codereview.chromium.org/1041593002

Cr-Commit-Position: refs/heads/master@{#27491}
  • Loading branch information
isheludko authored and Commit bot committed Mar 27, 2015
1 parent d23a9f7 commit de018fb
Show file tree
Hide file tree
Showing 6 changed files with 32 additions and 404 deletions.
5 changes: 0 additions & 5 deletions src/flag-definitions.h
Original file line number Diff line number Diff line change
Expand Up @@ -767,11 +767,6 @@ DEFINE_BOOL(stress_compaction, false,
"stress the GC compactor to flush out bugs (implies "
"--force_marking_deque_overflows)")

// Test-only flag: together with --stress_compaction it lets a unit test force
// specific pages to become evacuation candidates (via the
// FORCE_EVACUATION_CANDIDATE_FOR_TESTING page flag) instead of relying on the
// parity-based stress heuristic.
DEFINE_BOOL(manual_evacuation_candidates_selection, false,
"Test mode only flag. It allows an unit test to select evacuation "
"candidates pages (requires --stress_compaction).")


//
// Dev shell flags
//
Expand Down
179 changes: 22 additions & 157 deletions src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -287,60 +287,6 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
}


// Drops stale entries from the slots buffer of every page in |space| by
// delegating to SlotsBuffer::RemoveInvalidSlots.
void MarkCompactCollector::ClearInvalidSlotsBufferEntries(PagedSpace* space) {
  for (PageIterator iter(space); iter.has_next();) {
    Page* page = iter.next();
    SlotsBuffer::RemoveInvalidSlots(heap_, page->slots_buffer());
  }
}


void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
heap_->store_buffer()->ClearInvalidStoreBufferEntries();

ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
ClearInvalidSlotsBufferEntries(heap_->old_data_space());
ClearInvalidSlotsBufferEntries(heap_->code_space());
ClearInvalidSlotsBufferEntries(heap_->cell_space());
ClearInvalidSlotsBufferEntries(heap_->map_space());

LargeObjectIterator it(heap_->lo_space());
for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
SlotsBuffer::RemoveInvalidSlots(heap_, chunk->slots_buffer());
}
}


#ifdef VERIFY_HEAP
// VERIFY_HEAP helper: checks every page of |space| via
// SlotsBuffer::VerifySlots.
static void VerifyValidSlotsBufferEntries(Heap* heap, PagedSpace* space) {
  for (PageIterator iter(space); iter.has_next();) {
    Page* page = iter.next();
    SlotsBuffer::VerifySlots(heap, page->slots_buffer());
  }
}


// VERIFY_HEAP helper: verifies the store buffer and the slots buffers of all
// paged spaces and of every large-object-space chunk.
static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
  heap->store_buffer()->VerifyValidStoreBufferEntries();

  VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
  VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
  VerifyValidSlotsBufferEntries(heap, heap->code_space());
  VerifyValidSlotsBufferEntries(heap, heap->cell_space());
  VerifyValidSlotsBufferEntries(heap, heap->map_space());

  // Large objects are not covered by PageIterator; walk them one by one.
  LargeObjectIterator lo_iter(heap->lo_space());
  HeapObject* obj;
  while ((obj = lo_iter.Next()) != NULL) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
    SlotsBuffer::VerifySlots(heap, chunk->slots_buffer());
  }
}
#endif


void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
Expand All @@ -366,11 +312,11 @@ void MarkCompactCollector::CollectGarbage() {
}
#endif

ClearInvalidStoreAndSlotsBufferEntries();
heap_->store_buffer()->ClearInvalidStoreBufferEntries();

#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyValidStoreAndSlotsBufferEntries(heap_);
heap_->store_buffer()->VerifyValidStoreBufferEntries();
}
#endif

Expand Down Expand Up @@ -777,7 +723,6 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {

int count = 0;
int fragmentation = 0;
int page_number = 0;
Candidate* least = NULL;

PageIterator it(space);
Expand All @@ -792,16 +737,9 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
CHECK(p->slots_buffer() == NULL);

if (FLAG_stress_compaction) {
if (FLAG_manual_evacuation_candidates_selection) {
if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
fragmentation = 1;
}
} else {
unsigned int counter = space->heap()->ms_count();
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
page_number++;
}
unsigned int counter = space->heap()->ms_count();
uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
} else if (mode == REDUCE_MEMORY_FOOTPRINT && !FLAG_always_compact) {
// Don't try to release too many pages.
if (estimated_release >= over_reserved) {
Expand Down Expand Up @@ -3109,14 +3047,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
}


bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
HeapObject** out_object) {
bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot) {
// This function does not support large objects right now.
Space* owner = p->owner();
if (owner == heap_->lo_space() || owner == NULL) {
*out_object = NULL;
return true;
}
if (owner == heap_->lo_space() || owner == NULL) return true;

uint32_t mark_bit_index = p->AddressToMarkbitIndex(slot);
unsigned int start_index = mark_bit_index >> Bitmap::kBitsPerCellLog2;
Expand Down Expand Up @@ -3170,7 +3104,6 @@ bool MarkCompactCollector::IsSlotInBlackObject(Page* p, Address slot,
(object->address() + object->Size()) > slot) {
// If the slot is within the last found object in the cell, the slot is
// in a live object.
*out_object = object;
return true;
}
return false;
Expand Down Expand Up @@ -3212,46 +3145,38 @@ bool MarkCompactCollector::IsSlotInBlackObjectSlow(Page* p, Address slot) {
}


bool MarkCompactCollector::IsSlotInLiveObject(Address slot) {
HeapObject* object = NULL;
bool MarkCompactCollector::IsSlotInLiveObject(HeapObject** address,
HeapObject* object) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
if (!Marking::IsBlack(Marking::MarkBitFrom(object))) {
return false;
}

// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Find out based on mark bits if the slot is part of a live object.
if (!IsSlotInBlackObject(Page::FromAddress(slot), slot, &object)) {
if (!IsSlotInBlackObject(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address))) {
return false;
}

#if V8_DOUBLE_FIELDS_UNBOXING
// |object| is NULL only when the slot belongs to large object space.
DCHECK(object != NULL ||
Page::FromAnyPointerAddress(heap_, slot)->owner() ==
heap_->lo_space());
// We don't need to check large objects' layout descriptor since it can't
// contain in-object fields anyway.
if (object != NULL) {
// Filter out slots that happens to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
bool has_only_tagged_fields = helper.all_fields_tagged();
if (!has_only_tagged_fields &&
!helper.IsTagged(static_cast<int>(slot - object->address()))) {
return false;
}
}
#endif

return true;
}


void MarkCompactCollector::VerifyIsSlotInLiveObject(Address slot,
void MarkCompactCollector::VerifyIsSlotInLiveObject(HeapObject** address,
HeapObject* object) {
// The target object has to be black.
CHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));

// The target object is black but we don't know if the source slot is black.
// The source object could have died and the slot could be part of a free
// space. Use the mark bit iterator to find out about liveness of the slot.
CHECK(IsSlotInBlackObjectSlow(Page::FromAddress(slot), slot));
CHECK(IsSlotInBlackObjectSlow(
Page::FromAddress(reinterpret_cast<Address>(address)),
reinterpret_cast<Address>(address)));
}


Expand Down Expand Up @@ -4570,66 +4495,6 @@ bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
}


// Dummy slot target: NULL reads back as Smi::FromInt(0), so a slot pointing
// here is harmless for the pointer-updating phase.
static Object* g_smi_slot = NULL;


// Walks the whole chain of slots buffers starting at |buffer| and overwrites
// every untyped slot that no longer lies inside a live object (or that now
// points into new space) with a dummy smi-containing slot. Must be called
// after marking, while mark bits are still intact, and before sweeping.
void SlotsBuffer::RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer) {
// g_smi_slot must still read as the smi 0 (i.e. remain NULL).
DCHECK_EQ(Smi::FromInt(0), g_smi_slot);

// Remove entries by replacing them with a dummy slot containing a smi.
const ObjectSlot kRemovedEntry = &g_smi_slot;

// Each buffer in the chain holds idx_ used entries in slots_.
while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;

for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
// Smis in the slot need no filtering; only heap-object pointers can
// become stale.
if (object->IsHeapObject()) {
if (heap->InNewSpace(object) ||
!heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot))) {
slots[slot_idx] = kRemovedEntry;
}
}
} else {
// A typed-slot marker is followed by its address entry; skip both.
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}


// Debug counterpart of RemoveInvalidSlots: asserts that no untyped slot in
// the chain of buffers points into new space or into a dead object. Intended
// to run after RemoveInvalidSlots, under FLAG_verify_heap.
void SlotsBuffer::VerifySlots(Heap* heap, SlotsBuffer* buffer) {
// The dummy removal target must still read as the smi 0.
DCHECK_EQ(Smi::FromInt(0), g_smi_slot);

while (buffer != NULL) {
SlotsBuffer::ObjectSlot* slots = buffer->slots_;
intptr_t slots_count = buffer->idx_;

for (int slot_idx = 0; slot_idx < slots_count; ++slot_idx) {
ObjectSlot slot = slots[slot_idx];
if (!IsTypedSlot(slot)) {
Object* object = *slot;
// Only heap-object pointers carry liveness constraints; smis are fine.
if (object->IsHeapObject()) {
CHECK(!heap->InNewSpace(object));
CHECK(heap->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<Address>(slot)));
}
} else {
// Typed-slot marker entries are paired with an address entry; skip it.
++slot_idx;
DCHECK(slot_idx < slots_count);
}
}
buffer = buffer->next();
}
}


static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return SlotsBuffer::CODE_TARGET_SLOT;
Expand Down
17 changes: 3 additions & 14 deletions src/heap/mark-compact.h
Original file line number Diff line number Diff line change
Expand Up @@ -363,15 +363,6 @@ class SlotsBuffer {
SlotsBuffer** buffer_address, SlotType type, Address addr,
AdditionMode mode);

// Eliminates all stale entries from the slots buffer, i.e., slots that
// are not part of live objects anymore. This method must be called after
// marking, when the whole transitive closure is known and must be called
// before sweeping when mark bits are still intact.
static void RemoveInvalidSlots(Heap* heap, SlotsBuffer* buffer);

// Ensures that there are no invalid slots in the chain of slots buffers.
static void VerifySlots(Heap* heap, SlotsBuffer* buffer);

static const int kNumberOfElements = 1021;

private:
Expand Down Expand Up @@ -688,10 +679,10 @@ class MarkCompactCollector {
// The following four methods can just be called after marking, when the
// whole transitive closure is known. They must be called before sweeping
// when mark bits are still intact.
bool IsSlotInBlackObject(Page* p, Address slot, HeapObject** out_object);
bool IsSlotInBlackObject(Page* p, Address slot);
bool IsSlotInBlackObjectSlow(Page* p, Address slot);
bool IsSlotInLiveObject(Address slot);
void VerifyIsSlotInLiveObject(Address slot, HeapObject* object);
bool IsSlotInLiveObject(HeapObject** address, HeapObject* object);
void VerifyIsSlotInLiveObject(HeapObject** address, HeapObject* object);

private:
class SweeperTask;
Expand All @@ -703,8 +694,6 @@ class MarkCompactCollector {
bool WillBeDeoptimized(Code* code);
void RemoveDeadInvalidatedCode();
void ProcessInvalidatedCode(ObjectVisitor* visitor);
void ClearInvalidSlotsBufferEntries(PagedSpace* space);
void ClearInvalidStoreAndSlotsBufferEntries();

void StartSweeperThreads();

Expand Down
6 changes: 0 additions & 6 deletions src/heap/spaces.h
Original file line number Diff line number Diff line change
Expand Up @@ -385,12 +385,6 @@ class MemoryChunk {
// to grey transition is performed in the value.
HAS_PROGRESS_BAR,

// This flag is intended to be used for testing. Works only when both
// FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
// are set. It forces the page to become an evacuation candidate at next
// candidates selection cycle.
FORCE_EVACUATION_CANDIDATE_FOR_TESTING,

// Last flag, keep at bottom.
NUM_MEMORY_CHUNK_FLAGS
};
Expand Down
16 changes: 7 additions & 9 deletions src/heap/store-buffer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -361,14 +361,12 @@ void StoreBuffer::ClearInvalidStoreBufferEntries() {
Address* new_top = old_start_;
for (Address* current = old_start_; current < old_top_; current++) {
Address addr = *current;
Object** slot = reinterpret_cast<Object**>(addr);
Object** slot = reinterpret_cast<Object**>(*current);
Object* object = *slot;
if (heap_->InNewSpace(object) && object->IsHeapObject()) {
// If the target object is not black, the source slot must be part
// of a non-black (dead) object.
HeapObject* heap_object = HeapObject::cast(object);
if (Marking::IsBlack(Marking::MarkBitFrom(heap_object)) &&
heap_->mark_compact_collector()->IsSlotInLiveObject(addr)) {
if (heap_->InNewSpace(object)) {
if (heap_->mark_compact_collector()->IsSlotInLiveObject(
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object))) {
*new_top++ = addr;
}
}
Expand All @@ -391,10 +389,10 @@ void StoreBuffer::VerifyValidStoreBufferEntries() {
for (Address* current = old_start_; current < old_top_; current++) {
Object** slot = reinterpret_cast<Object**>(*current);
Object* object = *slot;
CHECK(object->IsHeapObject());
CHECK(heap_->InNewSpace(object));
heap_->mark_compact_collector()->VerifyIsSlotInLiveObject(
reinterpret_cast<Address>(slot), HeapObject::cast(object));
reinterpret_cast<HeapObject**>(slot),
reinterpret_cast<HeapObject*>(object));
}
}

Expand Down
Loading

0 comments on commit de018fb

Please sign in to comment.