diff --git a/src/heap/heap-inl.h b/src/heap/heap-inl.h
index afe107fe35e..e31d3d6859b 100644
--- a/src/heap/heap-inl.h
+++ b/src/heap/heap-inl.h
@@ -251,6 +251,12 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
   } else {
     old_gen_exhausted_ = true;
   }
+
+  if (!old_gen_exhausted_ && incremental_marking()->black_allocation() &&
+      space != OLD_SPACE) {
+    Marking::MarkBlack(Marking::MarkBitFrom(object));
+    MemoryChunk::IncrementLiveBytesFromGC(object, size_in_bytes);
+  }
   return allocation;
 }
 
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index a2daab0925c..ce92611c075 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -4239,7 +4239,23 @@ void Heap::RegisterReservationsForBlackAllocation(Reservation* reservations) {
   // TODO(hpayer): We do not have to iterate reservations on black objects
   // for marking. We just have to execute the special visiting side effect
   // code that adds objects to global data structures, e.g. for array buffers.
+
+  // Code space, map space, and large object space do not use black pages.
+  // Hence we have to color all objects of the reservation first black to avoid
+  // unnecessary marking deque load.
   if (incremental_marking()->black_allocation()) {
+    for (int i = CODE_SPACE; i < Serializer::kNumberOfSpaces; i++) {
+      const Heap::Reservation& res = reservations[i];
+      for (auto& chunk : res) {
+        Address addr = chunk.start;
+        while (addr < chunk.end) {
+          HeapObject* obj = HeapObject::FromAddress(addr);
+          Marking::MarkBlack(Marking::MarkBitFrom(obj));
+          MemoryChunk::IncrementLiveBytesFromGC(obj, obj->Size());
+          addr += obj->Size();
+        }
+      }
+    }
     for (int i = OLD_SPACE; i < Serializer::kNumberOfSpaces; i++) {
       const Heap::Reservation& res = reservations[i];
       for (auto& chunk : res) {
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index 686c6738821..a3de6a13ad7 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -571,12 +571,9 @@ void IncrementalMarking::StartBlackAllocation() {
   DCHECK(FLAG_black_allocation);
   DCHECK(IsMarking());
   black_allocation_ = true;
-  PagedSpaces spaces(heap());
-  for (PagedSpace* space = spaces.next(); space != NULL;
-       space = spaces.next()) {
-    space->EmptyAllocationInfo();
-    space->free_list()->Reset();
-  }
+  OldSpace* old_space = heap()->old_space();
+  old_space->EmptyAllocationInfo();
+  old_space->free_list()->Reset();
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Black allocation started\n");
   }
diff --git a/src/heap/spaces.cc b/src/heap/spaces.cc
index f1029c4d7d7..3a93a46814e 100644
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -1172,9 +1172,10 @@ bool PagedSpace::Expand() {
   // Pages created during bootstrapping may contain immortal immovable objects.
   if (!heap()->deserialization_complete()) p->MarkNeverEvacuate();
 
-  // When incremental marking was activated, old generation pages are allocated
+  // When incremental marking was activated, old space pages are allocated
   // black.
-  if (heap()->incremental_marking()->black_allocation()) {
+  if (heap()->incremental_marking()->black_allocation() &&
+      identity() == OLD_SPACE) {
     Bitmap::SetAllBits(p);
     p->SetFlag(Page::BLACK_PAGE);
     if (FLAG_trace_incremental_marking) {
@@ -2887,11 +2888,6 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
   }
 
   HeapObject* object = page->GetObject();
-  if (heap()->incremental_marking()->black_allocation()) {
-    MarkBit mark_bit = Marking::MarkBitFrom(object);
-    Marking::MarkBlack(mark_bit);
-    page->SetFlag(Page::BLACK_PAGE);
-  }
   MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object->address(), object_size);
 
   if (Heap::ShouldZapGarbage()) {
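Taken together, the patch restricts black *pages* to old space (`PagedSpace::Expand` and `StartBlackAllocation` now touch only `OLD_SPACE`), while code space, map space, and large object space switch to per-object black marking: new objects are blackened in `Heap::AllocateRaw` and deserialized reservations in `RegisterReservationsForBlackAllocation`, and the per-page flagging in `LargeObjectSpace::AllocateRaw` is dropped. As a rough illustration of the two strategies, here is a minimal, self-contained C++ sketch; every name in it (`SpaceId`, `FakePage`, `AllocateBlackPage`, `MarkObjectBlack`, `OnAllocation`) is a hypothetical stand-in, not V8's real Marking/MemoryChunk API.

```cpp
// Sketch of the two black-allocation strategies this patch separates.
// All types and functions below are illustrative stand-ins, not V8 code.
#include <bitset>
#include <cstddef>
#include <cstdio>

enum SpaceId { OLD_SPACE, CODE_SPACE, MAP_SPACE, LO_SPACE };

struct FakePage {
  std::bitset<512> mark_bits;  // one mark bit per (hypothetical) slot
  bool black_page = false;
  std::size_t live_bytes = 0;
};

// Old space: whole pages are allocated black up front (the patch's
// Bitmap::SetAllBits + Page::BLACK_PAGE), so individual allocations on the
// page need no further marking work.
void AllocateBlackPage(FakePage* p) {
  p->mark_bits.set();  // every object on this page is born black
  p->black_page = true;
}

// Other spaces: no black pages, so each new object is marked black
// individually and its size is added to the page's live-byte count (the
// patch's Marking::MarkBlack + MemoryChunk::IncrementLiveBytesFromGC).
void MarkObjectBlack(FakePage* p, std::size_t slot, std::size_t size) {
  p->mark_bits.set(slot);
  p->live_bytes += size;
}

// Mirrors the new branch in Heap::AllocateRaw: old space relies on black
// pages; every other space falls back to per-object marking.
void OnAllocation(SpaceId space, FakePage* p, std::size_t slot,
                  std::size_t size) {
  if (space != OLD_SPACE) {
    MarkObjectBlack(p, slot, size);
  }
  // OLD_SPACE: nothing to do, the page was blackened when it was created.
}

int main() {
  FakePage old_page;
  AllocateBlackPage(&old_page);             // old-space page: born black
  OnAllocation(OLD_SPACE, &old_page, 3, 16);

  FakePage code_page;                       // code-space page: per object
  OnAllocation(CODE_SPACE, &code_page, 7, 32);
  std::printf("old page black=%d, code page live bytes=%zu\n",
              old_page.black_page, code_page.live_bytes);
}
```

The split matches the comment the patch adds in `RegisterReservationsForBlackAllocation`: spaces without black pages must blacken objects eagerly, which also keeps those objects off the marking deque instead of re-discovering them during incremental marking.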