Skip to content

Commit

Permalink
deps: cherry-pick a715957 from V8 upstream
Browse files Browse the repository at this point in the history
Original commit message:
  Iterate handles with special left-trim visitor

  BUG=chromium:620553
  LOG=N
  R=hpayer@chromium.org

  Review-Url: https://codereview.chromium.org/2102243002
  Cr-Commit-Position: refs/heads/master@{#37366}

PR-URL: #10666
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Fedor Indutny <fedor.indutny@gmail.com>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
  • Loading branch information
MylesBorins committed Jan 31, 2017
1 parent 87839ca commit e71129e
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 35 deletions.
25 changes: 0 additions & 25 deletions deps/v8/src/heap/heap-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -449,31 +449,6 @@ void Heap::CopyBlock(Address dst, Address src, int byte_size) {
static_cast<size_t>(byte_size / kPointerSize));
}

// Filters out stale pointers to left-trimmed objects: when the start of a
// FixedArrayBase is trimmed away, the freed prefix is overwritten with filler
// objects, so a handle still pointing at the old start now points at a filler.
// Returns true and clears the slot (*object = nullptr) in that case so later
// handle post-processing does not touch a possibly-freed page; returns false
// if the slot needs no fixing.
bool Heap::PurgeLeftTrimmedObject(Object** object) {
  HeapObject* current = reinterpret_cast<HeapObject*>(*object);
  const MapWord map_word = current->map_word();
  // A filler map identifies a trimmed-away prefix. If the map word is a
  // forwarding address the object was evacuated, not trimmed, so the slot
  // must be left alone for the normal pointer-updating pass.
  if (current->IsFiller() && !map_word.IsForwardingAddress()) {
#ifdef DEBUG
    // We need to find a FixedArrayBase map after walking the fillers.
    // Sanity check (debug only): walking forward over the run of fillers must
    // land on the surviving FixedArrayBase that the trimming left behind.
    while (current->IsFiller()) {
      Address next = reinterpret_cast<Address>(current);
      if (current->map() == one_pointer_filler_map()) {
        next += kPointerSize;
      } else if (current->map() == two_pointer_filler_map()) {
        next += 2 * kPointerSize;
      } else {
        // Larger fillers record their own size.
        next += current->Size();
      }
      current = reinterpret_cast<HeapObject*>(next);
    }
    DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
    // Clear the slot so no one follows the stale pointer.
    *object = nullptr;
    return true;
  }
  return false;
}

template <Heap::FindMementoMode mode>
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
// Check if there is potentially a memento behind the object. If
Expand Down
45 changes: 45 additions & 0 deletions deps/v8/src/heap/heap.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4800,6 +4800,49 @@ void Heap::IterateSmiRoots(ObjectVisitor* v) {
v->Synchronize(VisitorSynchronization::kSmiRootList);
}

// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
//
// A handle is "stale" when it still points at the original start of a
// FixedArrayBase whose prefix was left-trimmed: that prefix has been
// overwritten with filler objects, so the handle now references a filler.
class FixStaleLeftTrimmedHandlesVisitor : public ObjectVisitor {
 public:
  explicit FixStaleLeftTrimmedHandlesVisitor(Heap* heap) : heap_(heap) {
    // heap_ is only dereferenced inside the DEBUG-only walk in FixHandle;
    // USE() keeps release builds from warning about an unused member.
    USE(heap_);
  }

  void VisitPointer(Object** p) override { FixHandle(p); }

  void VisitPointers(Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) FixHandle(p);
  }

 private:
  // Clears *p when it points at a filler left behind by left-trimming;
  // otherwise leaves the slot untouched.
  inline void FixHandle(Object** p) {
    HeapObject* current = reinterpret_cast<HeapObject*>(*p);
    // Slots holding non-heap-object values (Smis) need no fixing.
    if (!current->IsHeapObject()) return;
    const MapWord map_word = current->map_word();
    // A forwarding address means the object was evacuated, not trimmed;
    // only a genuine filler map marks a trimmed-away prefix.
    if (!map_word.IsForwardingAddress() && current->IsFiller()) {
#ifdef DEBUG
      // We need to find a FixedArrayBase map after walking the fillers.
      // Debug-only sanity check: stepping over the run of fillers must land
      // on the surviving FixedArrayBase produced by the trimming.
      while (current->IsFiller()) {
        Address next = reinterpret_cast<Address>(current);
        if (current->map() == heap_->one_pointer_filler_map()) {
          next += kPointerSize;
        } else if (current->map() == heap_->two_pointer_filler_map()) {
          next += 2 * kPointerSize;
        } else {
          // Larger fillers record their own size.
          next += current->Size();
        }
        current = reinterpret_cast<HeapObject*>(next);
      }
      DCHECK(current->IsFixedArrayBase());
#endif  // DEBUG
      // Clear the slot so handle post-processing never follows the stale
      // pointer (the sweeper may free the underlying page).
      *p = nullptr;
    }
  }

  Heap* heap_;
};

void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
Expand All @@ -4820,6 +4863,8 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
v->Synchronize(VisitorSynchronization::kCompilationCache);

// Iterate over local handles in handle scopes.
FixStaleLeftTrimmedHandlesVisitor left_trim_visitor(this);
isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
isolate_->handle_scope_implementer()->Iterate(v);
isolate_->IterateDeferredHandles(v);
v->Synchronize(VisitorSynchronization::kHandleScope);
Expand Down
6 changes: 0 additions & 6 deletions deps/v8/src/heap/heap.h
Original file line number Diff line number Diff line change
Expand Up @@ -602,12 +602,6 @@ class Heap {
// stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
static int GetStaticVisitorIdForMap(Map* map);

// We cannot avoid stale handles to left-trimmed objects, but can only make
// sure all handles still needed are updated. Filter out a stale pointer
// and clear the slot to allow post processing of handles (needed because
// the sweeper might actually free the underlying page).
inline bool PurgeLeftTrimmedObject(Object** object);

// Notifies the heap that is ok to start marking or other activities that
// should not happen during deserialization.
void NotifyDeserializationComplete();
Expand Down
2 changes: 0 additions & 2 deletions deps/v8/src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1376,8 +1376,6 @@ class RootMarkingVisitor : public ObjectVisitor {

HeapObject* object = HeapObject::cast(*p);

if (collector_->heap()->PurgeLeftTrimmedObject(p)) return;

MarkBit mark_bit = Marking::MarkBitFrom(object);
if (Marking::IsBlackOrGrey(mark_bit)) return;

Expand Down
2 changes: 0 additions & 2 deletions deps/v8/src/heap/scavenger.cc
Original file line number Diff line number Diff line change
Expand Up @@ -463,8 +463,6 @@ void ScavengeVisitor::ScavengePointer(Object** p) {
Object* object = *p;
if (!heap_->InNewSpace(object)) return;

if (heap_->PurgeLeftTrimmedObject(p)) return;

Scavenger::ScavengeObject(reinterpret_cast<HeapObject**>(p),
reinterpret_cast<HeapObject*>(object));
}
Expand Down

0 comments on commit e71129e

Please sign in to comment.