Revert of [heap] Fine-grained JSArrayBuffer tracking (patchset #7 id:200001 of https://codereview.chromium.org/1936233002/ )

Reason for revert:
Breaks readonly test
  https://uberchromegw.corp.google.com/i/client.v8/builders/V8%20Linux/builds/10121

Original issue's description:
> [heap] Fine-grained JSArrayBuffer tracking
>
> Track based on JSArrayBuffer addresses instead of the attached backing store.
> This way we can later on iterate buffers on a single page.
>
> BUG=chromium:581412
> LOG=N
> R=jochen@chromium.org, hpayer@chromium.org
>
> Committed: https://crrev.com/4cdf71e2d07c038d7af84e41c6e6d0093af0f0de
> Cr-Commit-Position: refs/heads/master@{#36140}

TBR=hpayer@chromium.org,jochen@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:581412

Review-Url: https://codereview.chromium.org/1961403002
Cr-Commit-Position: refs/heads/master@{#36143}
mlippautz authored and Commit bot committed May 10, 2016
1 parent f2d649a commit c0fe26d
Showing 8 changed files with 93 additions and 359 deletions.
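
For context on what is being reverted: the CL changed only the shape of the tracker's maps — entries keyed by the JSArrayBuffer's heap address (so all buffers on one page can be found with a key-range scan) instead of by backing-store pointer. A minimal standalone sketch of the two shapes, using illustrative type names that are not part of the V8 API:

#include <cstddef>
#include <map>
#include <utility>

// Restored scheme: one entry per backing store, keyed by the raw pointer.
using StoreKeyedMap = std::map<void*, size_t>;  // backing store -> byte length

// Reverted scheme: keyed by the JSArrayBuffer's own address, so the tracker
// can visit all buffers residing on one page via lower_bound/upper_bound.
using BufferKeyedMap =
    std::map<void*, std::pair<void*, size_t>>;  // buffer -> (store, length)

int main() {
  BufferKeyedMap tracked;
  static char page[256];  // stand-in for a heap page
  static char store[32];  // stand-in for an externally allocated store
  tracked[page + 8] = std::make_pair(static_cast<void*>(store), sizeof(store));
  // Iterate only the buffers on [page, page + sizeof(page)):
  auto it = tracked.lower_bound(page);
  const auto end = tracked.lower_bound(page + sizeof(page));
  return (it != end) ? 0 : 1;  // finds the one tracked buffer on this "page"
}
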
161 changes: 61 additions & 100 deletions src/heap/array-buffer-tracker.cc
@@ -15,20 +15,18 @@ namespace internal {
 ArrayBufferTracker::~ArrayBufferTracker() {
   Isolate* isolate = heap()->isolate();
   size_t freed_memory = 0;
-  for (auto& buffer : live_old_gen_) {
-    isolate->array_buffer_allocator()->Free(buffer.second.first,
-                                            buffer.second.second);
-    freed_memory += buffer.second.second;
+  for (auto& buffer : live_array_buffers_) {
+    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
   }
-  for (auto& buffer : live_young_gen_) {
-    isolate->array_buffer_allocator()->Free(buffer.second.first,
-                                            buffer.second.second);
-    freed_memory += buffer.second.second;
+  for (auto& buffer : live_array_buffers_for_scavenge_) {
+    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
   }
-  live_old_gen_.clear();
-  live_young_gen_.clear();
-  not_yet_discovered_old_gen_.clear();
-  not_yet_discovered_young_gen_.clear();
+  live_array_buffers_.clear();
+  live_array_buffers_for_scavenge_.clear();
+  not_yet_discovered_array_buffers_.clear();
+  not_yet_discovered_array_buffers_for_scavenge_.clear();
 
   if (freed_memory > 0) {
     heap()->update_amount_of_external_allocated_memory(
@@ -44,13 +42,9 @@ void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
   bool in_new_space = heap()->InNewSpace(buffer);
   size_t length = NumberToSize(heap()->isolate(), buffer->byte_length());
   if (in_new_space) {
-    live_young_gen_[buffer->address()] = std::make_pair(data, length);
-    not_yet_discovered_young_gen_[buffer->address()] =
-        std::make_pair(data, length);
+    live_array_buffers_for_scavenge_[data] = length;
   } else {
-    live_old_gen_[buffer->address()] = std::make_pair(data, length);
-    not_yet_discovered_old_gen_[buffer->address()] =
-        std::make_pair(data, length);
+    live_array_buffers_[data] = length;
   }
 
   // We may go over the limit of externally allocated memory here. We call the
@@ -65,115 +59,82 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
   if (!data) return;
 
   bool in_new_space = heap()->InNewSpace(buffer);
-  Key key = buffer->address();
-  TrackingMap* live_buffers = in_new_space ? &live_young_gen_ : &live_old_gen_;
-  TrackingMap* not_yet_discovered_buffers = in_new_space
-                                                ? &not_yet_discovered_young_gen_
-                                                : &not_yet_discovered_old_gen_;
+  std::map<void*, size_t>* live_buffers =
+      in_new_space ? &live_array_buffers_for_scavenge_ : &live_array_buffers_;
+  std::map<void*, size_t>* not_yet_discovered_buffers =
+      in_new_space ? &not_yet_discovered_array_buffers_for_scavenge_
+                   : &not_yet_discovered_array_buffers_;
 
-  DCHECK(live_buffers->count(key) > 0);
+  DCHECK(live_buffers->count(data) > 0);
 
-  size_t length = (*live_buffers)[key].second;
-  live_buffers->erase(key);
-  not_yet_discovered_buffers->erase(key);
+  size_t length = (*live_buffers)[data];
+  live_buffers->erase(data);
+  not_yet_discovered_buffers->erase(data);
 
   heap()->update_amount_of_external_allocated_memory(
       -static_cast<int64_t>(length));
 }
 
 
+void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+  void* data = buffer->backing_store();
+
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == heap()->undefined_value()) return;
+  if (heap()->InNewSpace(buffer)) {
+    not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+  } else {
+    not_yet_discovered_array_buffers_.erase(data);
+  }
+}
+
+
 void ArrayBufferTracker::FreeDead(bool from_scavenge) {
   size_t freed_memory = 0;
   Isolate* isolate = heap()->isolate();
-  for (auto& buffer : not_yet_discovered_young_gen_) {
-    isolate->array_buffer_allocator()->Free(buffer.second.first,
-                                            buffer.second.second);
-    freed_memory += buffer.second.second;
-    live_young_gen_.erase(buffer.first);
+  for (auto& buffer : not_yet_discovered_array_buffers_for_scavenge_) {
+    isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
+    freed_memory += buffer.second;
+    live_array_buffers_for_scavenge_.erase(buffer.first);
   }
 
   if (!from_scavenge) {
-    for (auto& buffer : not_yet_discovered_old_gen_) {
-      isolate->array_buffer_allocator()->Free(buffer.second.first,
-                                              buffer.second.second);
-      freed_memory += buffer.second.second;
-      live_old_gen_.erase(buffer.first);
+    for (auto& buffer : not_yet_discovered_array_buffers_) {
+      isolate->array_buffer_allocator()->Free(buffer.first, buffer.second);
+      freed_memory += buffer.second;
+      live_array_buffers_.erase(buffer.first);
     }
   }
 
-  not_yet_discovered_young_gen_ = live_young_gen_;
-  if (!from_scavenge) not_yet_discovered_old_gen_ = live_old_gen_;
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+  if (!from_scavenge) not_yet_discovered_array_buffers_ = live_array_buffers_;
 
   // Do not call through the api as this code is triggered while doing a GC.
   heap()->update_amount_of_external_allocated_memory(
      -static_cast<int64_t>(freed_memory));
 }
 
-#define UPDATE_GUARD(buffer, data) \
-  if (buffer->is_external()) return; \
-  data = buffer->backing_store(); \
-  if (data == nullptr) return; \
-  if (data == heap()->undefined_value()) return; \
-  base::LockGuard<base::Mutex> guard(&mutex_);
-
-void ArrayBufferTracker::MarkLive(JSArrayBuffer* buffer) {
-  void* data = nullptr;
-  UPDATE_GUARD(buffer, data);
-
-  if (heap()->InNewSpace(buffer)) {
-    not_yet_discovered_young_gen_.erase(buffer->address());
-  } else {
-    not_yet_discovered_old_gen_.erase(buffer->address());
-  }
-}
-
-void ArrayBufferTracker::Promote(JSArrayBuffer* new_buffer,
-                                 JSArrayBuffer* old_buffer) {
-  void* data = nullptr;
-  UPDATE_GUARD(new_buffer, data);
-
-  Key new_key = new_buffer->address();
-  Key old_key = old_buffer->address();
-  DCHECK(live_young_gen_.count(old_key) > 0);
-  live_old_gen_[new_key] = live_young_gen_[old_key];
-  live_young_gen_.erase(old_key);
-  not_yet_discovered_young_gen_.erase(old_key);
-}
-
-void ArrayBufferTracker::Compact(JSArrayBuffer* new_buffer,
-                                 JSArrayBuffer* old_buffer) {
-  void* data = nullptr;
-  UPDATE_GUARD(new_buffer, data);
-
-  Key new_key = new_buffer->address();
-  Key old_key = old_buffer->address();
-  DCHECK_NE(new_key, old_key);
-  DCHECK(live_old_gen_.count(old_key) > 0);
-  live_old_gen_[new_key] = live_old_gen_[old_key];
-  live_old_gen_.erase(old_key);
-  not_yet_discovered_old_gen_.erase(old_key);
-}
-
-void ArrayBufferTracker::SemiSpaceCopy(JSArrayBuffer* new_buffer,
-                                       JSArrayBuffer* old_buffer) {
-  void* data = nullptr;
-  UPDATE_GUARD(new_buffer, data);
-
-  Key new_key = new_buffer->address();
-  Key old_key = old_buffer->address();
-  DCHECK(live_young_gen_.count(old_key) > 0);
-  live_young_gen_[new_key] = live_young_gen_[old_key];
-  live_young_gen_.erase(old_key);
-  not_yet_discovered_young_gen_.erase(old_key);
-}
-
-#undef UPDATE_GUARD
-
-bool ArrayBufferTracker::IsTrackedInOldGenForTesting(JSArrayBuffer* buffer) {
-  return live_old_gen_.find(buffer->address()) != live_old_gen_.end();
-}
-
-bool ArrayBufferTracker::IsTrackedInYoungGenForTesting(JSArrayBuffer* buffer) {
-  return live_young_gen_.find(buffer->address()) != live_young_gen_.end();
-}
+void ArrayBufferTracker::PrepareDiscoveryInNewSpace() {
+  not_yet_discovered_array_buffers_for_scavenge_ =
+      live_array_buffers_for_scavenge_;
+}
+
+void ArrayBufferTracker::Promote(JSArrayBuffer* buffer) {
+  base::LockGuard<base::Mutex> guard(&mutex_);
+
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == heap()->undefined_value()) return;
+  DCHECK(live_array_buffers_for_scavenge_.count(data) > 0);
+  live_array_buffers_[data] = live_array_buffers_for_scavenge_[data];
+  live_array_buffers_for_scavenge_.erase(data);
+  not_yet_discovered_array_buffers_for_scavenge_.erase(data);
+}
 
 }  // namespace internal
77 changes: 26 additions & 51 deletions src/heap/array-buffer-tracker.h
@@ -19,14 +19,11 @@ class JSArrayBuffer;

 class ArrayBufferTracker {
  public:
-  typedef void* Key;
-
-  enum CallbackResult { kKeepEntry, kRemoveEntry };
-  enum ListType { kNewSpace, kOldSpace };
-
   explicit ArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~ArrayBufferTracker();
 
+  inline Heap* heap() { return heap_; }
+
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
 
@@ -43,58 +40,36 @@ class ArrayBufferTracker {
   // marking or scavenge phase.
   void FreeDead(bool from_scavenge);
 
-  // Update methods used to update the tracking state of given ArrayBuffers.
-  void Promote(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
-  void SemiSpaceCopy(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
-  void Compact(JSArrayBuffer* new_buffer, JSArrayBuffer* old_buffer);
-
-  // Callback should be of type:
-  //   CallbackResult fn(Key);
-  template <typename Callback>
-  void IterateNotYetDiscoveredEntries(ListType list, Key from, Key to,
-                                      Callback callback) {
-    TrackingMap::iterator it =
-        list == kNewSpace ? not_yet_discovered_young_gen_.lower_bound(from)
-                          : not_yet_discovered_old_gen_.lower_bound(from);
-    const TrackingMap::iterator end =
-        list == kNewSpace ? not_yet_discovered_young_gen_.upper_bound(to)
-                          : not_yet_discovered_old_gen_.upper_bound(to);
-    {
-      base::LockGuard<base::Mutex> guard(&mutex_);
-      while (it != end) {
-        if (callback(it->first) == kKeepEntry) {
-          ++it;
-        } else {
-          live_old_gen_.erase(it++);
-        }
-      }
-    }
-  }
-
-  bool IsTrackedInOldGenForTesting(JSArrayBuffer* buffer);
-  bool IsTrackedInYoungGenForTesting(JSArrayBuffer* buffer);
+  // Prepare for a new scavenge phase. A new marking phase is implicitly
+  // prepared by finishing the previous one.
+  void PrepareDiscoveryInNewSpace();
 
- private:
-  typedef std::map<Key, std::pair<void*, size_t>> TrackingMap;
-
-  inline Heap* heap() { return heap_; }
+  // An ArrayBuffer moved from new space to old space.
+  void Promote(JSArrayBuffer* buffer);
 
+ private:
   base::Mutex mutex_;
   Heap* heap_;
 
-  // |live_*| maps tracked JSArrayBuffers to the internally allocated backing
-  // store and length.
-  // For each GC round (Scavenger, or incremental/full MC)
-  // |not_yet_discovered_*| is initialized as a copy of |live_*|. Upon finding
-  // a JSArrayBuffer during GC, the buffer is removed from
-  // |not_yet_discovered_*|. At the end of a GC, we free up the remaining
-  // JSArrayBuffers in |not_yet_discovered_*|.
-  TrackingMap live_old_gen_;
-  TrackingMap not_yet_discovered_old_gen_;
-  TrackingMap live_young_gen_;
-  TrackingMap not_yet_discovered_young_gen_;
+  // |live_array_buffers_| maps externally allocated memory used as backing
+  // store for ArrayBuffers to the length of the respective memory blocks.
+  //
+  // At the beginning of mark/compact, |not_yet_discovered_array_buffers_| is
+  // a copy of |live_array_buffers_| and we remove pointers as we discover live
+  // ArrayBuffer objects during marking. At the end of mark/compact, the
+  // remaining memory blocks can be freed.
+  std::map<void*, size_t> live_array_buffers_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_;
+
+  // To be able to free memory held by ArrayBuffers during scavenge as well, we
+  // have a separate list of allocated memory held by ArrayBuffers in new space.
+  //
+  // Since mark/compact also evacuates the new space, all pointers in the
+  // |live_array_buffers_for_scavenge_| list are also in the
+  // |live_array_buffers_| list.
+  std::map<void*, size_t> live_array_buffers_for_scavenge_;
+  std::map<void*, size_t> not_yet_discovered_array_buffers_for_scavenge_;
 };
 
 }  // namespace internal
 }  // namespace v8
 #endif  // V8_HEAP_ARRAY_BUFFER_TRACKER_H_
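
Taken together, the restored tracker is a mark-and-sweep over side tables: each live map is copied into a not-yet-discovered twin before a GC round, discovery erases entries, Promote moves an entry from the scavenge list to the mark/compact list, and FreeDead releases whatever was never reached. A condensed standalone model of that flow — simplified allocation, no locking, and illustrative names rather than the V8 classes:

#include <cstddef>
#include <cstdlib>
#include <map>

class TwoListTracker {
 public:
  void RegisterNew(void* data, size_t length, bool in_new_space) {
    if (in_new_space) {
      live_for_scavenge_[data] = length;
    } else {
      live_[data] = length;
    }
  }
  // Before a scavenge, every new-space store starts out undiscovered.
  void PrepareDiscoveryInNewSpace() {
    undiscovered_for_scavenge_ = live_for_scavenge_;
  }
  // The GC proved the buffer alive without moving it out of its space.
  void MarkLive(void* data, bool in_new_space) {
    if (in_new_space) {
      undiscovered_for_scavenge_.erase(data);
    } else {
      undiscovered_.erase(data);
    }
  }
  // The buffer left new space: its store moves to the mark/compact list.
  void Promote(void* data) {
    live_[data] = live_for_scavenge_[data];
    live_for_scavenge_.erase(data);
    undiscovered_for_scavenge_.erase(data);
  }
  // A scavenge sweeps only new-space stores; a full GC sweeps both lists.
  size_t FreeDead(bool from_scavenge) {
    size_t freed = 0;
    for (auto& entry : undiscovered_for_scavenge_) {
      std::free(entry.first);
      freed += entry.second;
      live_for_scavenge_.erase(entry.first);
    }
    if (!from_scavenge) {
      for (auto& entry : undiscovered_) {
        std::free(entry.first);
        freed += entry.second;
        live_.erase(entry.first);
      }
    }
    undiscovered_for_scavenge_ = live_for_scavenge_;
    if (!from_scavenge) undiscovered_ = live_;
    return freed;
  }

 private:
  std::map<void*, size_t> live_;
  std::map<void*, size_t> undiscovered_;
  std::map<void*, size_t> live_for_scavenge_;
  std::map<void*, size_t> undiscovered_for_scavenge_;
};

int main() {
  TwoListTracker tracker;
  void* doomed = std::malloc(16);
  void* promoted = std::malloc(32);
  tracker.RegisterNew(doomed, 16, /*in_new_space=*/true);
  tracker.RegisterNew(promoted, 32, /*in_new_space=*/true);
  tracker.PrepareDiscoveryInNewSpace();
  tracker.Promote(promoted);  // survived the scavenge and left new space
  // |doomed| was never discovered, so the scavenge sweep frees its 16 bytes.
  size_t freed = tracker.FreeDead(/*from_scavenge=*/true);
  std::free(promoted);  // still tracked by the model; freed for a clean exit
  return freed == 16 ? 0 : 1;
}

A full mark/compact round has the same shape with both lists seeded and swept, which is what the from_scavenge flag in the restored FreeDead selects.
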
2 changes: 2 additions & 0 deletions src/heap/heap.cc
@@ -1626,6 +1626,8 @@ void Heap::Scavenge() {

   scavenge_collector_->SelectScavengingVisitorsTable();
 
+  array_buffer_tracker()->PrepareDiscoveryInNewSpace();
+
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
17 changes: 3 additions & 14 deletions src/heap/mark-compact.cc
@@ -1675,8 +1675,7 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     // If we end up needing more special cases, we should factor this out.
     if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
       heap_->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(target_object),
-          reinterpret_cast<JSArrayBuffer*>(object));
+          JSArrayBuffer::cast(target_object));
     }
     promoted_size_ += size;
     return true;
@@ -1685,9 +1684,7 @@ class MarkCompactCollector::EvacuateNewSpaceVisitor final
     AllocationSpace space = AllocateTargetObject(object, &target);
     MigrateObject(HeapObject::cast(target), object, size, space);
     if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->SemiSpaceCopy(
-          JSArrayBuffer::cast(target),
-          reinterpret_cast<JSArrayBuffer*>(object));
+      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(target));
     }
     semispace_copied_size_ += size;
     return true;
@@ -1814,7 +1811,7 @@ class MarkCompactCollector::EvacuateNewSpacePageVisitor final
   inline bool Visit(HeapObject* object) {
     if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
       object->GetHeap()->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(object), JSArrayBuffer::cast(object));
+          JSArrayBuffer::cast(object));
     }
     RecordMigratedSlotVisitor visitor;
     object->IterateBodyFast(&visitor);
@@ -1841,16 +1838,8 @@ class MarkCompactCollector::EvacuateOldSpaceVisitor final
     HeapObject* target_object = nullptr;
     if (TryEvacuateObject(target_space, object, &target_object)) {
       DCHECK(object->map_word().IsForwardingAddress());
-      if (V8_UNLIKELY(target_object->IsJSArrayBuffer())) {
-        heap_->array_buffer_tracker()->Compact(
-            JSArrayBuffer::cast(target_object),
-            reinterpret_cast<JSArrayBuffer*>(object));
-      }
       return true;
     }
-    if (V8_UNLIKELY(object->IsJSArrayBuffer())) {
-      heap_->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
-    }
     return false;
   }
 };
8 changes: 1 addition & 7 deletions src/heap/scavenger.cc
@@ -295,13 +295,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     DCHECK(map_word.IsForwardingAddress());
     HeapObject* target = map_word.ToForwardingAddress();
     if (!heap->InNewSpace(target)) {
-      heap->array_buffer_tracker()->Promote(
-          JSArrayBuffer::cast(target),
-          reinterpret_cast<JSArrayBuffer*>(object));
-    } else {
-      heap->array_buffer_tracker()->SemiSpaceCopy(
-          JSArrayBuffer::cast(target),
-          reinterpret_cast<JSArrayBuffer*>(object));
+      heap->array_buffer_tracker()->Promote(JSArrayBuffer::cast(target));
     }
   }

1 change: 0 additions & 1 deletion test/cctest/cctest.gyp
@@ -100,7 +100,6 @@
         'gay-shortest.cc',
         'heap/heap-tester.h',
         'heap/test-alloc.cc',
-        'heap/test-array-buffer-tracker.cc',
         'heap/test-compaction.cc',
         'heap/test-heap.cc',
         'heap/test-incremental-marking.cc',