This repository has been archived by the owner on Apr 3, 2020. It is now read-only.

Revert of [heap] Improve size profiling for ArrayBuffer tracking (patchset #6 id:140001 of https://codereview.chromium.org/2210263002/ )

Reason for revert:
Tanks octane

Original issue's description:
> [heap] Improve size profiling for ArrayBuffer tracking
>
> Eagerly account for retained sizes during ArrayBuffer tracking. Following up on this,
> we can now do Scavenges if the amount of memory retained from new space is too large.
>
> BUG=chromium:621829
> R=jochen@chromium.org,hpayer@chromium.org
>
> Committed: https://crrev.com/28e13bd6a75c9467dae43043e7b741a1387d5252
> Cr-Commit-Position: refs/heads/master@{#38731}

TBR=jochen@chromium.org,hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:621829

Review-Url: https://codereview.chromium.org/2261513003
Cr-Commit-Position: refs/heads/master@{#38739}
mlippautz authored and Commit bot committed Aug 19, 2016
1 parent ed08838 commit fbf1bc6
Showing 9 changed files with 83 additions and 236 deletions.
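
The reverted change, as described in the original issue above, eagerly tracked how many backing-store bytes were retained from new space versus old space so that a Scavenge could be requested when the new-space share grew too large; the removed retained_from_new_space_ / retained_from_old_space_ counters are visible in the diff below. The following is a minimal, self-contained C++ sketch of that bookkeeping idea only, with hypothetical names (RetainedSizeTracker, ShouldScavenge, the 8 MB limit), not the reverted V8 implementation:

// Standalone illustration of per-space retained-size accounting for
// externally allocated ArrayBuffer backing stores. Hypothetical sketch,
// not the reverted V8 code.
#include <atomic>
#include <cstddef>
#include <cstdio>

class RetainedSizeTracker {
 public:
  // The limit is illustrative; in V8 the scavenge heuristic would live in
  // the garbage collector, not in the tracker itself.
  explicit RetainedSizeTracker(size_t new_space_limit)
      : new_space_limit_(new_space_limit) {}

  // Mirrors RegisterNew: add |length| to the counter of the owning space.
  void Register(size_t length, bool in_new_space) {
    Counter(in_new_space).fetch_add(length, std::memory_order_relaxed);
  }

  // Mirrors Unregister: subtract |length| again when the buffer dies.
  void Unregister(size_t length, bool in_new_space) {
    Counter(in_new_space).fetch_sub(length, std::memory_order_relaxed);
  }

  // The reverted CL's idea: request a Scavenge once too much external
  // memory is retained from new space.
  bool ShouldScavenge() const {
    return retained_from_new_space_.load(std::memory_order_relaxed) >
           new_space_limit_;
  }

 private:
  std::atomic<size_t>& Counter(bool in_new_space) {
    return in_new_space ? retained_from_new_space_ : retained_from_old_space_;
  }

  std::atomic<size_t> retained_from_new_space_{0};
  std::atomic<size_t> retained_from_old_space_{0};
  const size_t new_space_limit_;
};

int main() {
  RetainedSizeTracker tracker(8 * 1024 * 1024);  // 8 MB, illustrative only.
  tracker.Register(16 * 1024 * 1024, /*in_new_space=*/true);
  std::printf("scavenge needed: %s\n", tracker.ShouldScavenge() ? "yes" : "no");
  return 0;
}

The diff below shows exactly this bookkeeping being removed again, with the tracker returning to static methods that report directly to the heap's external-memory counters.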
include/v8.h (2 changes: 1 addition & 1 deletion)
@@ -7605,7 +7605,7 @@ class Internals {
kExternalMemoryOffset + kApiInt64Size;
static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
kApiInt64Size + kApiInt64Size +
kApiPointerSize;
kApiPointerSize + kApiPointerSize;
static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5;
static const int kNullValueRootIndex = 6;
src/heap/array-buffer-tracker-inl.h (18 changes: 4 additions & 14 deletions)
@@ -10,7 +10,7 @@
namespace v8 {
namespace internal {

void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;

@@ -26,18 +26,13 @@ void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
tracker->Add(buffer, length);
}
if (page->InNewSpace()) {
retained_from_new_space_.Increment(length);
} else {
retained_from_old_space_.Increment(length);
}
// We may go over the limit of externally allocated memory here. We call the
// api function to trigger a GC in this case.
reinterpret_cast<v8::Isolate*>(heap_->isolate())
reinterpret_cast<v8::Isolate*>(heap->isolate())
->AdjustAmountOfExternalAllocatedMemory(length);
}

void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
void* data = buffer->backing_store();
if (!data) return;

@@ -49,12 +44,7 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
DCHECK_NOT_NULL(tracker);
length = tracker->Remove(buffer);
}
if (page->InNewSpace()) {
retained_from_new_space_.Decrement(length);
} else {
retained_from_old_space_.Decrement(length);
}
heap_->update_external_memory(-static_cast<int64_t>(length));
heap->update_external_memory(-static_cast<intptr_t>(length));
}

void LocalArrayBufferTracker::Add(Key key, const Value& value) {
src/heap/array-buffer-tracker.cc (117 changes: 43 additions & 74 deletions)
@@ -14,7 +14,7 @@ LocalArrayBufferTracker::~LocalArrayBufferTracker() {
}

template <LocalArrayBufferTracker::FreeMode free_mode>
LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
void LocalArrayBufferTracker::Free() {
size_t freed_memory = 0;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
@@ -30,85 +30,68 @@ LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
++it;
}
}
return ProcessResult(freed_memory, 0);
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
}

template <typename Callback>
LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Process(
Callback callback) {
void LocalArrayBufferTracker::Process(Callback callback) {
JSArrayBuffer* new_buffer = nullptr;
size_t freed_memory = 0;
size_t promoted_memory = 0;
size_t len = 0;
Page* target_page = nullptr;
LocalArrayBufferTracker* tracker = nullptr;
for (TrackingData::iterator it = array_buffers_.begin();
it != array_buffers_.end();) {
switch (callback(it->first, &new_buffer)) {
case kKeepEntry:
++it;
break;
case kUpdateEntry:
DCHECK_NOT_NULL(new_buffer);
target_page = Page::FromAddress(new_buffer->address());
// We need to lock the target page because we cannot guarantee
// exclusive access to new space pages.
if (target_page->InNewSpace()) target_page->mutex()->Lock();
const CallbackResult result = callback(it->first, &new_buffer);
if (result == kKeepEntry) {
++it;
} else if (result == kUpdateEntry) {
DCHECK_NOT_NULL(new_buffer);
Page* target_page = Page::FromAddress(new_buffer->address());
// We need to lock the target page because we cannot guarantee
// exclusive access to new space pages.
if (target_page->InNewSpace()) target_page->mutex()->Lock();
LocalArrayBufferTracker* tracker = target_page->local_tracker();
if (tracker == nullptr) {
target_page->AllocateLocalTracker();
tracker = target_page->local_tracker();
if (tracker == nullptr) {
target_page->AllocateLocalTracker();
tracker = target_page->local_tracker();
}
DCHECK_NOT_NULL(tracker);
len = it->second;
tracker->Add(new_buffer, len);
if (target_page->InNewSpace()) {
target_page->mutex()->Unlock();
} else {
promoted_memory += len;
}
it = array_buffers_.erase(it);
break;
case kRemoveEntry:
len = it->second;
heap_->isolate()->array_buffer_allocator()->Free(
it->first->backing_store(), len);
freed_memory += len;
it = array_buffers_.erase(it);
break;
}
DCHECK_NOT_NULL(tracker);
tracker->Add(new_buffer, it->second);
if (target_page->InNewSpace()) target_page->mutex()->Unlock();
it = array_buffers_.erase(it);
} else if (result == kRemoveEntry) {
const size_t len = it->second;
heap_->isolate()->array_buffer_allocator()->Free(
it->first->backing_store(), len);
freed_memory += len;
it = array_buffers_.erase(it);
} else {
UNREACHABLE();
}
}
return ProcessResult(freed_memory, promoted_memory);
}

void ArrayBufferTracker::AccountForConcurrentlyFreedMemory() {
heap_->update_external_memory(
static_cast<int64_t>(concurrently_freed_.Value()));
concurrently_freed_.SetValue(0);
if (freed_memory > 0) {
heap_->update_external_memory_concurrently_freed(
static_cast<intptr_t>(freed_memory));
}
}

void ArrayBufferTracker::FreeDeadInNewSpace() {
DCHECK_EQ(heap_->gc_state(), Heap::HeapState::SCAVENGE);
for (Page* page : NewSpacePageRange(heap_->new_space()->FromSpaceStart(),
heap_->new_space()->FromSpaceEnd())) {
void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),
heap->new_space()->FromSpaceEnd())) {
bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
CHECK(empty);
}
AccountForConcurrentlyFreedMemory();
heap->account_external_memory_concurrently_freed();
}

void ArrayBufferTracker::FreeDead(Page* page) {
// Callers need to ensure having the page lock.
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
DCHECK(!page->SweepingDone());
LocalArrayBufferTracker::ProcessResult result =
tracker->Free<LocalArrayBufferTracker::kFreeDead>();
if (page->InNewSpace()) {
retained_from_new_space_.Decrement(result.freed);
} else {
retained_from_old_space_.Decrement(result.freed);
}
tracker->Free<LocalArrayBufferTracker::kFreeDead>();
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
@@ -117,14 +100,7 @@ void ArrayBufferTracker::FreeDead(Page* page) {
void ArrayBufferTracker::FreeAll(Page* page) {
LocalArrayBufferTracker* tracker = page->local_tracker();
if (tracker == nullptr) return;
LocalArrayBufferTracker::ProcessResult result =
tracker->Free<LocalArrayBufferTracker::kFreeAll>();
concurrently_freed_.Increment(result.freed);
if (page->InNewSpace()) {
retained_from_new_space_.Decrement(result.freed);
} else {
retained_from_old_space_.Decrement(result.freed);
}
tracker->Free<LocalArrayBufferTracker::kFreeAll>();
if (tracker->IsEmpty()) {
page->ReleaseLocalTracker();
}
@@ -135,7 +111,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
if (tracker == nullptr) return true;

DCHECK(page->SweepingDone());
LocalArrayBufferTracker::ProcessResult result = tracker->Process(
tracker->Process(
[mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
MapWord map_word = old_buffer->map_word();
if (map_word.IsForwardingAddress()) {
@@ -146,13 +122,6 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
? LocalArrayBufferTracker::kKeepEntry
: LocalArrayBufferTracker::kRemoveEntry;
});
concurrently_freed_.Increment(result.freed);
if (page->InNewSpace()) {
retained_from_new_space_.Decrement(result.freed + result.promoted);
} else {
retained_from_old_space_.Decrement(result.freed);
}
retained_from_old_space_.Increment(result.promoted);
return tracker->IsEmpty();
}

src/heap/array-buffer-tracker.h (57 changes: 13 additions & 44 deletions)
@@ -8,7 +8,6 @@
#include <unordered_map>

#include "src/allocation.h"
#include "src/base/atomic-utils.h"
#include "src/base/platform/mutex.h"
#include "src/globals.h"

@@ -19,61 +18,40 @@ class Heap;
class JSArrayBuffer;
class Page;

class ArrayBufferTracker {
class ArrayBufferTracker : public AllStatic {
public:
enum ProcessingMode {
kUpdateForwardedRemoveOthers,
kUpdateForwardedKeepOthers,
};

// Returns whether a buffer is currently tracked.
static bool IsTracked(JSArrayBuffer* buffer);

explicit ArrayBufferTracker(Heap* heap)
: heap_(heap),
concurrently_freed_(0),
retained_from_new_space_(0),
retained_from_old_space_(0) {}

// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.

// Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
// access to the tracker by taking the page lock for the corresponding page.
inline void RegisterNew(JSArrayBuffer* buffer);
inline void Unregister(JSArrayBuffer* buffer);
inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
inline static void Unregister(Heap* heap, JSArrayBuffer* buffer);

// Frees all backing store pointers for dead JSArrayBuffers in new space.
// Does not take any locks and can only be called during Scavenge.
void FreeDeadInNewSpace();
static void FreeDeadInNewSpace(Heap* heap);

// Frees all backing store pointers for dead JSArrayBuffer on a given page.
// Requires marking information to be present. Requires the page lock to be
// taken by the caller.
void FreeDead(Page* page);
static void FreeDead(Page* page);

// Frees all remaining, live or dead, array buffers on a page. Only useful
// during tear down.
void FreeAll(Page* page);
static void FreeAll(Page* page);

// Processes all array buffers on a given page. |mode| specifies the action
// to perform on the buffers. Returns whether the tracker is empty or not.
bool ProcessBuffers(Page* page, ProcessingMode mode);
static bool ProcessBuffers(Page* page, ProcessingMode mode);

void AccountForConcurrentlyFreedMemory();

size_t retained_from_new_space() { return retained_from_new_space_.Value(); }
size_t retained_from_old_space() { return retained_from_old_space_.Value(); }

private:
Heap* heap_;

base::AtomicNumber<size_t> concurrently_freed_;

// Number of bytes retained from new space.
base::AtomicNumber<size_t> retained_from_new_space_;
// Number of bytes retained from old space.
base::AtomicNumber<size_t> retained_from_old_space_;
// Returns whether a buffer is currently tracked.
static bool IsTracked(JSArrayBuffer* buffer);
};

// LocalArrayBufferTracker tracks internalized array buffers.
@@ -87,32 +65,23 @@ class LocalArrayBufferTracker {
enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
enum FreeMode { kFreeDead, kFreeAll };

struct ProcessResult {
ProcessResult(size_t freed, size_t promoted)
: freed(freed), promoted(promoted) {}

size_t freed;
size_t promoted;
};

explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
~LocalArrayBufferTracker();

inline void Add(Key key, const Value& value);
inline Value Remove(Key key);

// Frees up array buffers determined by |free_mode|. Returns statistics in
// ProcessResult.
// Frees up array buffers determined by |free_mode|.
template <FreeMode free_mode>
ProcessResult Free();
void Free();

// Processes buffers one by one. The CallbackResult of the callback decides
// what action to take on the buffer. Returns statistics in ProcessResult.
// what action to take on the buffer.
//
// Callback should be of type:
// CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
template <typename Callback>
ProcessResult Process(Callback callback);
void Process(Callback callback);

bool IsEmpty() { return array_buffers_.empty(); }

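
The comments in src/heap/array-buffer-tracker.h above document the callback contract of LocalArrayBufferTracker::Process: for each tracked buffer the callback returns kKeepEntry, kUpdateEntry (re-register under the forwarded address), or kRemoveEntry (free the backing store). Below is a standalone, simplified sketch of that keep/update/remove pattern, using hypothetical stand-in types (Buffer, LocalTracker) rather than the V8 classes:

// Simplified illustration of the keep/update/remove callback pattern used by
// LocalArrayBufferTracker::Process. Hypothetical stand-in types; not V8 code.
#include <cstddef>
#include <cstdio>
#include <unordered_map>
#include <utility>
#include <vector>

enum class CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };

struct Buffer {
  int id;                       // Stand-in for a JSArrayBuffer.
  Buffer* forwarded = nullptr;  // Non-null if "the GC moved this buffer".
};

class LocalTracker {
 public:
  void Add(Buffer* buffer, size_t length) { buffers_[buffer] = length; }

  // The callback decides, per buffer, whether to keep the entry, re-register
  // it under |*new_buffer|, or drop it; returns the bytes "freed".
  template <typename Callback>
  size_t Process(Callback callback) {
    size_t freed = 0;
    std::vector<std::pair<Buffer*, size_t>> updated;  // re-added after the loop
    for (auto it = buffers_.begin(); it != buffers_.end();) {
      Buffer* new_buffer = nullptr;
      switch (callback(it->first, &new_buffer)) {
        case CallbackResult::kKeepEntry:
          ++it;
          break;
        case CallbackResult::kUpdateEntry:
          // V8 moves the entry to the tracker of the page that now holds the
          // buffer; this sketch just re-registers it locally after the loop.
          updated.emplace_back(new_buffer, it->second);
          it = buffers_.erase(it);
          break;
        case CallbackResult::kRemoveEntry:
          freed += it->second;  // The real code frees the backing store here.
          it = buffers_.erase(it);
          break;
      }
    }
    for (const auto& entry : updated) Add(entry.first, entry.second);
    return freed;
  }

  bool IsEmpty() const { return buffers_.empty(); }

 private:
  std::unordered_map<Buffer*, size_t> buffers_;
};

int main() {
  Buffer live{1}, moved_to{2}, moved{3, &moved_to}, dead{4};
  LocalTracker tracker;
  tracker.Add(&live, 100);
  tracker.Add(&moved, 200);
  tracker.Add(&dead, 300);

  // Keep live buffers, follow forwarding pointers, drop everything else.
  size_t freed = tracker.Process([&](Buffer* old_buffer, Buffer** new_buffer) {
    if (old_buffer->forwarded != nullptr) {
      *new_buffer = old_buffer->forwarded;
      return CallbackResult::kUpdateEntry;
    }
    return old_buffer == &live ? CallbackResult::kKeepEntry
                               : CallbackResult::kRemoveEntry;
  });
  std::printf("freed %zu bytes, tracker empty: %d\n", freed,
              static_cast<int>(tracker.IsEmpty()));
  return 0;
}

In the real tracker a kUpdateEntry result moves the entry to the tracker of the page that now holds the buffer (locking that page if it is in new space), which the sketch simplifies to re-adding the entry locally.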