Skip to content

Commit

Permalink
Version 4.4.63.8 (cherry-pick)
Browse files Browse the repository at this point in the history
Merged 29715d7

Reland "Keep track of array buffers in new space separately"

BUG=v8:3996
LOG=N
TBR=hpayer@chromium.org

Review URL: https://codereview.chromium.org/1188313006.

Cr-Commit-Position: refs/branch-heads/4.4@{#12}
Cr-Branched-From: 2e4c550-refs/heads/4.4.63@{#1}
Cr-Branched-From: 0208b8e-refs/heads/master@{#28333}
  • Loading branch information
jeisinger committed Jun 18, 2015
1 parent d389bfb commit e757ff1
Show file tree
Hide file tree
Showing 8 changed files with 177 additions and 38 deletions.
2 changes: 1 addition & 1 deletion include/v8-version.h
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
#define V8_MAJOR_VERSION 4
#define V8_MINOR_VERSION 4
#define V8_BUILD_NUMBER 63
#define V8_PATCH_LEVEL 7
#define V8_PATCH_LEVEL 8

// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
Expand Down
3 changes: 2 additions & 1 deletion src/api.cc
Original file line number Diff line number Diff line change
Expand Up @@ -6258,7 +6258,8 @@ v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
"ArrayBuffer already externalized");
self->set_is_external(true);
isolate->heap()->UnregisterArrayBuffer(self->backing_store());
isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
self->backing_store());

return GetContents();
}
Expand Down
140 changes: 115 additions & 25 deletions src/heap/heap.cc
Original file line number Diff line number Diff line change
Expand Up @@ -1561,6 +1561,8 @@ void Heap::Scavenge() {

incremental_marking()->PrepareForScavenge();

PrepareArrayBufferDiscoveryInNewSpace();

// Flip the semispaces. After flipping, to space is empty, from space has
// live objects.
new_space_.Flip();
Expand Down Expand Up @@ -1642,6 +1644,8 @@ void Heap::Scavenge() {
new_space_.LowerInlineAllocationLimit(
new_space_.inline_allocation_limit_step());

FreeDeadArrayBuffers(true);

// Update how much has survived scavenge.
IncrementYoungSurvivorsCounter(static_cast<int>(
(PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
Expand Down Expand Up @@ -1735,46 +1739,122 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
}


void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
// Records |data| as the backing store of a live array buffer in the given
// tracking map. Overwrites any previous length recorded for the same pointer.
void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
void* data, size_t length) {
live_buffers[data] = length;
}


// Drops |data| from both tracking maps when the backing store leaves V8's
// ownership (e.g. externalize/neuter). The buffer must currently be tracked.
void Heap::UnregisterArrayBufferHelper(
std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
DCHECK(live_buffers.count(data) > 0);
live_buffers.erase(data);
// Also remove it from the discovery set so it is not freed as "dead".
not_yet_discovered_buffers.erase(data);
}


// Marks |data| as discovered-live for the current GC cycle by removing it
// from the not-yet-discovered set; whatever remains there afterwards is dead.
void Heap::RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
not_yet_discovered_buffers.erase(data);
}


// Frees the backing store of every array buffer that was not discovered
// live during the preceding marking/scavenge phase, i.e. each entry still
// left in |not_yet_discovered_buffers|. Freed entries are also removed from
// |live_buffers|, and the discovery set is then reset to the full live set
// for the next GC cycle. Returns the total number of bytes released.
size_t Heap::FreeDeadArrayBuffersHelper(
    Isolate* isolate, std::map<void*, size_t>& live_buffers,
    std::map<void*, size_t>& not_yet_discovered_buffers) {
  size_t total_freed = 0;
  // Anything still in the discovery set is dead: release its backing store
  // and forget about it. Note |live_buffers| and
  // |not_yet_discovered_buffers| are distinct maps, so erasing from the
  // former while iterating the latter is safe.
  for (const auto& dead : not_yet_discovered_buffers) {
    void* const store = dead.first;
    const size_t byte_length = dead.second;
    isolate->array_buffer_allocator()->Free(store, byte_length);
    total_freed += byte_length;
    live_buffers.erase(store);
  }
  // Start the next cycle with nothing discovered yet.
  not_yet_discovered_buffers = live_buffers;
  return total_freed;
}


// Called on heap tear-down: unconditionally releases every tracked backing
// store in |live_buffers| and empties both tracking maps.
void Heap::TearDownArrayBuffersHelper(
    Isolate* isolate, std::map<void*, size_t>& live_buffers,
    std::map<void*, size_t>& not_yet_discovered_buffers) {
  for (const auto& entry : live_buffers) {
    isolate->array_buffer_allocator()->Free(entry.first, entry.second);
  }
  live_buffers.clear();
  not_yet_discovered_buffers.clear();
}


void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
size_t length) {
if (!data) return;
live_array_buffers_[data] = length;
RegisterNewArrayBufferHelper(
in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
length);
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(length);
}


void Heap::UnregisterArrayBuffer(void* data) {
void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
if (!data) return;
DCHECK(live_array_buffers_.count(data) > 0);
live_array_buffers_.erase(data);
not_yet_discovered_array_buffers_.erase(data);
UnregisterArrayBufferHelper(
in_new_space ? live_new_array_buffers_ : live_array_buffers_,
in_new_space ? not_yet_discovered_new_array_buffers_
: not_yet_discovered_array_buffers_,
data);
}


void Heap::RegisterLiveArrayBuffer(void* data) {
not_yet_discovered_array_buffers_.erase(data);
void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
// ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return;
RegisterLiveArrayBufferHelper(in_new_space
? not_yet_discovered_new_array_buffers_
: not_yet_discovered_array_buffers_,
data);
}


void Heap::FreeDeadArrayBuffers() {
for (auto buffer = not_yet_discovered_array_buffers_.begin();
buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
// Don't use the API method here since this could trigger another GC.
amount_of_external_allocated_memory_ -= buffer->second;
live_array_buffers_.erase(buffer->first);
void Heap::FreeDeadArrayBuffers(bool in_new_space) {
size_t freed_memory = FreeDeadArrayBuffersHelper(
isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
in_new_space ? not_yet_discovered_new_array_buffers_
: not_yet_discovered_array_buffers_);
if (freed_memory) {
reinterpret_cast<v8::Isolate*>(isolate_)
->AdjustAmountOfExternalAllocatedMemory(
-static_cast<int64_t>(freed_memory));
}
not_yet_discovered_array_buffers_ = live_array_buffers_;
}


void Heap::TearDownArrayBuffers() {
for (auto buffer = live_array_buffers_.begin();
buffer != live_array_buffers_.end(); ++buffer) {
isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
}
live_array_buffers_.clear();
not_yet_discovered_array_buffers_.clear();
TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
not_yet_discovered_array_buffers_);
TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
not_yet_discovered_new_array_buffers_);
}


// Prepares a new scavenge: every new-space buffer starts out undiscovered,
// so the discovery set is reset to the full live new-space set. Buffers
// still in the set when the scavenge finishes are freed as dead.
void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
}


// Moves the tracking entry for |obj|'s backing store from the new-space maps
// to the old-space map when the JSArrayBuffer is promoted by the GC.
// External buffers are not tracked here, so they are skipped.
void Heap::PromoteArrayBuffer(Object* obj) {
JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
if (buffer->is_external()) return;
void* data = buffer->backing_store();
if (!data) return;
// ArrayBuffer might be in the middle of being constructed.
if (data == undefined_value()) return;
DCHECK(live_new_array_buffers_.count(data) > 0);
// Transfer the length entry to the old-space live map, then drop the
// buffer from both new-space maps (live and not-yet-discovered).
live_array_buffers_[data] = live_new_array_buffers_[data];
live_new_array_buffers_.erase(data);
not_yet_discovered_new_array_buffers_.erase(data);
}


Expand Down Expand Up @@ -1986,6 +2066,7 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);

table_.Register(
kVisitNativeContext,
Expand Down Expand Up @@ -2015,9 +2096,6 @@ class ScavengingVisitor : public StaticVisitorBase {
table_.Register(kVisitJSWeakCollection,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

table_.Register(kVisitJSArrayBuffer,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

table_.Register(kVisitJSTypedArray,
&ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

Expand Down Expand Up @@ -2264,6 +2342,18 @@ class ScavengingVisitor : public StaticVisitorBase {
}


// Scavenge visitor for JSArrayBuffer: evacuates the object like any other
// pointer object, then checks where it landed. If the copy left new space,
// the buffer was promoted and its backing-store bookkeeping must follow
// (see Heap::PromoteArrayBuffer).
static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
HeapObject* object) {
ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);

// Visit() installed a forwarding pointer in the old copy's map word;
// follow it to find the new location.
Heap* heap = map->GetHeap();
MapWord map_word = object->map_word();
DCHECK(map_word.IsForwardingAddress());
HeapObject* target = map_word.ToForwardingAddress();
if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
}


static inline void EvacuateByteArray(Map* map, HeapObject** slot,
HeapObject* object) {
int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
Expand Down
45 changes: 40 additions & 5 deletions src/heap/heap.h
Original file line number Diff line number Diff line change
Expand Up @@ -1512,10 +1512,28 @@ class Heap {

bool deserialization_complete() const { return deserialization_complete_; }

void RegisterNewArrayBuffer(void* data, size_t length);
void UnregisterArrayBuffer(void* data);
void RegisterLiveArrayBuffer(void* data);
void FreeDeadArrayBuffers();
// The following methods are used to track raw C++ pointers to externally
// allocated memory used as backing store in live array buffers.

// A new ArrayBuffer was created with |data| as backing store.
void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);

// The backing store |data| is no longer owned by V8.
void UnregisterArrayBuffer(bool in_new_space, void* data);

// A live ArrayBuffer was discovered during marking/scavenge.
void RegisterLiveArrayBuffer(bool in_new_space, void* data);

// Frees all backing store pointers that weren't discovered in the previous
// marking or scavenge phase.
void FreeDeadArrayBuffers(bool in_new_space);

// Prepare for a new scavenge phase. A new marking phase is implicitly
// prepared by finishing the previous one.
void PrepareArrayBufferDiscoveryInNewSpace();

// An ArrayBuffer moved from new space to old space.
void PromoteArrayBuffer(Object* buffer);

protected:
// Methods made available to tests.
Expand Down Expand Up @@ -2034,9 +2052,24 @@ class Heap {
// the old space.
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);

// Called on heap tear-down.
// Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
void TearDownArrayBuffers();

// These correspond to the non-Helper versions.
void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
void* data, size_t length);
void UnregisterArrayBufferHelper(
std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
void RegisterLiveArrayBufferHelper(
std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
size_t FreeDeadArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);
void TearDownArrayBuffersHelper(
Isolate* isolate, std::map<void*, size_t>& live_buffers,
std::map<void*, size_t>& not_yet_discovered_buffers);

// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
Expand Down Expand Up @@ -2184,7 +2217,9 @@ class Heap {
bool concurrent_sweeping_enabled_;

std::map<void*, size_t> live_array_buffers_;
std::map<void*, size_t> live_new_array_buffers_;
std::map<void*, size_t> not_yet_discovered_array_buffers_;
std::map<void*, size_t> not_yet_discovered_new_array_buffers_;

struct StrongRootsList;
StrongRootsList* strong_roots_list_;
Expand Down
7 changes: 6 additions & 1 deletion src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -3131,6 +3131,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
#endif
if (allocation.To(&target)) {
MigrateObject(target, object, object_size, old_space->identity());
// If we end up needing more special cases, we should factor this out.
if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
heap()->PromoteArrayBuffer(target);
}
heap()->IncrementPromotedObjectsSize(object_size);
return true;
}
Expand Down Expand Up @@ -4437,7 +4441,6 @@ void MarkCompactCollector::SweepSpaces() {
#ifdef DEBUG
state_ = SWEEP_SPACES;
#endif
heap()->FreeDeadArrayBuffers();

MoveEvacuationCandidatesToEndOfPagesList();

Expand Down Expand Up @@ -4465,6 +4468,8 @@ void MarkCompactCollector::SweepSpaces() {

EvacuateNewSpaceAndCandidates();

heap()->FreeDeadArrayBuffers(false);

// ClearNonLiveReferences depends on precise sweeping of map space to
// detect whether unmarked map became dead in this collection or in one
// of the previous ones.
Expand Down
7 changes: 6 additions & 1 deletion src/heap/objects-visiting-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,10 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
heap,
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
if (!JSArrayBuffer::cast(object)->is_external()) {
heap->RegisterLiveArrayBuffer(true,
JSArrayBuffer::cast(object)->backing_store());
}
return JSArrayBuffer::kSizeWithInternalFields;
}

Expand Down Expand Up @@ -532,7 +536,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
if (!JSArrayBuffer::cast(object)->is_external()) {
heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
heap->RegisterLiveArrayBuffer(heap->InNewSpace(object),
JSArrayBuffer::cast(object)->backing_store());
}
}

Expand Down
5 changes: 3 additions & 2 deletions src/objects.cc
Original file line number Diff line number Diff line change
Expand Up @@ -17100,10 +17100,11 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
void* backing_store =
isolate->array_buffer_allocator()->AllocateUninitialized(
fixed_typed_array->DataSize());
isolate->heap()->RegisterNewArrayBuffer(backing_store,
fixed_typed_array->DataSize());
buffer->set_backing_store(backing_store);
buffer->set_is_external(false);
isolate->heap()->RegisterNewArrayBuffer(isolate->heap()->InNewSpace(*buffer),
backing_store,
fixed_typed_array->DataSize());
memcpy(buffer->backing_store(),
fixed_typed_array->DataPtr(),
fixed_typed_array->DataSize());
Expand Down
6 changes: 4 additions & 2 deletions src/runtime/runtime-typedarray.cc
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
array_buffer->set_byte_length(*byte_length);

if (data && !is_external) {
isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
isolate->heap()->RegisterNewArrayBuffer(
isolate->heap()->InNewSpace(*array_buffer), data, allocated_length);
}
}

Expand Down Expand Up @@ -143,7 +144,8 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
array_buffer->set_is_external(true);
Runtime::NeuterArrayBuffer(array_buffer);
isolate->heap()->UnregisterArrayBuffer(backing_store);
isolate->heap()->UnregisterArrayBuffer(
isolate->heap()->InNewSpace(*array_buffer), backing_store);
isolate->array_buffer_allocator()->Free(backing_store, byte_length);
return isolate->heap()->undefined_value();
}
Expand Down

0 comments on commit e757ff1

Please sign in to comment.