diff --git a/include/v8-version.h b/include/v8-version.h
index 6bfa9c931f7..98a97318a3e 100644
--- a/include/v8-version.h
+++ b/include/v8-version.h
@@ -11,7 +11,7 @@
 #define V8_MAJOR_VERSION 4
 #define V8_MINOR_VERSION 4
 #define V8_BUILD_NUMBER 63
-#define V8_PATCH_LEVEL 7
+#define V8_PATCH_LEVEL 8
 
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
diff --git a/src/api.cc b/src/api.cc
index daaaf765e23..01441351c5a 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -6258,7 +6258,8 @@ v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
   Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
                   "ArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
+                                         self->backing_store());
   return GetContents();
 }
 
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index b15d75ad684..b9d5e3bf38e 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -1561,6 +1561,8 @@ void Heap::Scavenge() {
 
   incremental_marking()->PrepareForScavenge();
 
+  PrepareArrayBufferDiscoveryInNewSpace();
+
   // Flip the semispaces.  After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1642,6 +1644,8 @@ void Heap::Scavenge() {
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
 
+  FreeDeadArrayBuffers(true);
+
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
@@ -1735,46 +1739,122 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
 
-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
+void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                        void* data, size_t length) {
+  live_buffers[data] = length;
+}
+
+
+void Heap::UnregisterArrayBufferHelper(
+    std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  DCHECK(live_buffers.count(data) > 0);
+  live_buffers.erase(data);
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+void Heap::RegisterLiveArrayBufferHelper(
+    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
+  not_yet_discovered_buffers.erase(data);
+}
+
+
+size_t Heap::FreeDeadArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  size_t freed_memory = 0;
+  for (auto buffer = not_yet_discovered_buffers.begin();
+       buffer != not_yet_discovered_buffers.end(); ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    freed_memory += buffer->second;
+    live_buffers.erase(buffer->first);
+  }
+  not_yet_discovered_buffers = live_buffers;
+  return freed_memory;
+}
+
+
+void Heap::TearDownArrayBuffersHelper(
+    Isolate* isolate, std::map<void*, size_t>& live_buffers,
+    std::map<void*, size_t>& not_yet_discovered_buffers) {
+  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
+       ++buffer) {
+    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_buffers.clear();
+  not_yet_discovered_buffers.clear();
+}
+
+
+void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
+                                  size_t length) {
   if (!data) return;
-  live_array_buffers_[data] = length;
+  RegisterNewArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
+      length);
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
 
 
-void Heap::UnregisterArrayBuffer(void* data) {
+void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
   if (!data) return;
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
+  UnregisterArrayBufferHelper(
+      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_,
+      data);
 }
 
 
-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
+void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  RegisterLiveArrayBufferHelper(in_new_space
+                                    ? not_yet_discovered_new_array_buffers_
+                                    : not_yet_discovered_array_buffers_,
+                                data);
 }
 
 
-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
+void Heap::FreeDeadArrayBuffers(bool in_new_space) {
+  size_t freed_memory = FreeDeadArrayBuffersHelper(
+      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
+      in_new_space ? not_yet_discovered_new_array_buffers_
+                   : not_yet_discovered_array_buffers_);
+  if (freed_memory) {
+    reinterpret_cast<v8::Isolate*>(isolate_)
+        ->AdjustAmountOfExternalAllocatedMemory(
+            -static_cast<int64_t>(freed_memory));
   }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }
 
 
 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
+                             not_yet_discovered_array_buffers_);
+  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
+                             not_yet_discovered_new_array_buffers_);
+}
+
+
+void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
+  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
+}
+
+
+void Heap::PromoteArrayBuffer(Object* obj) {
+  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
+  if (buffer->is_external()) return;
+  void* data = buffer->backing_store();
+  if (!data) return;
+  // ArrayBuffer might be in the middle of being constructed.
+  if (data == undefined_value()) return;
+  DCHECK(live_new_array_buffers_.count(data) > 0);
+  live_array_buffers_[data] = live_new_array_buffers_[data];
+  live_new_array_buffers_.erase(data);
+  not_yet_discovered_new_array_buffers_.erase(data);
 }
 
 
@@ -1986,6 +2066,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
+    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
 
     table_.Register(
         kVisitNativeContext,
@@ -2015,9 +2096,6 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
 
-    table_.Register(kVisitJSArrayBuffer,
-                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
-
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
 
@@ -2264,6 +2342,18 @@ class ScavengingVisitor : public StaticVisitorBase {
   }
 
 
+  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
+                                           HeapObject* object) {
+    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
+
+    Heap* heap = map->GetHeap();
+    MapWord map_word = object->map_word();
+    DCHECK(map_word.IsForwardingAddress());
+    HeapObject* target = map_word.ToForwardingAddress();
+    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
+  }
+
+
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
diff --git a/src/heap/heap.h b/src/heap/heap.h
index ff6a11ee687..42a0886acf9 100644
--- a/src/heap/heap.h
+++ b/src/heap/heap.h
@@ -1512,10 +1512,28 @@ class Heap {
 
   bool deserialization_complete() const { return deserialization_complete_; }
 
-  void RegisterNewArrayBuffer(void* data, size_t length);
-  void UnregisterArrayBuffer(void* data);
-  void RegisterLiveArrayBuffer(void* data);
-  void FreeDeadArrayBuffers();
+  // The following methods are used to track raw C++ pointers to externally
+  // allocated memory used as backing store in live array buffers.
+
+  // A new ArrayBuffer was created with |data| as backing store.
+  void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);
+
+  // The backing store |data| is no longer owned by V8.
+  void UnregisterArrayBuffer(bool in_new_space, void* data);
+
+  // A live ArrayBuffer was discovered during marking/scavenge.
+  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
+
+  // Frees all backing store pointers that weren't discovered in the previous
+  // marking or scavenge phase.
+  void FreeDeadArrayBuffers(bool in_new_space);
+
+  // Prepare for a new scavenge phase. A new marking phase is implicitly
+  // prepared by finishing the previous one.
+  void PrepareArrayBufferDiscoveryInNewSpace();
+
+  // An ArrayBuffer moved from new space to old space.
+  void PromoteArrayBuffer(Object* buffer);
 
  protected:
   // Methods made available to tests.
@@ -2034,9 +2052,24 @@ class Heap {
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
 
-  // Called on heap tear-down.
+  // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
   void TearDownArrayBuffers();
 
+  // These correspond to the non-Helper versions.
+  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
+                                    void* data, size_t length);
+  void UnregisterArrayBufferHelper(
+      std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
+  void RegisterLiveArrayBufferHelper(
+      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
+  size_t FreeDeadArrayBuffersHelper(
+      Isolate* isolate, std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers);
+  void TearDownArrayBuffersHelper(
+      Isolate* isolate, std::map<void*, size_t>& live_buffers,
+      std::map<void*, size_t>& not_yet_discovered_buffers);
+
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
@@ -2184,7 +2217,9 @@ class Heap {
   bool concurrent_sweeping_enabled_;
 
   std::map<void*, size_t> live_array_buffers_;
+  std::map<void*, size_t> live_new_array_buffers_;
   std::map<void*, size_t> not_yet_discovered_array_buffers_;
+  std::map<void*, size_t> not_yet_discovered_new_array_buffers_;
 
   struct StrongRootsList;
   StrongRootsList* strong_roots_list_;
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index be9938b4a1b..5b29c2175bc 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -3131,6 +3131,10 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
 #endif
   if (allocation.To(&target)) {
     MigrateObject(target, object, object_size, old_space->identity());
+    // If we end up needing more special cases, we should factor this out.
+    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
+      heap()->PromoteArrayBuffer(target);
+    }
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -4437,7 +4441,6 @@ void MarkCompactCollector::SweepSpaces() {
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
-  heap()->FreeDeadArrayBuffers();
 
   MoveEvacuationCandidatesToEndOfPagesList();
 
@@ -4465,6 +4468,8 @@ void MarkCompactCollector::SweepSpaces() {
 
   EvacuateNewSpaceAndCandidates();
 
+  heap()->FreeDeadArrayBuffers(false);
+
   // ClearNonLiveReferences depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
diff --git a/src/heap/objects-visiting-inl.h b/src/heap/objects-visiting-inl.h
index 6afee2602b2..62be04ff349 100644
--- a/src/heap/objects-visiting-inl.h
+++ b/src/heap/objects-visiting-inl.h
@@ -84,6 +84,10 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
       heap,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
+  if (!JSArrayBuffer::cast(object)->is_external()) {
+    heap->RegisterLiveArrayBuffer(true,
+                                  JSArrayBuffer::cast(object)->backing_store());
+  }
   return JSArrayBuffer::kSizeWithInternalFields;
 }
 
@@ -532,7 +536,8 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
+    heap->RegisterLiveArrayBuffer(heap->InNewSpace(object),
+                                  JSArrayBuffer::cast(object)->backing_store());
   }
 }
 
diff --git a/src/objects.cc b/src/objects.cc
index 67e32a63ac4..8c14d057d2e 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -17100,10 +17100,11 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
   void* backing_store =
       isolate->array_buffer_allocator()->AllocateUninitialized(
          fixed_typed_array->DataSize());
-  isolate->heap()->RegisterNewArrayBuffer(backing_store,
-                                          fixed_typed_array->DataSize());
   buffer->set_backing_store(backing_store);
   buffer->set_is_external(false);
+  isolate->heap()->RegisterNewArrayBuffer(isolate->heap()->InNewSpace(*buffer),
+                                          backing_store,
+                                          fixed_typed_array->DataSize());
   memcpy(buffer->backing_store(), fixed_typed_array->DataPtr(),
          fixed_typed_array->DataSize());
 
diff --git a/src/runtime/runtime-typedarray.cc b/src/runtime/runtime-typedarray.cc
index dfc1bab8d3d..10aba0cc424 100644
--- a/src/runtime/runtime-typedarray.cc
+++ b/src/runtime/runtime-typedarray.cc
@@ -33,7 +33,8 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
   array_buffer->set_byte_length(*byte_length);
 
   if (data && !is_external) {
-    isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
+    isolate->heap()->RegisterNewArrayBuffer(
+        isolate->heap()->InNewSpace(*array_buffer), data, allocated_length);
   }
 }
 
@@ -143,7 +144,8 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
   size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
   array_buffer->set_is_external(true);
   Runtime::NeuterArrayBuffer(array_buffer);
-  isolate->heap()->UnregisterArrayBuffer(backing_store);
+  isolate->heap()->UnregisterArrayBuffer(
+      isolate->heap()->InNewSpace(*array_buffer), backing_store);
   isolate->array_buffer_allocator()->Free(backing_store, byte_length);
   return isolate->heap()->undefined_value();
 }