From 28ffc9f599a877afecde47a171cb960834296de5 Mon Sep 17 00:00:00 2001
From: Benjamin Coe
Date: Thu, 29 Aug 2019 07:31:26 -0700
Subject: [PATCH] deps: V8: cherry-pick 597f885

Original commit message:

    [coverage] Deterministically sort collected shared function infos

    Prior to this CL, collected shared function infos with identical source
    ranges were sorted non-deterministically during coverage collection.
    This led to non-deterministically incorrectly-reported coverage due to
    an optimization which depended on the sort order later on.

    With this CL, we now sort shared function infos by the source range
    *and* call count.

    Bug: v8:6000,v8:9212
    Change-Id: If8bf900727591e71dbd0df621e472a4303f3a353
    Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1771776
    Reviewed-by: Yang Guo
    Commit-Queue: Jakob Gruber
    Cr-Commit-Position: refs/heads/master@{#63411}

Refs: https://github.com/v8/v8/commit/597f885eaa6189f8c8466ee9916763c679e84e9a
PR-URL: https://github.com/nodejs/node/pull/29367
Reviewed-By: Rich Trott
Reviewed-By: Colin Ihrig
Reviewed-By: Michael Dawson
Reviewed-By: Ben Noordhuis
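To illustrate the new ordering in isolation, the sketch below uses a simplified stand-in struct (a hypothetical FunctionAndCount with plain ints, not the real SharedFunctionInfo API). Its comparator mirrors the one this patch adds: start position ascending, end position descending (so outer functions precede the inner functions they enclose), and call count ascending as a deterministic tie-breaker for identical source ranges.

    #include <algorithm>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    // Simplified stand-in for the patch's SharedFunctionInfoAndCount
    // (illustration only, not V8 code).
    struct FunctionAndCount {
      int start;            // source range start position
      int end;              // source range end position
      std::uint32_t count;  // invocation count

      // Sort by: start ascending, end descending, count ascending.
      // Without the count tie-breaker, functions with identical source
      // ranges would sort in an unspecified order.
      bool operator<(const FunctionAndCount& that) const {
        if (start != that.start) return start < that.start;
        if (end != that.end) return end > that.end;
        return count < that.count;
      }
    };

    int main() {
      // Two functions sharing the source range [0, 10), plus a nested one.
      std::vector<FunctionAndCount> fns = {{0, 10, 5}, {0, 10, 2}, {2, 8, 7}};
      std::sort(fns.begin(), fns.end());
      // Always prints [0, 10) count=2, [0, 10) count=5, [2, 8) count=7,
      // regardless of the input order.
      for (const auto& f : fns) {
        std::cout << "[" << f.start << ", " << f.end << ") count=" << f.count
                  << "\n";
      }
    }

Because std::sort is not stable, the explicit count tie-breaker is what makes the resulting order reproducible; the `parent_is_covered` optimization later in Collect then sees the same inner/outer resolution on every run.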
---
 common.gypi                         |   2 +-
 deps/v8/src/debug/debug-coverage.cc | 124 ++++++++++++++++++----------
 2 files changed, 81 insertions(+), 45 deletions(-)

diff --git a/common.gypi b/common.gypi
index 6c4b04d7d06ecc..b86e5e05d7df9a 100644
--- a/common.gypi
+++ b/common.gypi
@@ -38,7 +38,7 @@

     # Reset this number to 0 on major V8 upgrades.
     # Increment by one for each non-official patch applied to deps/v8.
-    'v8_embedder_string': '-node.15',
+    'v8_embedder_string': '-node.16',

     ##### V8 defaults for Node.js #####

diff --git a/deps/v8/src/debug/debug-coverage.cc b/deps/v8/src/debug/debug-coverage.cc
index 4021cd503898ce..24aa617524d011 100644
--- a/deps/v8/src/debug/debug-coverage.cc
+++ b/deps/v8/src/debug/debug-coverage.cc
@@ -54,13 +54,6 @@ int StartPosition(SharedFunctionInfo info) {
   return start;
 }

-bool CompareSharedFunctionInfo(SharedFunctionInfo a, SharedFunctionInfo b) {
-  int a_start = StartPosition(a);
-  int b_start = StartPosition(b);
-  if (a_start == b_start) return a.EndPosition() > b.EndPosition();
-  return a_start < b_start;
-}
-
 bool CompareCoverageBlock(const CoverageBlock& a, const CoverageBlock& b) {
   DCHECK_NE(kNoSourcePosition, a.start);
   DCHECK_NE(kNoSourcePosition, b.start);
@@ -481,32 +474,12 @@ void CollectBlockCoverage(CoverageFunction* function, SharedFunctionInfo info,
     // Reset all counters on the DebugInfo to zero.
     ResetAllBlockCounts(info);
   }
-}  // anonymous namespace
-
-std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
-  DCHECK(!isolate->is_best_effort_code_coverage());
-  std::unique_ptr<Coverage> result =
-      Collect(isolate, isolate->code_coverage_mode());
-  if (!isolate->is_collecting_type_profile() &&
-      (isolate->is_precise_binary_code_coverage() ||
-       isolate->is_block_binary_code_coverage())) {
-    // We do not have to hold onto feedback vectors for invocations we already
-    // reported. So we can reset the list.
-    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
-  }
-  return result;
-}
-
-std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
-  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
-}
-
-std::unique_ptr<Coverage> Coverage::Collect(
-    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
-  SharedToCounterMap counter_map;

+void CollectAndMaybeResetCounts(Isolate* isolate,
+                                SharedToCounterMap* counter_map,
+                                v8::debug::CoverageMode coverage_mode) {
   const bool reset_count =
-      collectionMode != v8::debug::CoverageMode::kBestEffort;
+      coverage_mode != v8::debug::CoverageMode::kBestEffort;

   switch (isolate->code_coverage_mode()) {
     case v8::debug::CoverageMode::kBlockBinary:
@@ -525,7 +498,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
         DCHECK(shared.IsSubjectToDebugging());
         uint32_t count = static_cast<uint32_t>(vector.invocation_count());
         if (reset_count) vector.clear_invocation_count();
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }
       break;
     }
@@ -533,7 +506,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
       DCHECK(!isolate->factory()
                  ->feedback_vectors_for_profiling_tools()
                  ->IsArrayList());
-      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, collectionMode);
+      DCHECK_EQ(v8::debug::CoverageMode::kBestEffort, coverage_mode);
       HeapIterator heap_iterator(isolate->heap());
       for (HeapObject current_obj = heap_iterator.next();
            !current_obj.is_null(); current_obj = heap_iterator.next()) {
@@ -542,8 +515,9 @@ std::unique_ptr<Coverage> Coverage::Collect(
         SharedFunctionInfo shared = func.shared();
         if (!shared.IsSubjectToDebugging()) continue;
         if (!(func.has_feedback_vector() ||
-              func.has_closure_feedback_cell_array()))
+              func.has_closure_feedback_cell_array())) {
           continue;
+        }
         uint32_t count = 0;
         if (func.has_feedback_vector()) {
           count =
@@ -554,7 +528,7 @@ std::unique_ptr<Coverage> Coverage::Collect(
           // at least once. We don't have precise invocation count here.
           count = 1;
         }
-        counter_map.Add(shared, count);
+        counter_map->Add(shared, count);
       }

       // Also check functions on the stack to collect the count map. With lazy
@@ -563,12 +537,64 @@ std::unique_ptr<Coverage> Coverage::Collect(
       // updated (i.e. it didn't execute return / jump).
       for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) {
         SharedFunctionInfo shared = it.frame()->function().shared();
-        if (counter_map.Get(shared) != 0) continue;
-        counter_map.Add(shared, 1);
+        if (counter_map->Get(shared) != 0) continue;
+        counter_map->Add(shared, 1);
       }
       break;
     }
   }
+}
+
+// A {SFI, count} tuple is used to sort by source range (stored on
+// the SFI) and call count (in the counter map).
+struct SharedFunctionInfoAndCount {
+  SharedFunctionInfoAndCount(SharedFunctionInfo info, uint32_t count)
+      : info(info),
+        count(count),
+        start(StartPosition(info)),
+        end(info.EndPosition()) {}
+
+  // Sort by:
+  // - start, ascending.
+  // - end, descending.
+  // - count, ascending.
+  bool operator<(const SharedFunctionInfoAndCount& that) const {
+    if (this->start != that.start) return this->start < that.start;
+    if (this->end != that.end) return this->end > that.end;
+    return this->count < that.count;
+  }
+
+  SharedFunctionInfo info;
+  uint32_t count;
+  int start;
+  int end;
+};
+
+}  // anonymous namespace
+
+std::unique_ptr<Coverage> Coverage::CollectPrecise(Isolate* isolate) {
+  DCHECK(!isolate->is_best_effort_code_coverage());
+  std::unique_ptr<Coverage> result =
+      Collect(isolate, isolate->code_coverage_mode());
+  if (!isolate->is_collecting_type_profile() &&
+      (isolate->is_precise_binary_code_coverage() ||
+       isolate->is_block_binary_code_coverage())) {
+    // We do not have to hold onto feedback vectors for invocations we already
+    // reported. So we can reset the list.
+    isolate->SetFeedbackVectorsForProfilingTools(*ArrayList::New(isolate, 0));
+  }
+  return result;
+}
+
+std::unique_ptr<Coverage> Coverage::CollectBestEffort(Isolate* isolate) {
+  return Collect(isolate, v8::debug::CoverageMode::kBestEffort);
+}
+
+std::unique_ptr<Coverage> Coverage::Collect(
+    Isolate* isolate, v8::debug::CoverageMode collectionMode) {
+  // Collect call counts for all functions.
+  SharedToCounterMap counter_map;
+  CollectAndMaybeResetCounts(isolate, &counter_map, collectionMode);

   // Iterate shared function infos of every script and build a mapping
   // between source ranges and invocation counts.
@@ -583,30 +609,40 @@ std::unique_ptr<Coverage> Coverage::Collect(
     result->emplace_back(script_handle);
     std::vector<CoverageFunction>* functions = &result->back().functions;

-    std::vector<SharedFunctionInfo> sorted;
+    std::vector<SharedFunctionInfoAndCount> sorted;

     {
       // Sort functions by start position, from outer to inner functions.
       SharedFunctionInfo::ScriptIterator infos(isolate, *script_handle);
       for (SharedFunctionInfo info = infos.Next(); !info.is_null();
            info = infos.Next()) {
-        sorted.push_back(info);
+        sorted.emplace_back(info, counter_map.Get(info));
       }
-      std::sort(sorted.begin(), sorted.end(), CompareSharedFunctionInfo);
+      std::sort(sorted.begin(), sorted.end());
     }

     // Stack to track nested functions, referring function by index.
     std::vector<size_t> nesting;

     // Use sorted list to reconstruct function nesting.
-    for (SharedFunctionInfo info : sorted) {
-      int start = StartPosition(info);
-      int end = info.EndPosition();
-      uint32_t count = counter_map.Get(info);
+    for (const SharedFunctionInfoAndCount& v : sorted) {
+      SharedFunctionInfo info = v.info;
+      int start = v.start;
+      int end = v.end;
+      uint32_t count = v.count;
+
       // Find the correct outer function based on start position.
+      //
+      // This is not robust when considering two functions with identical source
+      // ranges. In this case, it is unclear which function is the inner / outer
+      // function. Above, we ensure that such functions are sorted in ascending
+      // `count` order, so at least our `parent_is_covered` optimization below
+      // should be fine.
+      // TODO(jgruber): Consider removing the optimization.
       while (!nesting.empty() && functions->at(nesting.back()).end <= start) {
         nesting.pop_back();
       }
+
       if (count != 0) {
         switch (collectionMode) {
           case v8::debug::CoverageMode::kBlockCount: