Disable bytecode flushing once we toggle coverage mode.

Changing the coverage mode can generate different bytecode for a function in
some cases. Hence it is not safe to flush bytecode once the coverage mode has
been toggled.

Bug: chromium:1147917
Change-Id: I9e640aeaec664d3d4a4aaedf809c568e9ad924fc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2615020
Commit-Queue: Mythri Alle <mythria@chromium.org>
Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71985}
Author: Mythri A
Committed: 2021-01-08 16:42:04 +00:00 by Commit Bot
parent 432c0a78e9
commit 8aa6b15fa0
6 changed files with 21 additions and 10 deletions

src/debug/debug-coverage.cc

@@ -747,6 +747,10 @@ void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
     // Changing the coverage mode can change the bytecode that would be
     // generated for a function, which can interfere with lazy source positions,
     // so just force source position collection whenever there's such a change.
     isolate->CollectSourcePositionsForAllBytecodeArrays();
+    // Changing the coverage mode changes the generated bytecode and hence it is
+    // not safe to flush bytecode. Set a flag here, so we can disable bytecode
+    // flushing.
+    isolate->set_disable_bytecode_flushing(true);
   }
   switch (mode) {
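
As a minimal standalone sketch of the behaviour this hunk introduces (not V8 code; FakeIsolate, SelectCoverageMode and CoverageMode here are made-up stand-ins): the flag is only ever set, never cleared, so once the coverage mode changes, bytecode flushing stays off for the lifetime of the isolate.

// Minimal model of the one-way switch added above (hypothetical names).
#include <cassert>

enum class CoverageMode { kBestEffort, kPreciseCount };

class FakeIsolate {
 public:
  void SelectCoverageMode(CoverageMode mode) {
    if (mode != coverage_mode_) {
      // Mirrors isolate->set_disable_bytecode_flushing(true): the flag is
      // only ever set, never cleared, so flushing is off from here on.
      disable_bytecode_flushing_ = true;
      coverage_mode_ = mode;
    }
  }
  bool disable_bytecode_flushing() const { return disable_bytecode_flushing_; }

 private:
  CoverageMode coverage_mode_ = CoverageMode::kBestEffort;
  bool disable_bytecode_flushing_ = false;
};

int main() {
  FakeIsolate isolate;
  assert(!isolate.disable_bytecode_flushing());
  isolate.SelectCoverageMode(CoverageMode::kPreciseCount);
  assert(isolate.disable_bytecode_flushing());
  // Switching back does not re-enable flushing: bytecode already compiled
  // under the previous mode could otherwise be flushed and later recompiled
  // differently.
  isolate.SelectCoverageMode(CoverageMode::kBestEffort);
  assert(isolate.disable_bytecode_flushing());
}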

src/execution/isolate.h

@@ -458,6 +458,7 @@ using DebugObjectCache = std::vector<Handle<HeapObject>>;
   /* Current code coverage mode */ \
   V(debug::CoverageMode, code_coverage_mode, debug::CoverageMode::kBestEffort) \
   V(debug::TypeProfileMode, type_profile_mode, debug::TypeProfileMode::kNone) \
+  V(bool, disable_bytecode_flushing, false) \
   V(int, last_console_context_id, 0) \
   V(v8_inspector::V8Inspector*, inspector, nullptr) \
   V(bool, next_v8_call_is_safe_for_termination, false) \
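
For readers unfamiliar with this macro list: a simplified sketch of how an X-macro entry like the one added above is typically consumed (the real Isolate macros differ in detail; FakeIsolate, FAKE_ISOLATE_INIT_LIST, GLOBAL_ACCESSOR and GLOBAL_FIELD are illustrative names). Each V(type, name, initial_value) row produces a field plus name()/set_name() accessors, which is where disable_bytecode_flushing() and set_disable_bytecode_flushing() come from.

// Sketch of X-macro expansion into a field plus accessors (not V8's macros).
#include <iostream>

#define FAKE_ISOLATE_INIT_LIST(V)           \
  V(bool, disable_bytecode_flushing, false) \
  V(int, last_console_context_id, 0)

class FakeIsolate {
 public:
#define GLOBAL_ACCESSOR(type, name, initial)          \
  type name() const { return name##_; }               \
  void set_##name(type value) { name##_ = value; }
  FAKE_ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

 private:
#define GLOBAL_FIELD(type, name, initial) type name##_ = initial;
  FAKE_ISOLATE_INIT_LIST(GLOBAL_FIELD)
#undef GLOBAL_FIELD
};

int main() {
  FakeIsolate isolate;
  isolate.set_disable_bytecode_flushing(true);
  std::cout << std::boolalpha << isolate.disable_bytecode_flushing() << "\n";  // true
}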

src/heap/concurrent-marking.cc

@@ -409,7 +409,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch,
   MarkingWorklists::Local local_marking_worklists(marking_worklists_);
   ConcurrentMarkingVisitor visitor(
       task_id, &local_marking_worklists, weak_objects_, heap_,
-      mark_compact_epoch, Heap::GetBytecodeFlushMode(),
+      mark_compact_epoch, Heap::GetBytecodeFlushMode(heap_->isolate()),
       heap_->local_embedder_heap_tracer()->InUse(), is_forced_gc,
       &task_state->memory_chunk_data);
   NativeContextInferrer& native_context_inferrer =
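
A standalone sketch of the pattern used at this call site (not V8's marker; BytecodeFlushMode values and FakeVisitor are illustrative): the flush mode is decided once, where it is safe to read isolate state and flags, and then passed by value into the concurrently running worker instead of letting the worker read shared configuration itself.

// Snapshot a configuration value on the main thread, hand it to a worker.
#include <iostream>
#include <thread>

enum class BytecodeFlushMode { kDoNotFlushBytecode, kFlushBytecode };

struct FakeVisitor {
  // The mode is captured at construction time; the worker never reads
  // mutable global state while it runs.
  explicit FakeVisitor(BytecodeFlushMode mode) : mode_(mode) {}
  void Run() const {
    std::cout << (mode_ == BytecodeFlushMode::kFlushBytecode
                      ? "may flush bytecode\n"
                      : "will not flush bytecode\n");
  }
  BytecodeFlushMode mode_;
};

int main() {
  // Decided up front, analogous to Heap::GetBytecodeFlushMode(isolate).
  BytecodeFlushMode mode = BytecodeFlushMode::kDoNotFlushBytecode;
  std::thread worker([visitor = FakeVisitor(mode)] { visitor.Run(); });
  worker.join();
}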

src/heap/heap-inl.h

@@ -72,6 +72,19 @@ Address AllocationResult::ToAddress() {
   return HeapObject::cast(object_).address();
 }
 
+// static
+BytecodeFlushMode Heap::GetBytecodeFlushMode(Isolate* isolate) {
+  if (isolate->disable_bytecode_flushing()) {
+    return BytecodeFlushMode::kDoNotFlushBytecode;
+  }
+  if (FLAG_stress_flush_bytecode) {
+    return BytecodeFlushMode::kStressFlushBytecode;
+  } else if (FLAG_flush_bytecode) {
+    return BytecodeFlushMode::kFlushBytecode;
+  }
+  return BytecodeFlushMode::kDoNotFlushBytecode;
+}
+
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(
       reinterpret_cast<intptr_t>(this) -
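
Restating the precedence the new helper implements, as a self-contained sketch with hypothetical boolean parameters standing in for the isolate flag and the --stress-flush-bytecode / --flush-bytecode flags: the per-isolate override wins over both flags, the stress flag wins over the plain flush flag, and the default is to not flush.

// Decision order of the new GetBytecodeFlushMode (illustrative, not V8 code).
#include <cassert>

enum class BytecodeFlushMode {
  kDoNotFlushBytecode,
  kFlushBytecode,
  kStressFlushBytecode,
};

BytecodeFlushMode GetBytecodeFlushMode(bool disable_bytecode_flushing,
                                       bool stress_flush_bytecode,
                                       bool flush_bytecode) {
  if (disable_bytecode_flushing) return BytecodeFlushMode::kDoNotFlushBytecode;
  if (stress_flush_bytecode) return BytecodeFlushMode::kStressFlushBytecode;
  if (flush_bytecode) return BytecodeFlushMode::kFlushBytecode;
  return BytecodeFlushMode::kDoNotFlushBytecode;
}

int main() {
  // The isolate-level override takes priority even when flushing flags are on.
  assert(GetBytecodeFlushMode(true, true, true) ==
         BytecodeFlushMode::kDoNotFlushBytecode);
  assert(GetBytecodeFlushMode(false, true, true) ==
         BytecodeFlushMode::kStressFlushBytecode);
  assert(GetBytecodeFlushMode(false, false, true) ==
         BytecodeFlushMode::kFlushBytecode);
  assert(GetBytecodeFlushMode(false, false, false) ==
         BytecodeFlushMode::kDoNotFlushBytecode);
}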

src/heap/heap.h

@@ -445,14 +445,7 @@ class Heap {
   // Helper function to get the bytecode flushing mode based on the flags. This
   // is required because it is not safe to acess flags in concurrent marker.
-  static inline BytecodeFlushMode GetBytecodeFlushMode() {
-    if (FLAG_stress_flush_bytecode) {
-      return BytecodeFlushMode::kStressFlushBytecode;
-    } else if (FLAG_flush_bytecode) {
-      return BytecodeFlushMode::kFlushBytecode;
-    }
-    return BytecodeFlushMode::kDoNotFlushBytecode;
-  }
+  static inline BytecodeFlushMode GetBytecodeFlushMode(Isolate* isolate);
 
   static uintptr_t ZapValue() {
     return FLAG_clear_free_memory ? kClearedFreeMemoryValue : kZapValue;
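
A single-file sketch of the likely reason the body moves from heap.h to heap-inl.h (my inference, not stated in the commit; Heap::ShouldFlushBytecode and the classes below are hypothetical): the declaring header only forward-declares Isolate, so it cannot call methods on it, while the -inl header sees the full definition and can.

// Declaration in one header, definition where the complete type is visible.
#include <iostream>

// --- "heap.h": only a forward declaration of Isolate is visible here. ---
class Isolate;

class Heap {
 public:
  // Declared here, defined below where Isolate is a complete type.
  static inline bool ShouldFlushBytecode(Isolate* isolate);
};

// --- "isolate.h": the full definition. ---
class Isolate {
 public:
  bool disable_bytecode_flushing() const { return disable_bytecode_flushing_; }
  void set_disable_bytecode_flushing(bool value) {
    disable_bytecode_flushing_ = value;
  }

 private:
  bool disable_bytecode_flushing_ = false;
};

// --- "heap-inl.h": can dereference Isolate because its definition is in scope. ---
bool Heap::ShouldFlushBytecode(Isolate* isolate) {
  return !isolate->disable_bytecode_flushing();
}

int main() {
  Isolate isolate;
  std::cout << std::boolalpha << Heap::ShouldFlushBytecode(&isolate) << "\n";  // true
  isolate.set_disable_bytecode_flushing(true);
  std::cout << Heap::ShouldFlushBytecode(&isolate) << "\n";  // false
}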

src/heap/mark-compact.cc

@@ -499,7 +499,7 @@ void MarkCompactCollector::StartMarking() {
       std::make_unique<MarkingWorklists::Local>(marking_worklists());
   marking_visitor_ = std::make_unique<MarkingVisitor>(
       marking_state(), local_marking_worklists(), weak_objects(), heap_,
-      epoch(), Heap::GetBytecodeFlushMode(),
+      epoch(), Heap::GetBytecodeFlushMode(heap()->isolate()),
       heap_->local_embedder_heap_tracer()->InUse(),
       heap_->is_current_gc_forced());
   // Marking bits are cleared by the sweeper.