Revert "Disable bytecode flushing once we toggle coverage mode."
This reverts commit 8aa6b15fa0.

Reason for revert: broke TSAN: https://ci.chromium.org/ui/p/v8/builders/ci/V8%20Linux64%20TSAN%20-%20stress-incremental-marking/1497/overview

Original change's description:
> Disable bytecode flushing once we toggle coverage mode.
>
> Changing coverage mode generated different bytecode in some cases.
> Hence it is not safe to flush bytecode once we toggle coverage mode.
>
> Bug: chromium:1147917
> Change-Id: I9e640aeaec664d3d4a4aaedf809c568e9ad924fc
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2615020
> Commit-Queue: Mythri Alle <mythria@chromium.org>
> Reviewed-by: Ross McIlroy <rmcilroy@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#71985}
TBR=rmcilroy@chromium.org,mythria@chromium.org
Change-Id: Id4c95da337e291437b7856e2fe7004e1e6405515
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:1147917
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2619402
Reviewed-by: Sathya Gunasekaran <gsathya@chromium.org>
Commit-Queue: Sathya Gunasekaran <gsathya@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71989}
Commit: 61b2335a9e
Parent: b3330e9502
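For context, the change being reverted routed the bytecode flush mode decision through the Isolate so that a per-isolate flag could force kDoNotFlushBytecode once coverage mode had been toggled (see the heap-inl.h hunk below); this revert restores the flag-only variant. Below is a minimal, self-contained sketch of the two decision paths. It is not V8 code: FakeIsolate, kFlagFlushBytecode and kFlagStressFlushBytecode are simplified stand-ins for the real Isolate and FLAG_* globals, only the BytecodeFlushMode values mirror the enum used in the diff.

// Standalone sketch only; stand-in names, not V8's real types or flags.
#include <iostream>

enum class BytecodeFlushMode {
  kDoNotFlushBytecode,
  kFlushBytecode,
  kStressFlushBytecode,
};

constexpr bool kFlagFlushBytecode = true;         // stand-in for FLAG_flush_bytecode
constexpr bool kFlagStressFlushBytecode = false;  // stand-in for FLAG_stress_flush_bytecode

struct FakeIsolate {
  bool disable_bytecode_flushing = false;  // set when coverage mode is toggled
};

// Flag-only variant: the shape this revert restores.
BytecodeFlushMode GetBytecodeFlushMode() {
  if (kFlagStressFlushBytecode) return BytecodeFlushMode::kStressFlushBytecode;
  if (kFlagFlushBytecode) return BytecodeFlushMode::kFlushBytecode;
  return BytecodeFlushMode::kDoNotFlushBytecode;
}

// Isolate-aware variant: the shape the reverted change had introduced,
// where a per-isolate flag overrides the command-line flags.
BytecodeFlushMode GetBytecodeFlushMode(const FakeIsolate& isolate) {
  if (isolate.disable_bytecode_flushing) {
    return BytecodeFlushMode::kDoNotFlushBytecode;
  }
  return GetBytecodeFlushMode();
}

int main() {
  FakeIsolate isolate;
  isolate.disable_bytecode_flushing = true;  // e.g. after a coverage mode change
  std::cout << (GetBytecodeFlushMode(isolate) ==
                        BytecodeFlushMode::kDoNotFlushBytecode
                    ? "flushing disabled\n"
                    : "flushing allowed\n");
}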
@@ -747,10 +747,6 @@ void Coverage::SelectMode(Isolate* isolate, debug::CoverageMode mode) {
     // generated for a function, which can interfere with lazy source positions,
     // so just force source position collection whenever there's such a change.
     isolate->CollectSourcePositionsForAllBytecodeArrays();
-    // Changing the coverage mode changes the generated bytecode and hence it is
-    // not safe to flush bytecode. Set a flag here, so we can disable bytecode
-    // flushing.
-    isolate->set_disable_bytecode_flushing(true);
   }
 
   switch (mode) {
@@ -458,7 +458,6 @@ using DebugObjectCache = std::vector<Handle<HeapObject>>;
   /* Current code coverage mode */                                            \
   V(debug::CoverageMode, code_coverage_mode, debug::CoverageMode::kBestEffort) \
   V(debug::TypeProfileMode, type_profile_mode, debug::TypeProfileMode::kNone)  \
-  V(bool, disable_bytecode_flushing, false)                                    \
   V(int, last_console_context_id, 0)                                           \
   V(v8_inspector::V8Inspector*, inspector, nullptr)                            \
   V(bool, next_v8_call_is_safe_for_termination, false)                         \
@@ -409,7 +409,7 @@ void ConcurrentMarking::Run(JobDelegate* delegate, unsigned mark_compact_epoch,
   MarkingWorklists::Local local_marking_worklists(marking_worklists_);
   ConcurrentMarkingVisitor visitor(
       task_id, &local_marking_worklists, weak_objects_, heap_,
-      mark_compact_epoch, Heap::GetBytecodeFlushMode(heap_->isolate()),
+      mark_compact_epoch, Heap::GetBytecodeFlushMode(),
       heap_->local_embedder_heap_tracer()->InUse(), is_forced_gc,
       &task_state->memory_chunk_data);
   NativeContextInferrer& native_context_inferrer =
@@ -72,19 +72,6 @@ Address AllocationResult::ToAddress() {
   return HeapObject::cast(object_).address();
 }
 
-// static
-BytecodeFlushMode Heap::GetBytecodeFlushMode(Isolate* isolate) {
-  if (isolate->disable_bytecode_flushing()) {
-    return BytecodeFlushMode::kDoNotFlushBytecode;
-  }
-  if (FLAG_stress_flush_bytecode) {
-    return BytecodeFlushMode::kStressFlushBytecode;
-  } else if (FLAG_flush_bytecode) {
-    return BytecodeFlushMode::kFlushBytecode;
-  }
-  return BytecodeFlushMode::kDoNotFlushBytecode;
-}
-
 Isolate* Heap::isolate() {
   return reinterpret_cast<Isolate*>(
       reinterpret_cast<intptr_t>(this) -
@@ -445,7 +445,14 @@ class Heap {
 
   // Helper function to get the bytecode flushing mode based on the flags. This
   // is required because it is not safe to acess flags in concurrent marker.
-  static inline BytecodeFlushMode GetBytecodeFlushMode(Isolate* isolate);
+  static inline BytecodeFlushMode GetBytecodeFlushMode() {
+    if (FLAG_stress_flush_bytecode) {
+      return BytecodeFlushMode::kStressFlushBytecode;
+    } else if (FLAG_flush_bytecode) {
+      return BytecodeFlushMode::kFlushBytecode;
+    }
+    return BytecodeFlushMode::kDoNotFlushBytecode;
+  }
 
   static uintptr_t ZapValue() {
     return FLAG_clear_free_memory ? kClearedFreeMemoryValue : kZapValue;
@@ -499,7 +499,7 @@ void MarkCompactCollector::StartMarking() {
       std::make_unique<MarkingWorklists::Local>(marking_worklists());
   marking_visitor_ = std::make_unique<MarkingVisitor>(
       marking_state(), local_marking_worklists(), weak_objects(), heap_,
-      epoch(), Heap::GetBytecodeFlushMode(heap()->isolate()),
+      epoch(), Heap::GetBytecodeFlushMode(),
       heap_->local_embedder_heap_tracer()->InUse(),
       heap_->is_current_gc_forced());
   // Marking bits are cleared by the sweeper.
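The revert itself does not spell out where TSAN saw the race. One plausible reading, offered here purely as an assumption, is that the concurrent marker began reading the per-isolate disable_bytecode_flushing bool while the main thread could still set it, an unsynchronized cross-thread access. The following minimal, hypothetical sketch (not V8 code) shows that access pattern and how an atomic flag would make the same pattern race-free:

// Hypothetical illustration: a plain bool written by the main thread while a
// marker thread reads it is a data race TSAN would report; std::atomic<bool>
// with relaxed ordering makes the same pattern race-free.
#include <atomic>
#include <thread>

std::atomic<bool> disable_flushing{false};  // a plain `bool` here would race

int main() {
  std::thread marker([] {
    // Marker thread repeatedly consulting the flag during marking.
    for (int i = 0; i < 1000; ++i) {
      (void)disable_flushing.load(std::memory_order_relaxed);
    }
  });
  // Main thread toggling the flag while marking is in progress.
  disable_flushing.store(true, std::memory_order_relaxed);
  marker.join();
}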