[heap] Forces CodeSpaceMemoryModificationScope only in safepoints

CodeSpaceMemoryModificationScope should only be used by the main
thread and during a safepoint. This adds a check in
CodeSpaceMemoryModificationScope.

The reason for this is that CodeSpaceMemoryModificationScope is not
thread-safe. It assumes that no other thread is modifying code space
(either by setting memory permission or adding a new page).

This CL also replaces CodeSpaceMemoryModificationScope with
CodePageCollectionMemoryModificationScope in a few occurrences where
the former is not needed. This should not hurt performance.

Bug: v8:12054
Change-Id: I2675e667782c6ad8410877a4e64374899066bcd1
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3263890
Commit-Queue: Victor Gomes <victorgomes@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/main@{#77732}
This commit is contained in:
Victor Gomes 2021-11-05 12:50:09 +01:00 committed by V8 LUCI CQ
parent f31fb295e5
commit 5bb577eaf3
8 changed files with 61 additions and 58 deletions

View File

@ -223,7 +223,7 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
// Replace references from all builtin code objects to placeholders. // Replace references from all builtin code objects to placeholders.
Builtins* builtins = isolate->builtins(); Builtins* builtins = isolate->builtins();
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
CodeSpaceMemoryModificationScope modification_scope(isolate->heap()); CodePageCollectionMemoryModificationScope modification_scope(isolate->heap());
static const int kRelocMask = static const int kRelocMask =
RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::FULL_EMBEDDED_OBJECT) |
@ -233,6 +233,8 @@ void SetupIsolateDelegate::ReplacePlaceholders(Isolate* isolate) {
for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast; for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
++builtin) { ++builtin) {
Code code = builtins->code(builtin); Code code = builtins->code(builtin);
isolate->heap()->UnprotectAndRegisterMemoryChunk(
code, UnprotectMemoryOrigin::kMainThread);
bool flush_icache = false; bool flush_icache = false;
for (RelocIterator it(code, kRelocMask); !it.done(); it.next()) { for (RelocIterator it(code, kRelocMask); !it.done(); it.next()) {
RelocInfo* rinfo = it.rinfo(); RelocInfo* rinfo = it.rinfo();

View File

@ -3831,7 +3831,7 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
// If we are deserializing, read the state into the now-empty heap. // If we are deserializing, read the state into the now-empty heap.
{ {
CodeSpaceMemoryModificationScope modification_scope(heap()); CodePageCollectionMemoryModificationScope modification_scope(heap());
if (create_heap_objects) { if (create_heap_objects) {
heap_.read_only_space()->ClearStringPaddingIfNeeded(); heap_.read_only_space()->ClearStringPaddingIfNeeded();

View File

@ -30,6 +30,7 @@
#include "src/heap/paged-spaces-inl.h" #include "src/heap/paged-spaces-inl.h"
#include "src/heap/read-only-heap.h" #include "src/heap/read-only-heap.h"
#include "src/heap/read-only-spaces.h" #include "src/heap/read-only-spaces.h"
#include "src/heap/safepoint.h"
#include "src/heap/spaces-inl.h" #include "src/heap/spaces-inl.h"
#include "src/heap/third-party/heap-api.h" #include "src/heap/third-party/heap-api.h"
#include "src/objects/allocation-site-inl.h" #include "src/objects/allocation-site-inl.h"
@ -791,6 +792,8 @@ AlwaysAllocateScopeForTesting::AlwaysAllocateScopeForTesting(Heap* heap)
CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap) CodeSpaceMemoryModificationScope::CodeSpaceMemoryModificationScope(Heap* heap)
: heap_(heap) { : heap_(heap) {
DCHECK_EQ(ThreadId::Current(), heap_->isolate()->thread_id());
heap_->safepoint()->AssertActive();
if (heap_->write_protect_code_memory()) { if (heap_->write_protect_code_memory()) {
heap_->increment_code_space_memory_modification_scope_depth(); heap_->increment_code_space_memory_modification_scope_depth();
heap_->code_space()->SetCodeModificationPermissions(); heap_->code_space()->SetCodeModificationPermissions();

View File

@ -5420,7 +5420,7 @@ void Heap::DisableInlineAllocation() {
// Update inline allocation limit for old spaces. // Update inline allocation limit for old spaces.
PagedSpaceIterator spaces(this); PagedSpaceIterator spaces(this);
CodeSpaceMemoryModificationScope modification_scope(this); CodePageCollectionMemoryModificationScope modification_scope(this);
for (PagedSpace* space = spaces.Next(); space != nullptr; for (PagedSpace* space = spaces.Next(); space != nullptr;
space = spaces.Next()) { space = spaces.Next()) {
base::MutexGuard guard(space->mutex()); base::MutexGuard guard(space->mutex());

View File

@ -17,7 +17,7 @@
#include "src/base/utils/random-number-generator.h" #include "src/base/utils/random-number-generator.h"
#include "src/compiler/wasm-compiler.h" #include "src/compiler/wasm-compiler.h"
#include "src/handles/global-handles-inl.h" #include "src/handles/global-handles-inl.h"
#include "src/heap/heap-inl.h" // For CodeSpaceMemoryModificationScope. #include "src/heap/heap-inl.h" // For CodePageCollectionMemoryModificationScope.
#include "src/logging/counters-scopes.h" #include "src/logging/counters-scopes.h"
#include "src/logging/metrics.h" #include "src/logging/metrics.h"
#include "src/objects/property-descriptor.h" #include "src/objects/property-descriptor.h"
@ -3360,12 +3360,13 @@ void CompilationStateImpl::FinalizeJSToWasmWrappers(
*export_wrappers_out = isolate->factory()->NewFixedArray( *export_wrappers_out = isolate->factory()->NewFixedArray(
MaxNumExportWrappers(module), AllocationType::kOld); MaxNumExportWrappers(module), AllocationType::kOld);
// TODO(6792): Wrappers below are allocated with {Factory::NewCode}. As an // TODO(6792): Wrappers below are allocated with {Factory::NewCode}. As an
// optimization we keep the code space unlocked to avoid repeated unlocking // optimization we create a code memory modification scope that avoids
// because many such wrapper are allocated in sequence below. // changing the page permissions back-and-forth between RWX and RX, because
// many such wrapper are allocated in sequence below.
TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"), TRACE_EVENT1(TRACE_DISABLED_BY_DEFAULT("v8.wasm.detailed"),
"wasm.FinalizeJSToWasmWrappers", "wrappers", "wasm.FinalizeJSToWasmWrappers", "wrappers",
js_to_wasm_wrapper_units_.size()); js_to_wasm_wrapper_units_.size());
CodeSpaceMemoryModificationScope modification_scope(isolate->heap()); CodePageCollectionMemoryModificationScope modification_scope(isolate->heap());
for (auto& unit : js_to_wasm_wrapper_units_) { for (auto& unit : js_to_wasm_wrapper_units_) {
DCHECK_EQ(isolate, unit->isolate()); DCHECK_EQ(isolate, unit->isolate());
Handle<Code> code = unit->Finalize(); Handle<Code> code = unit->Finalize();
@ -3798,9 +3799,10 @@ void CompileJsToWasmWrappers(Isolate* isolate, const WasmModule* module,
// Finalize compilation jobs in the main thread. // Finalize compilation jobs in the main thread.
// TODO(6792): Wrappers below are allocated with {Factory::NewCode}. As an // TODO(6792): Wrappers below are allocated with {Factory::NewCode}. As an
// optimization we keep the code space unlocked to avoid repeated unlocking // optimization we create a code memory modification scope that avoids
// because many such wrapper are allocated in sequence below. // changing the page permissions back-and-forth between RWX and RX, because
CodeSpaceMemoryModificationScope modification_scope(isolate->heap()); // many such wrapper are allocated in sequence below.
CodePageCollectionMemoryModificationScope modification_scope(isolate->heap());
for (auto& pair : compilation_units) { for (auto& pair : compilation_units) {
JSToWasmWrapperKey key = pair.first; JSToWasmWrapperKey key = pair.first;
JSToWasmWrapperCompilationUnit* unit = pair.second.get(); JSToWasmWrapperCompilationUnit* unit = pair.second.get();

View File

@ -198,7 +198,7 @@ void SimulateFullSpace(v8::internal::PagedSpace* space) {
// FLAG_stress_concurrent_allocation = false; // FLAG_stress_concurrent_allocation = false;
// Background thread allocating concurrently interferes with this function. // Background thread allocating concurrently interferes with this function.
CHECK(!FLAG_stress_concurrent_allocation); CHECK(!FLAG_stress_concurrent_allocation);
CodeSpaceMemoryModificationScope modification_scope(space->heap()); CodePageCollectionMemoryModificationScope modification_scope(space->heap());
i::MarkCompactCollector* collector = space->heap()->mark_compact_collector(); i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
if (collector->sweeping_in_progress()) { if (collector->sweeping_in_progress()) {
collector->EnsureSweepingCompleted(); collector->EnsureSweepingCompleted();

View File

@ -478,9 +478,10 @@ UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) {
v8::Isolate* isolate = v8::Isolate::New(create_params); v8::Isolate* isolate = v8::Isolate::New(create_params);
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate); Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
Heap* heap = i_isolate->heap(); Heap* heap = i_isolate->heap();
{
Code code; Code code;
HeapObject value; HeapObject value;
CodePageCollectionMemoryModificationScope modification_scope(heap);
{ {
HandleScope handle_scope(i_isolate); HandleScope handle_scope(i_isolate);
i::byte buffer[i::Assembler::kDefaultBufferSize]; i::byte buffer[i::Assembler::kDefaultBufferSize];
@ -511,7 +512,6 @@ UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) {
CHECK(heap->incremental_marking()->marking_state()->IsWhite(value)); CHECK(heap->incremental_marking()->marking_state()->IsWhite(value));
{ {
CodeSpaceMemoryModificationScope modification_scope(heap);
auto thread = auto thread =
std::make_unique<ConcurrentRecordRelocSlotThread>(heap, code, value); std::make_unique<ConcurrentRecordRelocSlotThread>(heap, code, value);
CHECK(thread->Start()); CHECK(thread->Start());
@ -521,7 +521,7 @@ UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) {
CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value)); CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value));
heap::InvokeMarkSweep(i_isolate); heap::InvokeMarkSweep(i_isolate);
}
isolate->Dispose(); isolate->Dispose();
} }

View File

@ -224,12 +224,7 @@ HEAP_TEST(TestNewSpaceRefsInCopiedCode) {
masm.GetCode(isolate, &desc); masm.GetCode(isolate, &desc);
Handle<Code> code = Handle<Code> code =
Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
Handle<Code> copy = factory->CopyCode(code);
Handle<Code> copy;
{
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
copy = factory->CopyCode(code);
}
CheckEmbeddedObjectsAreEqual(isolate, code, copy); CheckEmbeddedObjectsAreEqual(isolate, code, copy);
CcTest::CollectAllAvailableGarbage(); CcTest::CollectAllAvailableGarbage();
@ -7289,9 +7284,10 @@ TEST(Regress10900) {
Handle<Code> code = Handle<Code> code =
Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build(); Factory::CodeBuilder(isolate, desc, CodeKind::FOR_TESTING).Build();
{ {
// Generate multiple code pages.
CodeSpaceMemoryModificationScope modification_scope(isolate->heap());
for (int i = 0; i < 100; i++) { for (int i = 0; i < 100; i++) {
// Generate multiple code pages.
CodePageCollectionMemoryModificationScope modification_scope(
isolate->heap());
factory->CopyCode(code); factory->CopyCode(code);
} }
} }