[heap] Make stack thread-local and introduce stack markers
This CL makes the object keeping stack information thread-local, moving it from Heap to ThreadLocalTop. In this way, stack scanning works correctly when switching between threads, e.g., when using v8::Locker. It also introduces a mechanism for setting a stack marker, so that only the part of the stack between the stack start and the marker (instead of the current stack top) is scanned.

Bug: v8:13257
Change-Id: I01091f5f49d9a8143d50aeef53789a98bdb29048
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3960991
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#83848}
commit 8c7c087812
parent 724e7ce174
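Before the diff, a minimal sketch (not code from this commit) of the marker protocol that the heap.cc changes below follow at a GC entry point: record a marker at the top of the entry function, clear it on exit. The function name and scaffolding are hypothetical; only "src/heap/base/stack.h" is a header confirmed by this change, the header for v8::base::Stack is omitted.

#include "src/heap/base/stack.h"

void GarbageCollectionEntryPoint(::heap::base::Stack& stack) {
  // Record the current stack position as the marker: conservative scanning
  // triggered anywhere below this frame can stop here instead of at the
  // live stack top.
  stack.set_marker(v8::base::Stack::GetCurrentStackPosition());

  // ... perform the collection; nested code may consult stack.get_marker() ...

  // The marker is only meaningful while the entry point is still on the stack.
  stack.clear_marker();
}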
@@ -39,6 +39,7 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
 # v8_enable_trace_baseline_exec
 # v8_enable_trace_feedback_updates
 # v8_enable_atomic_object_field_writes
+# v8_enable_conservative_stack_scanning
 # v8_enable_concurrent_marking
 # v8_enable_ignition_dispatch_counting
 # v8_enable_builtins_profiling
BUILD.gn
@@ -2271,6 +2271,8 @@ action("v8_dump_build_config") {
     "v8_current_cpu=\"$v8_current_cpu\"",
     "v8_enable_atomic_object_field_writes=" +
         "$v8_enable_atomic_object_field_writes",
+    "v8_enable_conservative_stack_scanning=" +
+        "$v8_enable_conservative_stack_scanning",
     "v8_enable_concurrent_marking=$v8_enable_concurrent_marking",
     "v8_enable_single_generation=$v8_enable_single_generation",
     "v8_enable_i18n_support=$v8_enable_i18n_support",
@@ -518,6 +518,7 @@ def build_config_content(cpu, icu):
         ("v8_current_cpu", cpu),
         ("v8_dict_property_const_tracking", "false"),
         ("v8_enable_atomic_object_field_writes", "false"),
+        ("v8_enable_conservative_stack_scanning", "false"),
         ("v8_enable_concurrent_marking", "false"),
         ("v8_enable_i18n_support", icu),
         ("v8_enable_verify_predictable", "false"),
@@ -3363,6 +3363,9 @@ void Isolate::Delete(Isolate* isolate) {
   Isolate* saved_isolate = isolate->TryGetCurrent();
   SetIsolateThreadLocals(isolate, nullptr);
   isolate->set_thread_id(ThreadId::Current());
+  isolate->thread_local_top()->stack_ =
+      saved_isolate ? saved_isolate->thread_local_top()->stack_
+                    : ::heap::base::Stack(base::Stack::GetStackStart());
 
   bool owns_shared_isolate = isolate->owns_shared_isolate_;
   Isolate* maybe_shared_isolate = isolate->shared_isolate_;
@@ -37,12 +37,14 @@ void ThreadLocalTop::Clear() {
   current_embedder_state_ = nullptr;
   failed_access_check_callback_ = nullptr;
   thread_in_wasm_flag_address_ = kNullAddress;
+  stack_ = ::heap::base::Stack();
 }
 
 void ThreadLocalTop::Initialize(Isolate* isolate) {
   Clear();
   isolate_ = isolate;
   thread_id_ = ThreadId::Current();
+  stack_.SetStackStart(base::Stack::GetStackStart());
 #if V8_ENABLE_WEBASSEMBLY
   thread_in_wasm_flag_address_ = reinterpret_cast<Address>(
       trap_handler::GetThreadInWasmThreadLocalAddress());
@@ -10,6 +10,7 @@
 #include "include/v8-unwinder.h"
 #include "src/common/globals.h"
 #include "src/execution/thread-id.h"
+#include "src/heap/base/stack.h"
 #include "src/objects/contexts.h"
 #include "src/utils/utils.h"
 
@@ -29,7 +30,7 @@ class ThreadLocalTop {
   // TODO(all): This is not particularly beautiful. We should probably
   // refactor this to really consist of just Addresses and 32-bit
   // integer fields.
-  static constexpr uint32_t kSizeInBytes = 25 * kSystemPointerSize;
+  static constexpr uint32_t kSizeInBytes = 27 * kSystemPointerSize;
 
   // Does early low-level initialization that does not depend on the
   // isolate being present.
@@ -146,6 +147,9 @@ class ThreadLocalTop {
 
   // Address of the thread-local "thread in wasm" flag.
   Address thread_in_wasm_flag_address_;
+
+  // Stack information.
+  ::heap::base::Stack stack_;
 };
 
 }  // namespace internal
@@ -414,6 +414,11 @@ DEFINE_BOOL_READONLY(conservative_stack_scanning,
                      V8_ENABLE_CONSERVATIVE_STACK_SCANNING_BOOL,
                      "use conservative stack scanning")
 
+#if V8_ENABLE_WEBASSEMBLY
+DEFINE_NEG_IMPLICATION(conservative_stack_scanning,
+                       experimental_wasm_stack_switching)
+#endif  // V8_ENABLE_WEBASSEMBLY
+
 #ifdef V8_ENABLE_INNER_POINTER_RESOLUTION_OSB
 #define V8_ENABLE_INNER_POINTER_RESOLUTION_OSB_BOOL true
 #else
@@ -163,5 +163,47 @@ void Stack::IteratePointersUnsafe(StackVisitor* visitor,
   IteratePointersImpl(this, visitor, reinterpret_cast<intptr_t*>(stack_end));
 }
 
+namespace {
+
+#ifdef DEBUG
+
+bool IsOnCurrentStack(const void* ptr) {
+  DCHECK_NOT_NULL(ptr);
+  const void* current_stack_start = v8::base::Stack::GetStackStart();
+  const void* current_stack_top = v8::base::Stack::GetCurrentStackPosition();
+  return ptr <= current_stack_start && ptr >= current_stack_top;
+}
+
+bool IsValidMarker(const void* stack_start, const void* stack_marker) {
+  const void* current_stack_top = v8::base::Stack::GetCurrentStackPosition();
+  return stack_marker <= stack_start && stack_marker >= current_stack_top;
+}
+
+#endif  // DEBUG
+
+}  // namespace
+
+// In the following three methods, the stored stack start needs not coincide
+// with the current (actual) stack start (e.g., in case it was explicitly set
+// to a lower address, in tests) but has to be inside the current stack.
+
+void Stack::set_marker(const void* stack_marker) {
+  DCHECK(IsOnCurrentStack(stack_start_));
+  DCHECK_NOT_NULL(stack_marker);
+  DCHECK(IsValidMarker(stack_start_, stack_marker));
+  stack_marker_ = stack_marker;
+}
+
+void Stack::clear_marker() {
+  DCHECK(IsOnCurrentStack(stack_start_));
+  stack_marker_ = nullptr;
+}
+
+const void* Stack::get_marker() const {
+  DCHECK(IsOnCurrentStack(stack_start_));
+  DCHECK_NOT_NULL(stack_marker_);
+  return stack_marker_;
+}
+
 }  // namespace base
 }  // namespace heap
@@ -46,8 +46,14 @@ class V8_EXPORT_PRIVATE Stack final {
   // Returns the start of the stack.
   const void* stack_start() const { return stack_start_; }
 
+  // Sets, clears and gets the stack marker.
+  void set_marker(const void* stack_marker);
+  void clear_marker();
+  const void* get_marker() const;
+
  private:
   const void* stack_start_;
+  const void* stack_marker_ = nullptr;
 };
 
 }  // namespace heap::base
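Per the commit message, the marker bounds conservative scanning at the recorded position instead of the live stack top. A rough, illustrative sketch of how a scan loop might consume the new API above; this helper is not part of the commit, it assumes a downward-growing stack, an already-set marker, and the StackVisitor::VisitPointer(const void*) interface.

#include <cstdint>

#include "src/heap/base/stack.h"

void ScanUpToMarker(const ::heap::base::Stack& stack,
                    ::heap::base::StackVisitor* visitor) {
  // Walk word by word from the marker (lower address) up to the recorded
  // stack start (higher address), reporting every word as a possible pointer.
  const uintptr_t* slot = static_cast<const uintptr_t*>(stack.get_marker());
  const uintptr_t* top = static_cast<const uintptr_t*>(stack.stack_start());
  for (; slot < top; ++slot) {
    visitor->VisitPointer(reinterpret_cast<const void*>(*slot));
  }
}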
@@ -1641,6 +1641,10 @@ bool Heap::CollectGarbage(AllocationSpace space,
       this, IsYoungGenerationCollector(collector) ? "MinorGC" : "MajorGC",
       GarbageCollectionReasonToString(gc_reason));
 
+  auto stack_marker = v8::base::Stack::GetCurrentStackPosition();
+#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+  stack().set_marker(stack_marker);
+#endif
   if (collector == GarbageCollector::MARK_COMPACTOR && cpp_heap()) {
     // CppHeap needs a stack marker at the top of all entry points to allow
     // deterministic passes over the stack. E.g., a verifier that should only
@@ -1649,7 +1653,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
     // TODO(chromium:1056170): Consider adding a component that keeps track
     // of relevant GC stack regions where interesting pointers can be found.
     static_cast<v8::internal::CppHeap*>(cpp_heap())
-        ->SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());
+        ->SetStackEndOfCurrentGC(stack_marker);
   }
 
   GarbageCollectionPrologue(gc_reason, gc_callback_flags);
@@ -1734,6 +1738,10 @@ bool Heap::CollectGarbage(AllocationSpace space,
     } else {
      tracer()->StopFullCycleIfNeeded();
     }
+
+#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+    stack().clear_marker();
+#endif
   }
 
   // Part 3: Invoke all callbacks which should happen after the actual garbage
@@ -2331,6 +2339,10 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   DCHECK(incremental_marking_->IsStopped());
   DCHECK_NOT_NULL(isolate()->global_safepoint());
 
+#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+  stack().set_marker(v8::base::Stack::GetCurrentStackPosition());
+#endif
+
   isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
     client->heap()->FreeSharedLinearAllocationAreas();
@@ -2361,6 +2373,10 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   tracer()->StopObservablePause();
   tracer()->UpdateStatistics(collector);
   tracer()->StopFullCycleIfNeeded();
+
+#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
+  stack().clear_marker();
+#endif
 }
 
 void Heap::CompleteSweepingYoung(GarbageCollector collector) {
@@ -5514,7 +5530,6 @@ void Heap::SetUpSpaces(LinearAllocationArea& new_allocation_info,
   tracer_.reset(new GCTracer(this));
   array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
   gc_idle_time_handler_.reset(new GCIdleTimeHandler());
-  stack_ = std::make_unique<::heap::base::Stack>(base::Stack::GetStackStart());
   memory_measurement_.reset(new MemoryMeasurement(isolate()));
   if (!IsShared()) memory_reducer_.reset(new MemoryReducer(this));
   if (V8_UNLIKELY(TracingFlags::is_gc_stats_enabled())) {
@@ -5745,10 +5760,12 @@ const cppgc::EmbedderStackState* Heap::overriden_stack_state() const {
 }
 
 void Heap::SetStackStart(void* stack_start) {
-  stack_->SetStackStart(stack_start);
+  stack().SetStackStart(stack_start);
 }
 
-::heap::base::Stack& Heap::stack() { return *stack_.get(); }
+::heap::base::Stack& Heap::stack() {
+  return isolate_->thread_local_top()->stack_;
+}
 
 void Heap::RegisterExternallyReferencedObject(Address* location) {
   GlobalHandles::MarkTraced(location);
@@ -5893,7 +5910,6 @@ void Heap::TearDown() {
   concurrent_marking_.reset();
 
   gc_idle_time_handler_.reset();
-  stack_.reset();
   memory_measurement_.reset();
   allocation_tracker_for_debugging_.reset();
 
@@ -28,7 +28,6 @@
 #include "src/common/globals.h"
 #include "src/heap/allocation-observer.h"
 #include "src/heap/allocation-result.h"
-#include "src/heap/base/stack.h"
 #include "src/heap/gc-callbacks.h"
 #include "src/heap/heap-allocator.h"
 #include "src/heap/marking-state.h"
@@ -2297,7 +2296,6 @@ class Heap {
   std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
   std::unique_ptr<AllocationTrackerForDebugging>
       allocation_tracker_for_debugging_;
-  std::unique_ptr<::heap::base::Stack> stack_;
 
   // This object controls virtual space reserved for code on the V8 heap. This
   // is only valid for 64-bit architectures where kRequiresCodeRange.
@@ -13012,9 +13012,6 @@ void ApiTestFuzzer::Run() {
   {
     // ... get the V8 lock
     v8::Locker locker(CcTest::isolate());
-    // ... set the isolate stack to this thread
-    CcTest::i_isolate()->heap()->SetStackStart(
-        v8::base::Stack::GetStackStart());
     // ... and start running the test.
     CallTest();
   }
@@ -13082,9 +13079,6 @@ void ApiTestFuzzer::ContextSwitch() {
     v8::Unlocker unlocker(CcTest::isolate());
     // Wait till someone starts us again.
     gate_.Wait();
-    // Set the isolate stack to this thread.
-    CcTest::i_isolate()->heap()->SetStackStart(
-        v8::base::Stack::GetStackStart());
     // And we're off.
   }
 }
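The two deletions above reflect the new model: the stack start is recorded per thread when the thread enters the isolate, so the fuzzer threads no longer have to notify the heap explicitly. A condensed sketch of the resulting pattern; RunTestBody and the exact scope setup are hypothetical stand-ins, not code from this commit.

#include "include/v8-isolate.h"
#include "include/v8-locker.h"

void RunTestBody();  // hypothetical, provided elsewhere

void RunOnThisThread(v8::Isolate* isolate) {
  // Taking the lock associates this thread with the isolate; the thread-local
  // stack information is set up when the thread enters, so the explicit
  // Heap::SetStackStart() calls removed above are no longer needed.
  v8::Locker locker(isolate);
  v8::Isolate::Scope isolate_scope(isolate);
  RunTestBody();
}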
@@ -1574,6 +1574,14 @@
   'shared-memory/*': [SKIP],
 }],  # single_generation
 
+################################################################################
+['conservative_stack_scanning', {
+  # TODO(v8:13257): Conservative stack scanning is not currently compatible
+  # with stack switching.
+  'wasm/stack-switching': [SKIP],
+  'wasm/stack-switching-export': [SKIP],
+}],  # conservative_stack_scanning
+
 ################################################################################
 ['third_party_heap', {
   # Requires local heaps
@@ -549,6 +549,8 @@ class BaseTestRunner(object):
             self.build_config.cfi_vptr,
         "component_build":
             self.build_config.component_build,
+        "conservative_stack_scanning":
+            self.build_config.conservative_stack_scanning,
         "control_flow_integrity":
             self.build_config.control_flow_integrity,
         "concurrent_marking":
@@ -24,6 +24,8 @@ class BuildConfig(object):
     self.asan = build_config['is_asan']
     self.cfi_vptr = build_config['is_cfi']
     self.component_build = build_config['is_component_build']
+    self.conservative_stack_scanning = build_config[
+        'v8_enable_conservative_stack_scanning']
     self.control_flow_integrity = build_config['v8_control_flow_integrity']
     self.concurrent_marking = build_config['v8_enable_concurrent_marking']
     self.single_generation = build_config['v8_enable_single_generation']
@@ -17,6 +17,7 @@
   "v8_enable_i18n_support": true,
   "v8_enable_verify_predictable": false,
   "v8_target_cpu": "x64",
+  "v8_enable_conservative_stack_scanning": false,
   "v8_enable_concurrent_marking": true,
   "v8_enable_verify_csa": false,
   "v8_enable_lite_mode": false,
@@ -17,6 +17,7 @@
   "v8_enable_i18n_support": true,
   "v8_enable_verify_predictable": false,
   "v8_target_cpu": "x64",
+  "v8_enable_conservative_stack_scanning": false,
   "v8_enable_concurrent_marking": true,
   "v8_enable_verify_csa": false,
   "v8_enable_lite_mode": false,
@@ -17,6 +17,7 @@
   "v8_enable_i18n_support": true,
   "v8_enable_verify_predictable": false,
   "v8_target_cpu": "x64",
+  "v8_enable_conservative_stack_scanning": false,
   "v8_enable_concurrent_marking": true,
   "v8_enable_verify_csa": false,
   "v8_enable_lite_mode": false,
@@ -17,6 +17,7 @@
   "v8_enable_i18n_support": true,
   "v8_enable_verify_predictable": false,
   "v8_target_cpu": "x64",
+  "v8_enable_conservative_stack_scanning": false,
   "v8_enable_concurrent_marking": true,
   "v8_enable_verify_csa": false,
   "v8_enable_lite_mode": false,
@@ -17,6 +17,7 @@
   "v8_enable_i18n_support": true,
   "v8_enable_verify_predictable": false,
   "v8_target_cpu": "x64",
+  "v8_enable_conservative_stack_scanning": false,
   "v8_enable_concurrent_marking": true,
   "v8_enable_verify_csa": false,
   "v8_enable_lite_mode": false,