[heap] Refactor the stack object
The stack object is primarily used for conservative stack scanning, both
by the V8 and C++ garbage collectors. This CL introduces the notion of a
"stack context", which comprises the current stack marker (the lowest
address on the stack that may contain interesting pointers) and the
values of the saved registers. It simplifies the way in which iteration
through the stack is invoked: the context must have previously been
saved and iteration always uses the stack marker.

Bug: v8:13257
Bug: v8:13493
Change-Id: Ia99ef702eb6ac67a3bcd006f0edf5e57d9975ab2
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/4017512
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Omer Katz <omerkatz@chromium.org>
Commit-Queue: Nikolaos Papaspyrou <nikolaos@chromium.org>
Cr-Commit-Position: refs/heads/main@{#84303}
Parent: 8016f5c667
Commit: 9554743a0b
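Reading guide for the diff below: the old marker plumbing (set_marker/get_marker/clear_marker, IteratePointersUnsafe, PushAllRegistersAndInvokeCallback) is folded into a save/iterate/clear protocol. The following is a condensed, hypothetical sketch of the new call sequence — only the Stack method names come from this CL; ScanForRoots and its parameters are made up for illustration:

void ScanForRoots(heap::base::Stack& stack, heap::base::StackVisitor* visitor) {
  stack.SaveContext();             // Set the stack marker to the current stack
                                   // top and save the callee-saved registers;
                                   // nested saves only bump a counter.
  stack.IteratePointers(visitor);  // Conservatively visit the saved registers
                                   // and the stack between the marker and the
                                   // stack start.
  stack.ClearContext();            // Drop the context (or unwind one nesting
                                   // level).
}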
@@ -3338,7 +3338,7 @@ void Isolate::Delete(Isolate* isolate) {
   SetIsolateThreadLocals(isolate, nullptr);
   isolate->set_thread_id(ThreadId::Current());
   isolate->thread_local_top()->stack_ =
-      saved_isolate ? saved_isolate->thread_local_top()->stack_
+      saved_isolate ? std::move(saved_isolate->thread_local_top()->stack_)
                     : ::heap::base::Stack(base::Stack::GetStackStart());

   bool owns_shared_isolate = isolate->owns_shared_isolate_;
@@ -3346,6 +3346,11 @@ void Isolate::Delete(Isolate* isolate) {

   isolate->Deinit();

+  // Restore the saved isolate's stack.
+  if (saved_isolate)
+    saved_isolate->thread_local_top()->stack_ =
+        std::move(isolate->thread_local_top()->stack_);
+
 #ifdef DEBUG
   non_disposed_isolates_--;
 #endif  // DEBUG
@@ -21,7 +21,7 @@
 // http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html

 // 8 32-bit registers = 8 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 8,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 8,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 4, "Mismatch in word size");
@@ -19,7 +19,7 @@
 // https://en.wikipedia.org/wiki/Calling_convention#ARM_(A64)

 // 11 64-bit registers = 11 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 11,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 11,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -17,7 +17,7 @@
 // Source: https://en.wikipedia.org/wiki/X86_calling_conventions#cdecl

 // 3 32-bit registers = 3 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 3,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 3,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 4, "Mismatch in word size");
@@ -14,7 +14,7 @@
 // GN toolchain (e.g. ChromeOS) and not provide them.

 // 11 64-bit registers = 11 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 11,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 11,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -14,7 +14,7 @@
 // GN toolchain (e.g. ChromeOS) and not provide them.

 // 9 64-bit registers = 9 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 9,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 9,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -22,7 +22,7 @@
 #ifdef __PPC64__

 // 20 64-bit registers = 20 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 20,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 20,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -66,7 +66,7 @@ asm(".align 2 \n"
 #else  // !__PPC64__

 // 20 32-bit registers = 20 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 20,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 20,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 4, "Mismatch in word size");
@@ -15,7 +15,7 @@

 #if V8_HOST_ARCH_RISCV64
 // 12 64-bit registers = 12 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 12,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 12,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -41,7 +41,7 @@ asm(".global SaveCalleeSavedRegisters \n"
 "    jr ra \n");
 #elif V8_HOST_ARCH_RISCV32
 // 12 32-bit registers = 12 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 12,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 12,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 4, "Mismatch in word size");
@@ -17,7 +17,7 @@
 // http://refspecs.linuxbase.org/ELF/zSeries/lzsabi0_zSeries.html

 // 10 64-bit registers = 10 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 10,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 10,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -24,7 +24,7 @@

 // 7 64-bit registers + 1 for alignment purposes = 8 * 1 = 8 intptr_t
 // 10 128-bit registers = 10 * 2 = 20 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 28,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 28,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
@@ -62,7 +62,7 @@ asm(".globl SaveCalleeSavedRegisters \n"
 // Source: https://github.com/hjl-tools/x86-psABI/wiki/x86-64-psABI-1.0.pdf

 // 5 64-bit registers = 5 intptr_t
-static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters == 5,
+static_assert(heap::base::Stack::NumberOfCalleeSavedRegisters() == 5,
               "Mismatch in the number of callee-saved registers");
 static_assert(sizeof(intptr_t) == 8, "Mismatch in word size");
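All of the per-architecture static_asserts above change for the same mechanical reason: the register count used to be a public static constexpr data member of Stack and is now a constexpr accessor forwarding to the private Context struct (see the stack.h hunks further down), so call parentheses become required. A reduced, self-contained illustration — the count 8 here is arbitrary, not tied to any architecture:

// Before: public constant, asserted directly.
//   static constexpr int NumberOfCalleeSavedRegisters = 8;
//   static_assert(Stack::NumberOfCalleeSavedRegisters == 8, "...");
//
// After: constexpr accessor over a private, architecture-specific constant.
struct Stack {
  static constexpr int NumberOfCalleeSavedRegisters() {
    return Context::kNumberOfCalleeSavedRegisters;
  }
 private:
  struct Context {
    static constexpr int kNumberOfCalleeSavedRegisters = 8;  // arch-specific
  };
};
static_assert(Stack::NumberOfCalleeSavedRegisters() == 8,
              "still usable in constant expressions");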
@@ -110,9 +110,10 @@ void IterateUnsafeStackIfNecessary(StackVisitor* visitor) {
 #endif  // defined(__has_feature)
 }

-// Called by the trampoline that pushes registers on the stack. This method
-// should never be inlined to ensure that a possible redzone cannot contain
-// any data that needs to be scanned.
+}  // namespace
+
+// This method should never be inlined to ensure that a possible redzone cannot
+// contain any data that needs to be scanned.
 V8_NOINLINE
 // No ASAN support as method accesses redzones while walking the stack.
 DISABLE_ASAN
@@ -120,36 +121,36 @@ DISABLE_ASAN
 // thread, e.g., for interrupt handling. Atomic reads are not enough as the
 // other thread may use a lock to synchronize the access.
 DISABLE_TSAN
-void IteratePointersImpl(StackVisitor* visitor, const void* stack_start,
-                         const void* stack_end,
-                         const Stack::CalleeSavedRegisters* registers) {
+void Stack::IteratePointers(StackVisitor* visitor) const {
+  DCHECK_NOT_NULL(stack_start_);
+  DCHECK(context_);
+  DCHECK_NOT_NULL(context_->stack_marker);
+
 #ifdef V8_USE_ADDRESS_SANITIZER
   const void* asan_fake_stack = __asan_get_current_fake_stack();
 #endif  // V8_USE_ADDRESS_SANITIZER

   // Iterate through the registers.
-  if (registers != nullptr) {
-    for (intptr_t value : registers->buffer) {
-      const void* address = reinterpret_cast<const void*>(value);
-      MSAN_MEMORY_IS_INITIALIZED(&address, sizeof(address));
-      if (address == nullptr) continue;
-      visitor->VisitPointer(address);
+  for (intptr_t value : context_->registers) {
+    const void* address = reinterpret_cast<const void*>(value);
+    MSAN_MEMORY_IS_INITIALIZED(&address, sizeof(address));
+    if (address == nullptr) continue;
+    visitor->VisitPointer(address);
 #ifdef V8_USE_ADDRESS_SANITIZER
-      IterateAsanFakeFrameIfNecessary(visitor, asan_fake_stack, stack_start,
-                                      stack_end, address);
+    IterateAsanFakeFrameIfNecessary(visitor, asan_fake_stack, stack_start_,
+                                    context_->stack_marker, address);
 #endif  // V8_USE_ADDRESS_SANITIZER
-    }
   }

   // Iterate through the stack.
   // All supported platforms should have their stack aligned to at least
   // sizeof(void*).
   constexpr size_t kMinStackAlignment = sizeof(void*);
-  CHECK_EQ(0u,
-           reinterpret_cast<uintptr_t>(stack_end) & (kMinStackAlignment - 1));
+  CHECK_EQ(0u, reinterpret_cast<uintptr_t>(context_->stack_marker) &
+                   (kMinStackAlignment - 1));
   for (const void* const* current =
-           reinterpret_cast<const void* const*>(stack_end);
-       current < stack_start; ++current) {
+           reinterpret_cast<const void* const*>(context_->stack_marker);
+       current < stack_start_; ++current) {
     // MSAN: Instead of unpoisoning the whole stack, the slot's value is copied
     // into a local which is unpoisoned.
     const void* address = *current;
@@ -157,44 +158,18 @@ void IteratePointersImpl(StackVisitor* visitor, const void* stack_start,
     if (address == nullptr) continue;
     visitor->VisitPointer(address);
 #ifdef V8_USE_ADDRESS_SANITIZER
-    IterateAsanFakeFrameIfNecessary(visitor, asan_fake_stack, stack_start,
-                                    stack_end, address);
+    IterateAsanFakeFrameIfNecessary(visitor, asan_fake_stack, stack_start_,
+                                    context_->stack_marker, address);
 #endif  // V8_USE_ADDRESS_SANITIZER
   }
-}
-
-}  // namespace
-
-void Stack::IteratePointers(StackVisitor* visitor) const {
-  DCHECK_NOT_NULL(stack_start_);
-  PushAllRegistersAndInvokeCallback(visitor, stack_start_,
-                                    &IteratePointersImpl);
   // No need to deal with callee-saved registers as they will be kept alive by
   // the regular conservative stack iteration.
   // TODO(chromium:1056170): Add support for SIMD and/or filtering.
   IterateUnsafeStackIfNecessary(visitor);
 }

-void Stack::IteratePointersUnsafe(StackVisitor* visitor,
-                                  const void* stack_end) const {
-  IteratePointersImpl(visitor, stack_start_, stack_end, nullptr);
-}
-
 namespace {
 // Function with architecture-specific implementation:
 // Saves all callee-saved registers in the specified buffer.
 extern "C" void SaveCalleeSavedRegisters(intptr_t* buffer);
-}  // namespace
-
-V8_NOINLINE void Stack::PushAllRegistersAndInvokeCallback(
-    StackVisitor* visitor, const void* stack_start, Callback callback) {
-  Stack::CalleeSavedRegisters registers;
-  SaveCalleeSavedRegisters(registers.buffer.data());
-  callback(visitor, stack_start, v8::base::Stack::GetCurrentStackPosition(),
-           &registers);
-}
-
-namespace {

 #ifdef DEBUG
@@ -205,34 +180,43 @@ bool IsOnCurrentStack(const void* ptr) {
   return ptr <= current_stack_start && ptr >= current_stack_top;
 }

-bool IsValidMarker(const void* stack_start, const void* stack_marker) {
-  const void* current_stack_top = v8::base::Stack::GetCurrentStackPosition();
-  return stack_marker <= stack_start && stack_marker >= current_stack_top;
-}
-
 #endif  // DEBUG

 }  // namespace

-// In the following three methods, the stored stack start needs not coincide
-// with the current (actual) stack start (e.g., in case it was explicitly set to
-// a lower address, in tests) but has to be inside the current stack.
-
-void Stack::set_marker(const void* stack_marker) {
-  DCHECK(IsOnCurrentStack(stack_start_));
-  DCHECK_NOT_NULL(stack_marker);
-  DCHECK(IsValidMarker(stack_start_, stack_marker));
-  stack_marker_ = stack_marker;
+void Stack::SaveContext(bool check_invariant) {
+  // TODO(v8:13493): Remove the method's parameter and the implication as soon
+  // as IsOnCurrentStack is compatible with stack switching.
+  DCHECK_IMPLIES(check_invariant, IsOnCurrentStack(stack_start_));
+  // Contexts can be nested but the marker and the registers are only saved on
+  // the first invocation.
+  if (context_) {
+    ++context_->nesting_counter;
+    return;
+  }
+  // Allocate the context and set the marker.
+  const void* stack_top = v8::base::Stack::GetCurrentStackPosition();
+  DCHECK_NOT_NULL(stack_top);
+  context_ = std::make_unique<Context>(stack_top);
+  // TODO(v8:13493): Remove the implication as soon as IsValidMarker is
+  // compatible with stack switching.
+  DCHECK_IMPLIES(check_invariant, stack_top <= stack_start_);
+  context_->stack_marker = stack_top;
+  // Save the registers.
+  SaveCalleeSavedRegisters(context_->registers.data());
 }

-void Stack::clear_marker() {
-  DCHECK(IsOnCurrentStack(stack_start_));
-  stack_marker_ = nullptr;
-}
-
-const void* Stack::get_marker() const {
-  DCHECK_NOT_NULL(stack_marker_);
-  return stack_marker_;
+void Stack::ClearContext(bool check_invariant) {
+  // TODO(v8:13493): Remove the method's parameter and the implication as soon
+  // as IsOnCurrentStack is compatible with stack switching.
+  DCHECK_IMPLIES(check_invariant, IsOnCurrentStack(stack_start_));
+  DCHECK(context_);
+  // Skip clearing the context if that was a nested invocation.
+  if (context_->nesting_counter > 0) {
+    --context_->nesting_counter;
+    return;
+  }
+  context_.reset();
 }

 }  // namespace heap::base
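The nesting counter above makes save/clear pairs re-entrant: only the outermost SaveContext records the marker and registers, and only the matching outermost ClearContext releases them. A hypothetical sequence (NestedSaves is a made-up helper, not from the tree):

void NestedSaves(heap::base::Stack* stack) {
  stack->SaveContext();   // Outermost save: allocates the context, records the
                          // marker and the callee-saved registers.
  stack->SaveContext();   // Nested save: nesting_counter 0 -> 1; marker and
                          // registers are not saved again.
  stack->ClearContext();  // Nested clear: nesting_counter 1 -> 0; context is
                          // kept alive.
  stack->ClearContext();  // Outermost clear: context_.reset(); the marker and
                          // saved registers are gone.
}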
@@ -5,6 +5,8 @@
 #ifndef V8_HEAP_BASE_STACK_H_
 #define V8_HEAP_BASE_STACK_H_

+#include <memory>
+
 #include "src/base/macros.h"
 #include "src/base/platform/platform.h"
@@ -20,53 +22,17 @@ class StackVisitor {
 // - native stack;
 // - ASAN/MSAN;
 // - SafeStack: https://releases.llvm.org/10.0.0/tools/clang/docs/SafeStack.html
 //
 // TODO(chromium:1056170): Consider adding a component that keeps track
 // of relevant GC stack regions where interesting pointers can be found.
 class V8_EXPORT_PRIVATE Stack final {
  public:
-  // The following constant is architecture-specific. The size of the buffer
-  // for storing the callee-saved registers is going to be equal to
-  // NumberOfCalleeSavedRegisters * sizeof(intptr_t).
-
-#if V8_HOST_ARCH_IA32
-  // Must be consistent with heap/base/asm/ia32/.
-  static constexpr int NumberOfCalleeSavedRegisters = 3;
-#elif V8_HOST_ARCH_X64
-#ifdef _WIN64
-  // Must be consistent with heap/base/asm/x64/.
-  static constexpr int NumberOfCalleeSavedRegisters = 28;
-#else  // !_WIN64
-  // Must be consistent with heap/base/asm/x64/.
-  static constexpr int NumberOfCalleeSavedRegisters = 5;
-#endif  // !_WIN64
-#elif V8_HOST_ARCH_ARM64
-  // Must be consistent with heap/base/asm/arm64/.
-  static constexpr int NumberOfCalleeSavedRegisters = 11;
-#elif V8_HOST_ARCH_ARM
-  // Must be consistent with heap/base/asm/arm/.
-  static constexpr int NumberOfCalleeSavedRegisters = 8;
-#elif V8_HOST_ARCH_PPC64
-  // Must be consistent with heap/base/asm/ppc/.
-  static constexpr int NumberOfCalleeSavedRegisters = 20;
-#elif V8_HOST_ARCH_PPC
-  // Must be consistent with heap/base/asm/ppc/.
-  static constexpr int NumberOfCalleeSavedRegisters = 20;
-#elif V8_HOST_ARCH_MIPS64
-  // Must be consistent with heap/base/asm/mips64el/.
-  static constexpr int NumberOfCalleeSavedRegisters = 9;
-#elif V8_HOST_ARCH_LOONG64
-  // Must be consistent with heap/base/asm/loong64/.
-  static constexpr int NumberOfCalleeSavedRegisters = 11;
-#elif V8_HOST_ARCH_S390
-  // Must be consistent with heap/base/asm/s390/.
-  static constexpr int NumberOfCalleeSavedRegisters = 10;
-#elif V8_HOST_ARCH_RISCV32
-  // Must be consistent with heap/base/asm/riscv/.
-  static constexpr int NumberOfCalleeSavedRegisters = 12;
-#elif V8_HOST_ARCH_RISCV64
-  // Must be consistent with heap/base/asm/riscv/.
-  static constexpr int NumberOfCalleeSavedRegisters = 12;
-#else
-#error Unknown architecture.
-#endif
+  // The size of the buffer for storing the callee-saved registers is going to
+  // be equal to kNumberOfCalleeSavedRegisters * sizeof(intptr_t).
+  // This is architecture-specific.
+  static constexpr int NumberOfCalleeSavedRegisters() {
+    return Context::kNumberOfCalleeSavedRegisters;
+  }

   explicit Stack(const void* stack_start = nullptr);
@@ -76,48 +42,75 @@ class V8_EXPORT_PRIVATE Stack final {
   // Returns true if |slot| is part of the stack and false otherwise.
   bool IsOnStack(const void* slot) const;

-  // Word-aligned iteration of the stack. Callee-saved registers are pushed to
-  // the stack before iterating pointers. Slot values are passed on to
-  // `visitor`.
-  void IteratePointers(StackVisitor* visitor) const;
+  // Word-aligned iteration of the stack and the saved registers.
+  // Slot values are passed on to `visitor`.
+  V8_NOINLINE void IteratePointers(StackVisitor* visitor) const;

-  // Word-aligned iteration of the stack, starting at `stack_end`. Slot values
-  // are passed on to `visitor`. This is intended to be used with verifiers that
-  // only visit a subset of the stack of IteratePointers().
-  //
-  // **Ignores:**
-  // - Callee-saved registers.
-  // - SafeStack.
-  void IteratePointersUnsafe(StackVisitor* visitor,
-                             const void* stack_end) const;
+  // Saves and clears the stack context, i.e., it sets the stack marker and
+  // saves the registers.
+  // TODO(v8:13493): The parameter is for suppressing the invariant check in
+  // the case of WASM stack switching. It will be removed as soon as context
+  // saving becomes compatible with stack switching.
+  void SaveContext(bool check_invariant = true);
+  void ClearContext(bool check_invariant = true);

   // Returns the start of the stack.
   const void* stack_start() const { return stack_start_; }

+ private:
+  struct Context {
+    // The following constant is architecture-specific.
+#if V8_HOST_ARCH_IA32
+    // Must be consistent with heap/base/asm/ia32/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 3;
+#elif V8_HOST_ARCH_X64
+#ifdef _WIN64
+    // Must be consistent with heap/base/asm/x64/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 28;
+#else  // !_WIN64
+    // Must be consistent with heap/base/asm/x64/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 5;
+#endif  // !_WIN64
+#elif V8_HOST_ARCH_ARM64
+    // Must be consistent with heap/base/asm/arm64/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 11;
+#elif V8_HOST_ARCH_ARM
+    // Must be consistent with heap/base/asm/arm/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 8;
+#elif V8_HOST_ARCH_PPC64
+    // Must be consistent with heap/base/asm/ppc/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 20;
+#elif V8_HOST_ARCH_PPC
+    // Must be consistent with heap/base/asm/ppc/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 20;
+#elif V8_HOST_ARCH_MIPS64
+    // Must be consistent with heap/base/asm/mips64el/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 9;
+#elif V8_HOST_ARCH_LOONG64
+    // Must be consistent with heap/base/asm/loong64/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 11;
+#elif V8_HOST_ARCH_S390
+    // Must be consistent with heap/base/asm/s390/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 10;
+#elif V8_HOST_ARCH_RISCV32
+    // Must be consistent with heap/base/asm/riscv/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 12;
+#elif V8_HOST_ARCH_RISCV64
+    // Must be consistent with heap/base/asm/riscv/.
+    static constexpr int kNumberOfCalleeSavedRegisters = 12;
+#else
+#error Unknown architecture.
+#endif

-  // Sets, clears and gets the stack marker.
-  void set_marker(const void* stack_marker);
-  void clear_marker();
-  const void* get_marker() const;
+    explicit Context(const void* marker) : stack_marker(marker) {}

-  // Mechanism for saving the callee-saved registers, required for conservative
-  // stack scanning.
-  struct CalleeSavedRegisters {
+    int nesting_counter = 0;
+    const void* stack_marker;
     // We always double-align this buffer, to support longer registers,
     // e.g., 128-bit registers in WIN64.
     alignas(2 * sizeof(intptr_t))
-        std::array<intptr_t, NumberOfCalleeSavedRegisters> buffer;
+        std::array<intptr_t, kNumberOfCalleeSavedRegisters> registers;
   };

-  using Callback = void (*)(StackVisitor*, const void*, const void*,
-                            const CalleeSavedRegisters* registers);
-
-  static V8_NOINLINE void PushAllRegistersAndInvokeCallback(
-      StackVisitor* visitor, const void* stack_start, Callback callback);
-
- private:
   const void* stack_start_;
-  const void* stack_marker_ = nullptr;
+  std::unique_ptr<Context> context_;
 };

 }  // namespace heap::base
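The alignas on the register buffer exists so that wider registers (e.g., the 128-bit registers mentioned for Win64) can be stored with aligned stores. An illustrative, self-contained model of the sizing and alignment, using the Win64 count shown above — Win64RegisterBuffer is a made-up name, not the actual Context struct:

#include <array>
#include <stdint.h>

// On a 64-bit target: 8 slots (7 GP registers + 1 padding) plus
// 10 * 2 slots (ten 128-bit registers) = 28 intptr_t slots.
struct Win64RegisterBuffer {
  alignas(2 * sizeof(intptr_t)) std::array<intptr_t, 28> registers;
};
static_assert(alignof(Win64RegisterBuffer) == 2 * sizeof(intptr_t),
              "buffer is double-word aligned for 128-bit stores");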
@@ -16,7 +16,6 @@
 #include "src/base/logging.h"
 #include "src/base/macros.h"
 #include "src/base/optional.h"
-#include "src/base/platform/platform.h"
 #include "src/base/platform/time.h"
 #include "src/execution/isolate-inl.h"
 #include "src/flags/flags.h"
@@ -863,7 +862,7 @@ void CppHeap::TraceEpilogue() {
   const size_t bytes_allocated_in_prefinalizers = ExecutePreFinalizers();
 #if CPPGC_VERIFY_HEAP
   UnifiedHeapMarkingVerifier verifier(*this, *collection_type_);
-  verifier.Run(stack_state_of_prev_gc(), stack_end_of_current_gc(),
+  verifier.Run(stack_state_of_prev_gc(),
                stats_collector()->marked_bytes_on_current_cycle() +
                    bytes_allocated_in_prefinalizers);
 #endif  // CPPGC_VERIFY_HEAP
@@ -943,7 +942,7 @@ void CppHeap::CollectGarbageForTesting(CollectionType collection_type,
   // Finish sweeping in case it is still running.
   sweeper().FinishIfRunning();

-  SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());
+  SaveStackContextScope stack_context_scope(stack());

   if (isolate_) {
     reinterpret_cast<v8::Isolate*>(isolate_)
@@ -183,11 +183,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
     stack_state_of_prev_gc_ = stack_state;
   }

-  uintptr_t stack_end_of_current_gc() const { return stack_end_of_current_gc_; }
-  void SetStackEndOfCurrentGC(uintptr_t stack_end) {
-    stack_end_of_current_gc_ = stack_end;
-  }
-
   void SetInAtomicPauseForTesting(bool value) { in_atomic_pause_ = value; }

   virtual void StartIncrementalGarbageCollectionForTesting() = 0;
@@ -289,10 +284,6 @@ class V8_EXPORT_PRIVATE HeapBase : public cppgc::HeapHandle {
       EmbedderStackState::kNoHeapPointers;
   std::unique_ptr<EmbedderStackState> override_stack_state_;

-  // Marker that signals end of the interesting stack region in which on-heap
-  // pointers can be found.
-  uintptr_t stack_end_of_current_gc_ = 0;
-
   bool in_atomic_pause_ = false;

   int creation_thread_id_ = v8::base::OS::GetCurrentThreadId();
@@ -166,9 +166,10 @@ void Heap::FinalizeGarbageCollection(StackState stack_state) {
   DCHECK(!in_no_gc_scope());
   CHECK(!in_disallow_gc_scope());
   config_.stack_state = stack_state;
-  SetStackEndOfCurrentGC(v8::base::Stack::GetCurrentStackPosition());
   in_atomic_pause_ = true;

+  stack()->SaveContext();
+
 #if defined(CPPGC_YOUNG_GENERATION)
   // Check if the young generation was enabled. We must enable young generation
   // before calling the custom weak callbacks to make sure that the callbacks
@@ -187,7 +188,7 @@ void Heap::FinalizeGarbageCollection(StackState stack_state) {
   const size_t bytes_allocated_in_prefinalizers = ExecutePreFinalizers();
 #if CPPGC_VERIFY_HEAP
   MarkingVerifier verifier(*this, config_.collection_type);
-  verifier.Run(config_.stack_state, stack_end_of_current_gc(),
+  verifier.Run(config_.stack_state,
                stats_collector()->marked_bytes_on_current_cycle() +
                    bytes_allocated_in_prefinalizers);
 #endif  // CPPGC_VERIFY_HEAP
@@ -196,6 +197,8 @@ void Heap::FinalizeGarbageCollection(StackState stack_state) {
 #endif
   USE(bytes_allocated_in_prefinalizers);

+  stack()->ClearContext();
+
 #if defined(CPPGC_YOUNG_GENERATION)
   ResetRememberedSet();
 #endif  // defined(CPPGC_YOUNG_GENERATION)
@@ -45,8 +45,7 @@ MarkingVerifierBase::MarkingVerifierBase(
       collection_type_(collection_type) {}

 void MarkingVerifierBase::Run(
-    StackState stack_state, uintptr_t stack_end,
-    v8::base::Optional<size_t> expected_marked_bytes) {
+    StackState stack_state, v8::base::Optional<size_t> expected_marked_bytes) {
   Traverse(heap_.raw_heap());
 // Avoid verifying the stack when running with TSAN as the TSAN runtime changes
 // stack contents when e.g. working with locks. Specifically, the marker uses
@@ -63,8 +62,7 @@ void MarkingVerifierBase::Run(
 #if !defined(THREAD_SANITIZER) && !defined(CPPGC_POINTER_COMPRESSION)
   if (stack_state == StackState::kMayContainHeapPointers) {
     in_construction_objects_ = &in_construction_objects_stack_;
-    heap_.stack()->IteratePointersUnsafe(
-        this, reinterpret_cast<const void*>(stack_end));
+    heap_.stack()->IteratePointers(this);
     // The objects found through the unsafe iteration are only a subset of the
     // regular iteration as they miss objects held alive only from callee-saved
     // registers that are never pushed on the stack and SafeStack.
@@ -41,7 +41,7 @@ class V8_EXPORT_PRIVATE MarkingVerifierBase
   MarkingVerifierBase(const MarkingVerifierBase&) = delete;
   MarkingVerifierBase& operator=(const MarkingVerifierBase&) = delete;

-  void Run(StackState, uintptr_t, v8::base::Optional<size_t>);
+  void Run(StackState, v8::base::Optional<size_t>);

  protected:
   MarkingVerifierBase(HeapBase&, CollectionType, VerificationState&,
@@ -1668,25 +1668,7 @@ bool Heap::CollectGarbage(AllocationSpace space,
   DevToolsTraceEventScope devtools_trace_event_scope(
       this, IsYoungGenerationCollector(collector) ? "MinorGC" : "MajorGC",
       GarbageCollectionReasonToString(gc_reason));

-  auto stack_marker = v8::base::Stack::GetCurrentStackPosition();
-#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  stack().set_marker(stack_marker);
-#endif
-  if (cpp_heap()) {
-    if (collector == GarbageCollector::MARK_COMPACTOR ||
-        (collector == GarbageCollector::MINOR_MARK_COMPACTOR &&
-         CppHeap::From(cpp_heap())->generational_gc_supported())) {
-      // CppHeap needs a stack marker at the top of all entry points to allow
-      // deterministic passes over the stack. E.g., a verifier that should
-      // only find a subset of references of the marker.
-      //
-      // TODO(chromium:1056170): Consider adding a component that keeps track
-      // of relevant GC stack regions where interesting pointers can be found.
-      static_cast<v8::internal::CppHeap*>(cpp_heap())
-          ->SetStackEndOfCurrentGC(stack_marker);
-    }
-  }
+  SaveStackContextScope stack_context_scope(&stack());

   GarbageCollectionPrologue(gc_reason, gc_callback_flags);
   {
@@ -1770,10 +1752,6 @@ bool Heap::CollectGarbage(AllocationSpace space,
   } else {
     tracer()->StopFullCycleIfNeeded();
   }
-
-#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  stack().clear_marker();
-#endif
 }

 // Part 3: Invoke all callbacks which should happen after the actual garbage
@@ -2355,9 +2333,7 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   DCHECK(incremental_marking_->IsStopped());
   DCHECK_NOT_NULL(isolate()->global_safepoint());

-#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  stack().set_marker(v8::base::Stack::GetCurrentStackPosition());
-#endif
+  SaveStackContextScope stack_context_scope(&stack());

   isolate()->global_safepoint()->IterateClientIsolates([](Isolate* client) {
     client->heap()->FreeSharedLinearAllocationAreas();
@@ -2389,10 +2365,6 @@ void Heap::PerformSharedGarbageCollection(Isolate* initiator,
   tracer()->StopObservablePause();
   tracer()->UpdateStatistics(collector);
   tracer()->StopFullCycleIfNeeded();
-
-#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  stack().clear_marker();
-#endif
 }

 void Heap::CompleteSweepingYoung(GarbageCollector collector) {
@@ -4709,11 +4681,10 @@ void Heap::IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options) {
   v->Synchronize(VisitorSynchronization::kGlobalHandles);

   if (!options.contains(SkipRoot::kStack)) {
-    ScanStackMode mode =
-        options.contains(SkipRoot::kConservativeStack) ? ScanStackMode::kNone
-        : options.contains(SkipRoot::kTopOfStack) ? ScanStackMode::kFromMarker
-                                                  : ScanStackMode::kComplete;
-    IterateStackRoots(v, mode);
+    StackState stack_state = options.contains(SkipRoot::kConservativeStack)
+                                 ? StackState::kNoHeapPointers
+                                 : StackState::kMayContainHeapPointers;
+    IterateStackRoots(v, stack_state);
     v->Synchronize(VisitorSynchronization::kStackRoots);
   }

@@ -4844,9 +4815,9 @@ void Heap::IterateRootsIncludingClients(RootVisitor* v,
   }
 }

-void Heap::IterateRootsFromStackIncludingClient(RootVisitor* v,
-                                                ScanStackMode mode) {
-  IterateStackRoots(v, mode);
+void Heap::IterateRootsFromStackIncludingClients(RootVisitor* v,
+                                                 StackState stack_state) {
+  IterateStackRoots(v, stack_state);

   if (isolate()->is_shared_heap_isolate()) {
     ClientRootVisitor client_root_visitor(v);
@@ -4854,7 +4825,7 @@ void Heap::IterateRootsFromStackIncludingClient(RootVisitor* v,
         [v = &client_root_visitor](Isolate* client) {
           // TODO(v8:13257): We cannot run CSS on client isolates now, as the
           // stack markers will not be correct.
-          client->heap()->IterateStackRoots(v, ScanStackMode::kNone);
+          client->heap()->IterateStackRoots(v, StackState::kNoHeapPointers);
         });
   }
 }
@@ -4882,24 +4853,14 @@ void Heap::IterateBuiltins(RootVisitor* v) {
   static_assert(Builtins::AllBuiltinsAreIsolateIndependent());
 }

-void Heap::IterateStackRoots(RootVisitor* v, ScanStackMode mode) {
+void Heap::IterateStackRoots(RootVisitor* v, StackState stack_state) {
   isolate_->Iterate(v);

 #ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
-  switch (std::min(mode, scan_stack_mode_for_testing_)) {
-    case ScanStackMode::kNone: {
-      break;
-    }
-    case ScanStackMode::kComplete: {
-      ConservativeStackVisitor stack_visitor(isolate(), v);
-      stack().IteratePointers(&stack_visitor);
-      break;
-    }
-    case ScanStackMode::kFromMarker: {
-      ConservativeStackVisitor stack_visitor(isolate(), v);
-      stack().IteratePointersUnsafe(&stack_visitor, stack().get_marker());
-      break;
-    }
+  if (stack_state == StackState::kMayContainHeapPointers &&
+      !disable_conservative_stack_scanning_for_testing_) {
+    ConservativeStackVisitor stack_visitor(isolate(), v);
+    stack().IteratePointers(&stack_visitor);
   }
 #endif  // V8_ENABLE_CONSERVATIVE_STACK_SCANNING
 }
@@ -6487,7 +6448,8 @@ HeapObjectIterator::HeapObjectIterator(
       filtering_(filtering),
       filter_(nullptr),
       space_iterator_(nullptr),
-      object_iterator_(nullptr) {
+      object_iterator_(nullptr),
+      stack_context_scope_(&heap->stack()) {
   heap_->MakeHeapIterable();
   // Start the iteration.
   space_iterator_ = new SpaceIterator(heap_);
@@ -7537,5 +7499,28 @@ CppClassNamesAsHeapObjectNameScope::CppClassNamesAsHeapObjectNameScope(
 CppClassNamesAsHeapObjectNameScope::~CppClassNamesAsHeapObjectNameScope() =
     default;

+SaveStackContextScope::SaveStackContextScope(::heap::base::Stack* stack)
+    : stack_(stack) {
+#if V8_ENABLE_WEBASSEMBLY
+  // TODO(v8:13493): Do not check the stack context invariant if WASM stack
+  // switching is enabled. This will be removed as soon as context saving
+  // becomes compatible with stack switching.
+  stack_->SaveContext(!v8_flags.experimental_wasm_stack_switching);
+#else
+  stack_->SaveContext();
+#endif  // V8_ENABLE_WEBASSEMBLY
+}
+
+SaveStackContextScope::~SaveStackContextScope() {
+#if V8_ENABLE_WEBASSEMBLY
+  // TODO(v8:13493): Do not check the stack context invariant if WASM stack
+  // switching is enabled. This will be removed as soon as context saving
+  // becomes compatible with stack switching.
+  stack_->ClearContext(!v8_flags.experimental_wasm_stack_switching);
+#else
+  stack_->ClearContext();
+#endif  // V8_ENABLE_WEBASSEMBLY
+}
+
 }  // namespace internal
 }  // namespace v8
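With SaveStackContextScope in place, every GC entry point pairs SaveContext and ClearContext automatically, and the nesting counter makes it safe when entry points call into one another. A hypothetical caller mirroring the pattern in Heap::CollectGarbage above — DoCollect is a made-up name:

void DoCollect(v8::internal::Heap* heap) {
  v8::internal::SaveStackContextScope stack_context_scope(&heap->stack());
  // ... run the collector; conservative stack scanning reads the saved
  // context through Stack::IteratePointers() ...
}  // Destructor calls ClearContext(), so save/clear always stay paired.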
@@ -173,7 +173,6 @@ enum class SkipRoot {
   kMainThreadHandles,
   kUnserializable,
   kWeak,
-  kTopOfStack,
   kConservativeStack,
 };
@@ -1029,16 +1028,12 @@ class Heap {
   // garbage collection and is usually only performed as part of
   // (de)serialization or heap verification.

-  // The order of this enumeration's elements is important: they should go from
-  // more precise to more conservative modes for stack scanning, so that we can
-  // use std::min to override for testing purposes.
-  enum class ScanStackMode { kNone, kFromMarker, kComplete };
-
   // Iterates over the strong roots and the weak roots.
   void IterateRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
   void IterateRootsIncludingClients(RootVisitor* v,
                                     base::EnumSet<SkipRoot> options);
-  void IterateRootsFromStackIncludingClient(RootVisitor* v, ScanStackMode mode);
+  void IterateRootsFromStackIncludingClients(RootVisitor* v,
+                                             StackState stack_state);

   // Iterates over entries in the smi roots list. Only interesting to the
   // serializer/deserializer, since GC does not care about smis.
@@ -1047,7 +1042,7 @@ class Heap {
   void IterateWeakRoots(RootVisitor* v, base::EnumSet<SkipRoot> options);
   void IterateWeakGlobalHandles(RootVisitor* v);
   void IterateBuiltins(RootVisitor* v);
-  void IterateStackRoots(RootVisitor* v, ScanStackMode mode);
+  void IterateStackRoots(RootVisitor* v, StackState stack_state);

   // ===========================================================================
   // Remembered set API. =======================================================
@@ -2391,7 +2386,7 @@ class Heap {
   bool force_oom_ = false;
   bool force_gc_on_next_allocation_ = false;
   bool delay_sweeper_tasks_for_testing_ = false;
-  ScanStackMode scan_stack_mode_for_testing_ = ScanStackMode::kComplete;
+  bool disable_conservative_stack_scanning_for_testing_ = false;

   UnorderedHeapObjectMap<HeapObject> retainer_;
   UnorderedHeapObjectMap<Root> retaining_root_;
@@ -2456,7 +2451,7 @@ class Heap {
   friend class PagedSpaceBase;
   friend class PretenturingHandler;
   friend class ReadOnlyRoots;
-  friend class ScanStackModeScopeForTesting;
+  friend class DisableConservativeStackScanningScopeForTesting;
   friend class Scavenger;
   friend class ScavengerCollector;
   friend class StressConcurrentAllocationObserver;
@@ -2676,21 +2671,32 @@ class V8_NODISCARD IgnoreLocalGCRequests {
   Heap* heap_;
 };

-class V8_NODISCARD ScanStackModeScopeForTesting {
+// TODO(v8:13493): This class will move to src/heap/base/stack.h once its
+// implementation no longer needs access to V8 flags.
+class V8_EXPORT_PRIVATE V8_NODISCARD SaveStackContextScope {
  public:
-  explicit inline ScanStackModeScopeForTesting(Heap* heap,
-                                               Heap::ScanStackMode mode)
-      : heap_(heap), old_value_(heap_->scan_stack_mode_for_testing_) {
-    heap_->scan_stack_mode_for_testing_ = mode;
+  explicit SaveStackContextScope(::heap::base::Stack* stack);
+  ~SaveStackContextScope();
+
+ protected:
+  ::heap::base::Stack* stack_;
+};
+
+class V8_NODISCARD DisableConservativeStackScanningScopeForTesting {
+ public:
+  explicit inline DisableConservativeStackScanningScopeForTesting(Heap* heap)
+      : heap_(heap),
+        old_value_(heap_->disable_conservative_stack_scanning_for_testing_) {
+    heap_->disable_conservative_stack_scanning_for_testing_ = true;
   }

-  inline ~ScanStackModeScopeForTesting() {
-    heap_->scan_stack_mode_for_testing_ = old_value_;
+  inline ~DisableConservativeStackScanningScopeForTesting() {
+    heap_->disable_conservative_stack_scanning_for_testing_ = old_value_;
   }

  protected:
   Heap* heap_;
-  Heap::ScanStackMode old_value_;
+  bool old_value_;
 };

 // Visitor class to verify interior pointers in spaces that do not contain
@@ -2793,6 +2799,7 @@ class V8_EXPORT_PRIVATE HeapObjectIterator {
   SpaceIterator* space_iterator_;
   // Object iterator for the space currently being iterated.
   std::unique_ptr<ObjectIterator> object_iterator_;
+  SaveStackContextScope stack_context_scope_;

   DISALLOW_GARBAGE_COLLECTION(no_heap_allocation_)
 };
@@ -153,10 +153,8 @@ class MarkingVerifier : public ObjectVisitorWithCageBases, public RootVisitor {
 };

 void MarkingVerifier::VerifyRoots() {
-  // When verifying marking, we never want to scan conservatively the top of the
-  // stack.
-  heap_->IterateRootsIncludingClients(
-      this, base::EnumSet<SkipRoot>{SkipRoot::kWeak, SkipRoot::kTopOfStack});
+  heap_->IterateRootsIncludingClients(this,
+                                      base::EnumSet<SkipRoot>{SkipRoot::kWeak});
 }

 void MarkingVerifier::VerifyMarkingOnPage(const Page* page, Address start,
@@ -2025,9 +2023,8 @@ void MarkCompactCollector::MarkRoots(RootVisitor* root_visitor,
   // v8::TracedReference alive from the stack. This is only needed when using
   // `EmbedderHeapTracer` and not using `CppHeap`.
   auto& stack = heap()->stack();
-  if (stack.stack_start() &&
-      heap_->local_embedder_heap_tracer()->embedder_stack_state() ==
-          cppgc::EmbedderStackState::kMayContainHeapPointers) {
+  if (heap_->local_embedder_heap_tracer()->embedder_stack_state() ==
+      cppgc::EmbedderStackState::kMayContainHeapPointers) {
     ConservativeTracedHandlesMarkingVisitor conservative_marker(
         *heap_, *local_marking_worklists_);
     stack.IteratePointers(&conservative_marker);
@@ -2159,8 +2156,8 @@ Address MarkCompactCollector::FindBasePtrForMarking(Address maybe_inner_ptr) {
 #endif  // V8_ENABLE_INNER_POINTER_RESOLUTION_MB

 void MarkCompactCollector::MarkRootsFromStack(RootVisitor* root_visitor) {
-  heap()->IterateRootsFromStackIncludingClient(root_visitor,
-                                               Heap::ScanStackMode::kComplete);
+  heap()->IterateRootsFromStackIncludingClients(
+      root_visitor, StackState::kMayContainHeapPointers);
 }

 void MarkCompactCollector::MarkObjectsFromClientHeaps() {
@@ -516,7 +516,7 @@ void ScavengerCollector::IterateStackAndScavenge(
     survived_bytes_before +=
         scavenger->bytes_copied() + scavenger->bytes_promoted();
   }
-  heap_->IterateStackRoots(root_scavenge_visitor, Heap::ScanStackMode::kNone);
+  heap_->IterateStackRoots(root_scavenge_visitor, StackState::kNoHeapPointers);
   (*scavengers)[main_thread_id]->Process();
   size_t survived_bytes_after = 0;
   for (auto& scavenger : *scavengers) {
@@ -14,6 +14,7 @@
 #include "src/debug/debug.h"
 #include "src/handles/global-handles.h"
 #include "src/heap/combined-heap.h"
+#include "src/heap/heap.h"
 #include "src/heap/safepoint.h"
 #include "src/numbers/conversions.h"
 #include "src/objects/allocation-site-inl.h"
@@ -2064,14 +2065,16 @@ bool V8HeapExplorer::IterateAndExtractReferences(
   // its custom name to a generic builtin.
   RootsReferencesExtractor extractor(this);
   ReadOnlyRoots(heap_).Iterate(&extractor);
-  heap_->IterateRoots(&extractor, base::EnumSet<SkipRoot>{SkipRoot::kWeak});
-  // TODO(v8:11800): The heap snapshot generator incorrectly considers the weak
-  // string tables as strong retainers. Move IterateWeakRoots after
-  // SetVisitingWeakRoots.
-  heap_->IterateWeakRoots(&extractor, {});
-  extractor.SetVisitingWeakRoots();
-  heap_->IterateWeakGlobalHandles(&extractor);

+  {
+    SaveStackContextScope scope(&heap_->stack());
+    heap_->IterateRoots(&extractor, base::EnumSet<SkipRoot>{SkipRoot::kWeak});
+    // TODO(v8:11800): The heap snapshot generator incorrectly considers the
+    // weak string tables as strong retainers. Move IterateWeakRoots after
+    // SetVisitingWeakRoots.
+    heap_->IterateWeakRoots(&extractor, {});
+    extractor.SetVisitingWeakRoots();
+    heap_->IterateWeakGlobalHandles(&extractor);
+  }
   bool interrupted = false;

   CombinedHeapObjectIterator iterator(heap_,
@@ -198,39 +198,49 @@ void CcTest::AddGlobalFunction(v8::Local<v8::Context> env, const char* name,
 }

 void CcTest::CollectGarbage(i::AllocationSpace space, i::Isolate* isolate,
-                            i::Heap::ScanStackMode mode) {
+                            v8::StackState stack_state) {
   i::Isolate* iso = isolate ? isolate : i_isolate();
-  i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+  v8::base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+  if (stack_state == v8::StackState::kNoHeapPointers)
+    scope.emplace(iso->heap());
   iso->heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
 }

 void CcTest::CollectAllGarbage(i::Isolate* isolate,
-                               i::Heap::ScanStackMode mode) {
+                               v8::StackState stack_state) {
   i::Isolate* iso = isolate ? isolate : i_isolate();
-  i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+  v8::base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+  if (stack_state == v8::StackState::kNoHeapPointers)
+    scope.emplace(iso->heap());
   iso->heap()->CollectAllGarbage(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
 }

 void CcTest::CollectAllAvailableGarbage(i::Isolate* isolate,
-                                        i::Heap::ScanStackMode mode) {
+                                        v8::StackState stack_state) {
   i::Isolate* iso = isolate ? isolate : i_isolate();
-  i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+  v8::base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+  if (stack_state == v8::StackState::kNoHeapPointers)
+    scope.emplace(iso->heap());
   iso->heap()->CollectAllAvailableGarbage(i::GarbageCollectionReason::kTesting);
 }

 void CcTest::PreciseCollectAllGarbage(i::Isolate* isolate,
-                                      i::Heap::ScanStackMode mode) {
+                                      v8::StackState stack_state) {
   i::Isolate* iso = isolate ? isolate : i_isolate();
-  i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+  v8::base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+  if (stack_state == v8::StackState::kNoHeapPointers)
+    scope.emplace(iso->heap());
   iso->heap()->PreciseCollectAllGarbage(i::Heap::kNoGCFlags,
                                         i::GarbageCollectionReason::kTesting);
 }

 void CcTest::CollectSharedGarbage(i::Isolate* isolate,
-                                  i::Heap::ScanStackMode mode) {
+                                  v8::StackState stack_state) {
   i::Isolate* iso = isolate ? isolate : i_isolate();
-  i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+  v8::base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+  if (stack_state == v8::StackState::kNoHeapPointers)
+    scope.emplace(iso->heap());
   iso->heap()->CollectGarbageShared(iso->main_thread_local_heap(),
                                     i::GarbageCollectionReason::kTesting);
 }
@@ -173,19 +173,19 @@ class CcTest {
   // By default, the GC methods do not scan the stack conservatively.
   static void CollectGarbage(
       i::AllocationSpace space, i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone);
+      v8::StackState stack_state = v8::StackState::kNoHeapPointers);
   static void CollectAllGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone);
+      v8::StackState stack_state = v8::StackState::kNoHeapPointers);
   static void CollectAllAvailableGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone);
+      v8::StackState stack_state = v8::StackState::kNoHeapPointers);
   static void PreciseCollectAllGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone);
+      v8::StackState stack_state = v8::StackState::kNoHeapPointers);
   static void CollectSharedGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone);
+      v8::StackState stack_state = v8::StackState::kNoHeapPointers);

   static i::Handle<i::String> MakeString(const char* str);
   static i::Handle<i::String> MakeName(const char* str, int suffix);
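For tests, the default remains precise collection; a test that wants the collector to treat the stack conservatively now opts in through the StackState parameter rather than a ScanStackMode. A hypothetical call site built on the signatures above:

void GcPreciselyThenConservatively() {  // hypothetical test helper
  // Default: v8::StackState::kNoHeapPointers, i.e. conservative stack
  // scanning is suppressed via the DisableConservativeStackScanning scope.
  CcTest::CollectAllGarbage();
  // Opt in: the collector may treat stack words as conservative roots.
  CcTest::CollectAllGarbage(nullptr, v8::StackState::kMayContainHeapPointers);
}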
@@ -292,7 +292,7 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
     // If minor incremental marking is running, we need to finalize it first
     // because of the AdvanceForTesting call in this function which is currently
     // only possible for MajorMC.
-    ScanStackModeScopeForTesting scope(heap, Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap);
     heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kFinalizeMinorMC);
   }
@@ -334,7 +334,7 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space) {
 }

 void GcAndSweep(Heap* heap, AllocationSpace space) {
-  ScanStackModeScopeForTesting scope(heap, Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap);
   heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
   if (heap->sweeping_in_progress()) {
     IsolateSafepointScope scope(heap);
@@ -144,8 +144,8 @@ UNINITIALIZED_TEST(ConcurrentAllocationWhileMainThreadIsParked) {
   const int kThreads = 4;

   {
-    ScanStackModeScopeForTesting no_stack_scanning(i_isolate->heap(),
-                                                   Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        i_isolate->heap());
     ParkedScope scope(i_isolate->main_thread_local_isolate());

     for (int i = 0; i < kThreads; i++) {
@@ -177,8 +177,8 @@ UNINITIALIZED_TEST(ConcurrentAllocationWhileMainThreadParksAndUnparks) {
   const int kThreads = 4;

   {
-    ScanStackModeScopeForTesting no_stack_scanning(i_isolate->heap(),
-                                                   Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        i_isolate->heap());

     for (int i = 0; i < kThreads; i++) {
       auto thread =
@@ -217,8 +217,8 @@ UNINITIALIZED_TEST(ConcurrentAllocationWhileMainThreadRunsWithSafepoints) {
   const int kThreads = 4;

   {
-    ScanStackModeScopeForTesting no_stack_scanning(i_isolate->heap(),
-                                                   Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        i_isolate->heap());

     for (int i = 0; i < kThreads; i++) {
       auto thread =
@@ -752,8 +752,8 @@ TEST(MakingExternalUnalignedOneByteString) {

   // Trigger GCs and force evacuation.
   CcTest::CollectAllGarbage();
-  i::ScanStackModeScopeForTesting no_stack_scanning(
-      CcTest::heap(), i::Heap::ScanStackMode::kNone);
+  i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+      CcTest::heap());
   CcTest::heap()->CollectAllGarbage(i::Heap::kReduceMemoryFootprintMask,
                                     i::GarbageCollectionReason::kTesting);
 }
@@ -21028,8 +21028,8 @@ class RegExpInterruptTest {

   static void CollectAllGarbage(v8::Isolate* isolate, void* data) {
     i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
-    i::ScanStackModeScopeForTesting no_stack_scanning(
-        CcTest::heap(), i::Heap::ScanStackMode::kNone);
+    i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        CcTest::heap());
     i_isolate->heap()->PreciseCollectAllGarbage(
         i::Heap::kNoGCFlags, i::GarbageCollectionReason::kRuntime);
   }
@@ -25590,8 +25590,8 @@ TEST(MemoryPressure) {
   WeakCallCounter counter(1234);

   // Conservative stack scanning might break results.
-  i::ScanStackModeScopeForTesting no_stack_scanning(
-      CcTest::heap(), i::Heap::ScanStackMode::kNone);
+  i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+      CcTest::heap());

   // Check that critical memory pressure notification sets GC interrupt.
   auto garbage = CreateGarbageWithWeakCallCounter(isolate, &counter);
@@ -27455,8 +27455,7 @@ static void CallIsolate2(const v8::FunctionCallbackInfo<v8::Value>& args) {
       v8::Local<v8::Context>::New(isolate_2, context_2);
   v8::Context::Scope context_scope(context);
   i::Heap* heap_2 = reinterpret_cast<i::Isolate*>(isolate_2)->heap();
-  i::ScanStackModeScopeForTesting no_stack_scanning(
-      heap_2, i::Heap::ScanStackMode::kNone);
+  i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap_2);
   heap_2->CollectAllGarbage(i::Heap::kForcedGC,
                             i::GarbageCollectionReason::kTesting);
   CompileRun("f2() //# sourceURL=isolate2b");
@@ -1260,8 +1260,8 @@ static TestStatsStream GetHeapStatsUpdate(
 TEST(HeapSnapshotObjectsStats) {
   // Concurrent allocation and conservative stack scanning might break results.
   i::v8_flags.stress_concurrent_allocation = false;
-  i::ScanStackModeScopeForTesting no_stack_scanning(
-      CcTest::heap(), i::Heap::ScanStackMode::kNone);
+  i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+      CcTest::heap());

   LocalContext env;
   v8::HandleScope scope(env->GetIsolate());
@@ -11,7 +11,9 @@
 #include "include/v8-array-buffer.h"
 #include "include/v8-inspector.h"
 #include "include/v8-local-handle.h"
+#include "include/v8-locker.h"
 #include "include/v8-script.h"
+#include "src/base/optional.h"

 namespace v8 {
@@ -47,7 +49,9 @@ class InspectorIsolateData : public v8_inspector::V8InspectorClient {
   ~InspectorIsolateData() override {
     // Enter the isolate before destructing this InspectorIsolateData, so that
     // destructors that run before the Isolate's destructor still see it as
-    // entered.
+    // entered. Use a v8::Locker, in case the thread destroying the isolate is
+    // not the last one that entered it.
+    locker_.emplace(isolate());
     isolate()->Enter();
   }
@@ -159,6 +163,9 @@ class InspectorIsolateData : public v8_inspector::V8InspectorClient {
   SetupGlobalTasks setup_global_tasks_;
   std::unique_ptr<v8::ArrayBuffer::Allocator> array_buffer_allocator_;
   std::unique_ptr<v8::Isolate, IsolateDeleter> isolate_;
+  // The locker_ field has to come after isolate_ because the locker has to
+  // outlive the isolate.
+  base::Optional<v8::Locker> locker_;
   std::unique_ptr<v8_inspector::V8Inspector> inspector_;
   int last_context_group_id_ = 0;
   std::map<int, std::vector<v8::Global<v8::Context>>> contexts_;
@@ -1586,7 +1586,7 @@

 ################################################################################
 ['conservative_stack_scanning', {
-  # TODO(v8:13257): Conservative stack scanning is not currently compatible
+  # TODO(v8:13493): Conservative stack scanning is not currently compatible
   # with stack switching.
   'wasm/stack-switching': [SKIP],
   'wasm/stack-switching-export': [SKIP],
@@ -359,8 +359,8 @@ class MergeDeserializedCodeTest : public DeserializeTest {
       }
     }

-    i::ScanStackModeScopeForTesting no_stack_scanning(
-        i_isolate->heap(), i::Heap::ScanStackMode::kNone);
+    i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        i_isolate->heap());
    i_isolate->heap()->CollectAllGarbage(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
@@ -509,8 +509,8 @@ class MergeDeserializedCodeTest : public DeserializeTest {
     // At this point, the original_objects array might still have pointers to
     // some old discarded content, such as UncompiledData from flushed
     // functions. GC again to clear it all out.
-    i::ScanStackModeScopeForTesting no_stack_scanning(
-        i_isolate->heap(), i::Heap::ScanStackMode::kNone);
+    i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        i_isolate->heap());
     i_isolate->heap()->CollectAllGarbage(i::Heap::kNoGCFlags,
                                          i::GarbageCollectionReason::kTesting);
@ -86,6 +86,7 @@ TEST_F(ConservativeStackVisitorTest, DirectBasePointer) {
|
||||
volatile Address ptr = recorder->base_address();
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -107,6 +108,7 @@ TEST_F(ConservativeStackVisitorTest, TaggedBasePointer) {
|
||||
volatile Address ptr = recorder->tagged_address();
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -128,6 +130,7 @@ TEST_F(ConservativeStackVisitorTest, InnerPointer) {
|
||||
volatile Address ptr = recorder->inner_address();
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -151,6 +154,7 @@ TEST_F(ConservativeStackVisitorTest, HalfWord1) {
|
||||
volatile uint32_t ptr[] = {recorder->compr_address(), 0};
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -172,6 +176,7 @@ TEST_F(ConservativeStackVisitorTest, HalfWord2) {
|
||||
volatile uint32_t ptr[] = {0, recorder->compr_address()};
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -193,6 +198,7 @@ TEST_F(ConservativeStackVisitorTest, InnerHalfWord1) {
|
||||
volatile uint32_t ptr[] = {recorder->compr_inner(), 0};
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
|
||||
@ -214,6 +220,7 @@ TEST_F(ConservativeStackVisitorTest, InnerHalfWord2) {
|
||||
volatile uint32_t ptr[] = {0, recorder->compr_inner()};
|
||||
|
||||
ConservativeStackVisitor stack_visitor(isolate(), recorder.get());
|
||||
SaveStackContextScope stack_context_scope(&heap()->stack());
|
||||
isolate()->heap()->stack().IteratePointers(&stack_visitor);
|
||||
|
||||
// Make sure to keep the pointer alive.
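All seven tests above gain the same single line: a SaveStackContextScope bracketing IteratePointers, so that a context (stack marker plus callee-saved register values) exists before iteration and is cleared afterwards. A self-contained sketch of what such a scope might look like; Stack here is a toy, not heap::base::Stack:

#include <cassert>

// Toy stand-in for heap::base::Stack; only context bookkeeping is modeled.
class Stack {
 public:
  void SaveContext() { context_saved_ = true; }   // real code records the
                                                  // stack marker + registers
  void ClearContext() { context_saved_ = false; }
  bool has_context() const { return context_saved_; }

 private:
  bool context_saved_ = false;
};

// RAII helper in the spirit of SaveStackContextScope: code in the scope may
// assume a context has been saved.
class SaveStackContextScope {
 public:
  explicit SaveStackContextScope(Stack* stack) : stack_(stack) {
    stack_->SaveContext();
  }
  ~SaveStackContextScope() { stack_->ClearContext(); }

 private:
  Stack* stack_;
};

int main() {
  Stack stack;
  {
    SaveStackContextScope scope(&stack);
    assert(stack.has_context());  // IteratePointers would run here
  }
  assert(!stack.has_context());
}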
@ -359,8 +359,7 @@ class InConstructionObjectReferringToGlobalHandle final
   InConstructionObjectReferringToGlobalHandle(Heap* heap,
                                               v8::Local<v8::Object> wrapper)
       : wrapper_(reinterpret_cast<v8::Isolate*>(heap->isolate()), wrapper) {
-    ScanStackModeScopeForTesting no_stack_scanning(heap,
-                                                   Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap);
     heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);
     heap->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);
   }

@ -29,7 +29,9 @@ class MarkerTest : public testing::TestWithHeap {
     const MarkingConfig config = {CollectionType::kMajor, stack_state};
     auto* heap = Heap::From(GetHeap());
     InitializeMarker(*heap, GetPlatformHandle().get(), config);
+    heap->stack()->SaveContext();
     marker_->FinishMarking(stack_state);
+    heap->stack()->ClearContext();
     // Pretend do finish sweeping as StatsCollector verifies that Notify*
     // methods are called in the right order.
     heap->stats_collector()->NotifySweepingCompleted(
@ -250,7 +252,9 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
         marker->Visitor().Trace(member);
       });
   EXPECT_FALSE(HeapObjectHeader::FromObject(object).IsMarked());
+  Heap::From(GetHeap())->stack()->SaveContext();
   marker()->FinishMarking(StackState::kMayContainHeapPointers);
+  Heap::From(GetHeap())->stack()->ClearContext();
   EXPECT_TRUE(HeapObjectHeader::FromObject(object).IsMarked());
 }

@ -259,11 +263,14 @@ TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
                                 StackState::kMayContainHeapPointers};
   InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
   MakeGarbageCollected<GCedWithCallback>(
-      GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
+      GetAllocationHandle(), [stack = Heap::From(GetHeap())->stack(),
+                              marker = marker()](GCedWithCallback* obj) {
         Member<GCedWithCallback> member(obj);
         marker->Visitor().Trace(member);
         EXPECT_FALSE(HeapObjectHeader::FromObject(obj).IsMarked());
+        stack->SaveContext();
         marker->FinishMarking(StackState::kMayContainHeapPointers);
+        stack->ClearContext();
         EXPECT_TRUE(HeapObjectHeader::FromObject(obj).IsMarked());
       });
 }
@ -320,7 +327,9 @@ TEST_F(MarkerTest,
   RegisterInConstructionObject(GetAllocationHandle(), marker()->Visitor(),
                                storage);
   EXPECT_FALSE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
+  Heap::From(GetHeap())->stack()->SaveContext();
   marker()->FinishMarking(StackState::kMayContainHeapPointers);
+  Heap::From(GetHeap())->stack()->ClearContext();
   EXPECT_TRUE(HeapObjectHeader::FromObject(storage.object()).IsMarked());
 }

@ -400,7 +409,9 @@ class IncrementalMarkingTest : public testing::TestWithHeap {
   }

   void FinishMarking() {
+    Heap::From(GetHeap())->stack()->SaveContext();
     GetMarkerRef()->FinishMarking(StackState::kMayContainHeapPointers);
+    Heap::From(GetHeap())->stack()->ClearContext();
     // Pretend do finish sweeping as StatsCollector verifies that Notify*
     // methods are called in the right order.
     GetMarkerRef().reset();
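The SaveContext()/FinishMarking()/ClearContext() triple now recurs at every call site above. Assuming call sites stay this regular, the bracket could be expressed once as a higher-order helper; this is a hypothetical refactoring for illustration, not part of this CL, and the types are stand-ins:

#include <cassert>

// Toy stand-in; the real types are heap::base::Stack and cppgc's marker.
struct Stack {
  bool has_context = false;
  void SaveContext() { has_context = true; }
  void ClearContext() { has_context = false; }
};

// Runs `fn` under a valid stack context, mirroring the call-site pattern
// SaveContext(); FinishMarking(...); ClearContext();
// (V8 builds with exceptions disabled, so no unwind handling is modeled.)
template <typename Fn>
void WithStackContext(Stack& stack, Fn&& fn) {
  stack.SaveContext();
  fn();
  stack.ClearContext();
}

int main() {
  Stack stack;
  WithStackContext(stack, [&] {
    assert(stack.has_context);  // e.g. marker->FinishMarking(stack_state)
  });
  assert(!stack.has_context);
}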
@ -23,9 +23,10 @@ class MarkingVerifierTest : public testing::TestWithHeap {
   V8_NOINLINE void VerifyMarking(HeapBase& heap, StackState stack_state,
                                  size_t expected_marked_bytes) {
     Heap::From(GetHeap())->object_allocator().ResetLinearAllocationBuffers();
+    Heap::From(GetHeap())->stack()->SaveContext();
     MarkingVerifier verifier(heap, CollectionType::kMajor);
-    verifier.Run(stack_state, v8::base::Stack::GetCurrentStackPosition(),
-                 expected_marked_bytes);
+    verifier.Run(stack_state, expected_marked_bytes);
+    Heap::From(GetHeap())->stack()->ClearContext();
   }
 };

@ -83,7 +83,9 @@ TEST_F(GCStackTest, IteratePointersFindsOnStackValue) {
   {
     int* volatile tmp = scanner->needle();
     USE(tmp);
+    GetStack()->SaveContext();
     GetStack()->IteratePointers(scanner.get());
+    GetStack()->ClearContext();
     EXPECT_TRUE(scanner->found());
   }
 }
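For context on why these tests plant their needle through `int* volatile`: volatile pins the value in a real stack slot the compiler cannot optimize away, and conservative scanning then treats every word in the scanned range as a potential pointer. A toy model of that comparison, with no V8 types involved:

#include <cstdint>
#include <cstdio>

int main() {
  int object = 42;  // stands in for a heap-allocated object
  const uintptr_t needle = reinterpret_cast<uintptr_t>(&object);

  // A fake "stack slice"; in the tests the needle lives in a genuine stack
  // slot, kept there by `int* volatile tmp = scanner->needle(); USE(tmp);`.
  const uintptr_t slice[] = {0x0, needle, 0xdeadbeef, 0x0};

  // Conservative scan: any word that compares equal to a known object
  // address is treated as a pointer that keeps the object alive.
  bool found = false;
  for (uintptr_t word : slice) {
    if (word == needle) found = true;
  }
  std::printf("needle %s\n", found ? "found" : "not found");
}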
@ -98,7 +100,9 @@ TEST_F(GCStackTest, IteratePointersFindsOnStackValuePotentiallyUnaligned) {
   USE(a);
   int* volatile tmp = scanner->needle();
   USE(tmp);
+  GetStack()->SaveContext();
   GetStack()->IteratePointers(scanner.get());
+  GetStack()->ClearContext();
   EXPECT_TRUE(scanner->found());
 }
 }
@ -143,7 +147,9 @@ V8_NOINLINE void* RecursivelyPassOnParameterImpl(void* p1, void* p2, void* p3,
                                           nullptr, nullptr, nullptr, p7, stack,
                                           visitor);
   } else if (p8) {
+    stack->SaveContext();
     stack->IteratePointers(visitor);
+    stack->ClearContext();
     return p8;
   }
   return nullptr;
@ -154,7 +160,9 @@ V8_NOINLINE void* RecursivelyPassOnParameter(size_t num, void* parameter,
                                              StackVisitor* visitor) {
   switch (num) {
     case 0:
+      stack->SaveContext();
       stack->IteratePointers(visitor);
+      stack->ClearContext();
       return parameter;
     case 1:
       return RecursivelyPassOnParameterImpl(nullptr, nullptr, nullptr, nullptr,
@ -290,7 +298,9 @@ extern "C" V8_NOINLINE
 #endif  // defined(__clang__)
     void
     IteratePointersNoMangling(Stack* stack, StackVisitor* visitor) {
+  stack->SaveContext();
   stack->IteratePointers(visitor);
+  stack->ClearContext();
 }
 }  // namespace

@ -468,7 +478,9 @@ class CheckStackAlignmentVisitor final : public StackVisitor {

 TEST_F(GCStackTest, StackAlignment) {
   auto checker = std::make_unique<CheckStackAlignmentVisitor>();
+  GetStack()->SaveContext();
   GetStack()->IteratePointers(checker.get());
+  GetStack()->ClearContext();
 }
 #endif  // V8_OS_LINUX && (V8_HOST_ARCH_IA32 || V8_HOST_ARCH_X64)
@ -29,7 +29,9 @@ class WeakContainerTest : public testing::TestWithHeap {
   }

   void FinishMarking(StackState stack_state) {
+    Heap::From(GetHeap())->stack()->SaveContext();
     GetMarkerRef()->FinishMarking(stack_state);
+    Heap::From(GetHeap())->stack()->ClearContext();
     marked_bytes_ =
         Heap::From(GetHeap())->AsBase().stats_collector()->marked_bytes();
     GetMarkerRef().reset();

@ -26,7 +26,9 @@ class V8_NODISCARD IncrementalMarkingScope {
   explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {}

   ~IncrementalMarkingScope() V8_NOEXCEPT {
+    marker_->heap().stack()->SaveContext();
     marker_->FinishMarking(kIncrementalConfig.stack_state);
+    marker_->heap().stack()->ClearContext();
   }

   static constexpr MarkingConfig kIncrementalConfig{

@ -33,22 +33,22 @@ class WithHeapInternals : public TMixin, HeapInternalsBase {
   WithHeapInternals& operator=(const WithHeapInternals&) = delete;

   void CollectGarbage(AllocationSpace space) {
-    ScanStackModeScopeForTesting scope(heap(), Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting scope(heap());
     heap()->CollectGarbage(space, GarbageCollectionReason::kTesting);
   }

   void FullGC() {
-    ScanStackModeScopeForTesting scope(heap(), Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting scope(heap());
     heap()->CollectGarbage(OLD_SPACE, GarbageCollectionReason::kTesting);
   }

   void YoungGC() {
-    ScanStackModeScopeForTesting scope(heap(), Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting scope(heap());
     heap()->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kTesting);
   }

   void CollectAllAvailableGarbage() {
-    ScanStackModeScopeForTesting scope(heap(), Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting scope(heap());
     heap()->CollectAllAvailableGarbage(GarbageCollectionReason::kTesting);
   }
@ -92,7 +92,7 @@ class WithHeapInternals : public TMixin, HeapInternalsBase {
   }

   void GcAndSweep(AllocationSpace space) {
-    ScanStackModeScopeForTesting scope(heap(), Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting scope(heap());
     heap()->CollectGarbage(space, GarbageCollectionReason::kTesting);
     if (heap()->sweeping_in_progress()) {
       IsolateSafepointScope scope(heap());
@ -136,19 +136,19 @@ using TestWithHeapInternalsAndContext =  //

 inline void CollectGarbage(AllocationSpace space, v8::Isolate* isolate) {
   Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
-  ScanStackModeScopeForTesting scope(heap, Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting scope(heap);
   heap->CollectGarbage(space, GarbageCollectionReason::kTesting);
 }

 inline void FullGC(v8::Isolate* isolate) {
   Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
-  ScanStackModeScopeForTesting scope(heap, Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting scope(heap);
   heap->CollectAllGarbage(Heap::kNoGCFlags, GarbageCollectionReason::kTesting);
 }

 inline void YoungGC(v8::Isolate* isolate) {
   Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
-  ScanStackModeScopeForTesting scope(heap, Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting scope(heap);
   heap->CollectGarbage(NEW_SPACE, GarbageCollectionReason::kTesting);
 }

@ -145,8 +145,8 @@ void DeclarationContext::Check(const char* source, int get, int set, int query,
   InitializeIfNeeded();
   // A retry after a GC may pollute the counts, so perform gc now
   // to avoid that.
-  i::ScanStackModeScopeForTesting no_stack_scanning(
-      i_isolate()->heap(), i::Heap::ScanStackMode::kNone);
+  i::DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+      i_isolate()->heap());
   i_isolate()->heap()->CollectGarbage(i::NEW_SPACE,
                                       i::GarbageCollectionReason::kTesting);
   HandleScope scope(isolate_);

@ -91,8 +91,8 @@ ManualGCScope::ManualGCScope(i::Isolate* isolate) {
   // running by the time a ManualGCScope is created. Finalizing existing marking
   // prevents any undefined/unexpected behavior.
   if (isolate && isolate->heap()->incremental_marking()->IsMarking()) {
-    ScanStackModeScopeForTesting no_stack_scanning(isolate->heap(),
-                                                   Heap::ScanStackMode::kNone);
+    DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+        isolate->heap());
     isolate->heap()->CollectGarbage(OLD_SPACE,
                                     GarbageCollectionReason::kTesting);
     // Make sure there is no concurrent sweeping running in the background.
@ -187,37 +187,39 @@ class WithIsolateScopeMixin : public TMixin {
   }

-  void CollectGarbage(
-      i::AllocationSpace space, i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone) {
+  // By default, the GC methods do not scan the stack conservatively.
+  void CollectGarbage(i::AllocationSpace space, i::Isolate* isolate = nullptr,
+                      StackState stack_state = StackState::kNoHeapPointers) {
     i::Isolate* iso = isolate ? isolate : i_isolate();
-    i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+    base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+    if (stack_state == StackState::kNoHeapPointers) scope.emplace(iso->heap());
     iso->heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
   }

-  void CollectAllGarbage(
-      i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone) {
+  void CollectAllGarbage(i::Isolate* isolate = nullptr,
+                         StackState stack_state = StackState::kNoHeapPointers) {
     i::Isolate* iso = isolate ? isolate : i_isolate();
-    i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+    base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+    if (stack_state == StackState::kNoHeapPointers) scope.emplace(iso->heap());
     iso->heap()->CollectAllGarbage(i::Heap::kNoGCFlags,
                                    i::GarbageCollectionReason::kTesting);
   }

   void CollectAllAvailableGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone) {
+      StackState stack_state = StackState::kNoHeapPointers) {
     i::Isolate* iso = isolate ? isolate : i_isolate();
-    i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+    base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+    if (stack_state == StackState::kNoHeapPointers) scope.emplace(iso->heap());
     iso->heap()->CollectAllAvailableGarbage(
         i::GarbageCollectionReason::kTesting);
   }

   void PreciseCollectAllGarbage(
       i::Isolate* isolate = nullptr,
-      i::Heap::ScanStackMode mode = i::Heap::ScanStackMode::kNone) {
+      StackState stack_state = StackState::kNoHeapPointers) {
     i::Isolate* iso = isolate ? isolate : i_isolate();
-    i::ScanStackModeScopeForTesting scope(iso->heap(), mode);
+    base::Optional<i::DisableConservativeStackScanningScopeForTesting> scope;
+    if (stack_state == StackState::kNoHeapPointers) scope.emplace(iso->heap());
     iso->heap()->PreciseCollectAllGarbage(i::Heap::kNoGCFlags,
                                           i::GarbageCollectionReason::kTesting);
   }
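The mixin's pattern above, declaring an empty base::Optional<> and emplacing it conditionally, is the standard way to engage an RAII scope on only some paths. A minimal sketch, using std::optional as a stand-in for base::Optional and toy types throughout:

#include <optional>

enum class StackState { kNoHeapPointers, kMayContainHeapPointers };

struct Heap {};

// Stand-in for DisableConservativeStackScanningScopeForTesting.
struct DisableScope {
  explicit DisableScope(Heap*) {}
};

void CollectGarbage(Heap* heap, StackState stack_state) {
  // Engaged only when the caller promises there are no heap pointers on the
  // stack; if engaged, destroyed when the function returns.
  std::optional<DisableScope> scope;
  if (stack_state == StackState::kNoHeapPointers) scope.emplace(heap);
  // ... trigger the GC here ...
}

int main() {
  Heap heap;
  CollectGarbage(&heap, StackState::kNoHeapPointers);
  CollectGarbage(&heap, StackState::kMayContainHeapPointers);
}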
@ -30,8 +30,8 @@ own<Trap> Stage2(void* env, const Val args[], Val results[]) {

 own<Trap> Stage4_GC(void* env, const Val args[], Val results[]) {
   printf("Stage4...\n");
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(env);
-  ScanStackModeScopeForTesting no_stack_scanning(isolate->heap(),
-                                                 Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting no_stack_scanning(
+      isolate->heap());
   isolate->heap()->PreciseCollectAllGarbage(Heap::kForcedGC,
                                             GarbageCollectionReason::kTesting);
   results[0] = Val::i32(args[0].i32() + 1);

@ -37,8 +37,7 @@ TEST_F(WasmCapiTest, Serialize) {
   ResetModule();
   Heap* heap =
       reinterpret_cast<::wasm::StoreImpl*>(store())->i_isolate()->heap();
-  ScanStackModeScopeForTesting no_stack_scanning(heap,
-                                                 Heap::ScanStackMode::kNone);
+  DisableConservativeStackScanningScopeForTesting no_stack_scanning(heap);
   heap->PreciseCollectAllGarbage(Heap::kForcedGC,
                                  GarbageCollectionReason::kTesting);
   heap->PreciseCollectAllGarbage(Heap::kForcedGC,