Reland [in-place weak refs] Add in-place weak references & migrate one WeakCell to it.
Implement in-place weak reference handling in GC. Turn
FeedbackVector::optimized_code_or_smi into an in-place weak reference
(this is the only in-place weak reference at this point). (See bug for
design doc.)

BUG=v8:7308
TBR=yangguo@chromium.org
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I16d65dc768f10ed431252e23a0df07bee9063534
Reviewed-on: https://chromium-review.googlesource.com/948493
Commit-Queue: Marja Hölttä <marja@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51731}
parent f6ed92d6de
commit 88062a2cbc
BUILD.gn (2 changes)
@@ -1872,6 +1872,8 @@ v8_source_set("v8_base") {
     "src/objects/literal-objects.h",
     "src/objects/map-inl.h",
     "src/objects/map.h",
+    "src/objects/maybe-object-inl.h",
+    "src/objects/maybe-object.h",
     "src/objects/microtask-inl.h",
     "src/objects/microtask.h",
     "src/objects/module-inl.h",
include/v8.h (35 changes)
@@ -145,7 +145,9 @@ class Arguments;
 class DeferredHandles;
 class Heap;
 class HeapObject;
+class HeapObjectReference;
 class Isolate;
+class MaybeObject;
 class Object;
 struct ScriptStreamingData;
 template<typename T> class CustomArguments;
@@ -8852,9 +8854,13 @@ const int kApiInt64Size = sizeof(int64_t); // NOLINT
 
 // Tag information for HeapObject.
 const int kHeapObjectTag = 1;
+const int kWeakHeapObjectTag = 3;
 const int kHeapObjectTagSize = 2;
 const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
 
+const intptr_t kWeakHeapObjectMask = 1 << 1;
+const intptr_t kClearedWeakHeapObject = 3;
+
 // Tag information for Smi.
 const int kSmiTag = 0;
 const int kSmiTagSize = 1;
@@ -8996,6 +9002,35 @@ class Internals {
                 kHeapObjectTag);
   }
 
+  V8_INLINE static bool HasWeakHeapObjectTag(
+      const internal::MaybeObject* value) {
+    return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
+            kWeakHeapObjectTag);
+  }
+
+  // Object* should never have the weak tag; this variant is for overzealous
+  // checking.
+  V8_INLINE static bool HasWeakHeapObjectTag(const internal::Object* value) {
+    return ((reinterpret_cast<intptr_t>(value) & kHeapObjectTagMask) ==
+            kWeakHeapObjectTag);
+  }
+
+  V8_INLINE static bool IsClearedWeakHeapObject(internal::MaybeObject* value) {
+    return reinterpret_cast<intptr_t>(value) == kClearedWeakHeapObject;
+  }
+
+  V8_INLINE static internal::HeapObject* RemoveWeakHeapObjectMask(
+      internal::HeapObjectReference* value) {
+    return reinterpret_cast<HeapObject*>(reinterpret_cast<intptr_t>(value) &
+                                         ~kWeakHeapObjectMask);
+  }
+
+  V8_INLINE static internal::HeapObjectReference* AddWeakHeapObjectMask(
+      internal::HeapObject* value) {
+    return reinterpret_cast<HeapObjectReference*>(
+        reinterpret_cast<intptr_t>(value) | kWeakHeapObjectMask);
+  }
+
   V8_INLINE static int SmiValue(const internal::Object* value) {
     return PlatformSmiTagging::SmiToInt(value);
   }
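For orientation, the scheme these constants define: the low two bits of a tagged word distinguish Smis (low bit 0), strong heap pointers (01), and weak heap pointers (11), and the bare value 3 (a weak tag with an empty payload) marks a cleared weak reference. A minimal standalone sketch of the decoding, illustrative only (DecodeTagged is a hypothetical helper mirroring kSmiTag, kHeapObjectTag, kWeakHeapObjectTag, and kClearedWeakHeapObject above, not part of this change):

    #include <cstdint>

    enum class Kind { kSmi, kStrongHeapObject, kWeakHeapObject, kClearedWeakReference };

    Kind DecodeTagged(intptr_t word) {
      if ((word & 1) == 0) return Kind::kSmi;               // xx0: Smi
      if ((word & 3) == 1) return Kind::kStrongHeapObject;  // x01: strong pointer
      return word == 3 ? Kind::kClearedWeakReference        // 011, empty payload
                       : Kind::kWeakHeapObject;             // x11: weak pointer
    }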
@@ -1736,6 +1736,14 @@ void MacroAssembler::JumpToInstructionStream(const InstructionStream* stream) {
   Jump(kOffHeapTrampolineRegister);
 }
 
+void MacroAssembler::LoadWeakValue(Register out, Register in,
+                                   Label* target_if_cleared) {
+  cmp(in, Operand(kClearedWeakHeapObject));
+  b(eq, target_if_cleared);
+
+  and_(out, in, Operand(~kWeakHeapObjectMask));
+}
+
 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                       Register scratch1, Register scratch2) {
   DCHECK_GT(value, 0);
@@ -801,6 +801,10 @@ class MacroAssembler : public TurboAssembler {
   // Generates a trampoline to jump to the off-heap instruction stream.
   void JumpToInstructionStream(const InstructionStream* stream);
 
+  // ---------------------------------------------------------------------------
+  // In-place weak references.
+  void LoadWeakValue(Register out, Register in, Label* target_if_cleared);
+
   // ---------------------------------------------------------------------------
   // StatsCounter support
@@ -2503,6 +2503,12 @@ void MacroAssembler::LeaveExitFrame(bool restore_doubles,
   Pop(fp, lr);
 }
 
+void MacroAssembler::LoadWeakValue(Register out, Register in,
+                                   Label* target_if_cleared) {
+  CompareAndBranch(in, Operand(kClearedWeakHeapObject), eq, target_if_cleared);
+
+  and_(out, in, Operand(~kWeakHeapObjectMask));
+}
+
 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                       Register scratch1, Register scratch2) {
@@ -1921,6 +1921,10 @@ class MacroAssembler : public TurboAssembler {
     LoadNativeContextSlot(Context::GLOBAL_PROXY_INDEX, dst);
   }
 
+  // ---------------------------------------------------------------------------
+  // In-place weak references.
+  void LoadWeakValue(Register out, Register in, Label* target_if_cleared);
+
   // ---------------------------------------------------------------------------
   // StatsCounter support
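Both ports implement the same operation: compare the incoming register against the cleared-weak sentinel and branch out if it matches, otherwise clear the weak bit so the register holds an ordinary strong pointer. A hedged C++ rendering of that semantics (a sketch, not the actual MacroAssembler code; the assembly version branches to target_if_cleared instead of returning a flag):

    #include <cstdint>

    const intptr_t kWeakHeapObjectMask = 1 << 1;
    const intptr_t kClearedWeakHeapObject = 3;

    // Returns false when the weak reference was cleared; otherwise writes the
    // strong pointer (tag 01 instead of 11) to *out.
    bool LoadWeakValue(intptr_t in, intptr_t* out) {
      if (in == kClearedWeakHeapObject) return false;
      *out = in & ~kWeakHeapObjectMask;
      return true;
    }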
@@ -752,7 +752,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   DCHECK(
       !AreAliased(feedback_vector, r0, r1, r3, scratch1, scratch2, scratch3));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = r1;
   Register optimized_code_entry = scratch1;
@@ -762,9 +762,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is a Smi optimization marker.
@@ -799,12 +799,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ ldr(optimized_code_entry,
-           FieldMemOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
 
     // Check if the optimized code is marked for deopt. If it is, call the
     // runtime to clear it.
@@ -845,7 +845,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   DCHECK(
       !AreAliased(feedback_vector, x0, x1, x3, scratch1, scratch2, scratch3));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = x1;
   Register optimized_code_entry = scratch1;
@@ -855,9 +855,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is a Smi optimization marker.
@@ -892,12 +892,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ Ldr(optimized_code_entry,
-           FieldMemOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
 
     // Check if the optimized code is marked for deopt. If it is, call the
     // runtime to clear it.
@@ -673,7 +673,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   // -----------------------------------
   DCHECK(!AreAliased(feedback_vector, eax, edx, edi, scratch));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = edi;
   Register optimized_code_entry = scratch;
@@ -682,9 +682,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is an optimization marker.
@@ -719,12 +719,11 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ mov(optimized_code_entry,
-           FieldOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, &fallthrough);
 
     __ push(eax);
     __ push(edx);
@@ -737,7 +737,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   DCHECK(
       !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = a1;
   Register optimized_code_entry = scratch1;
@@ -746,9 +746,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is a Smi optimization marker.
@@ -782,12 +782,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ lw(optimized_code_entry,
-          FieldMemOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
 
     // Check if the optimized code is marked for deopt. If it is, call the
     // runtime to clear it.
@@ -734,7 +734,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   DCHECK(
       !AreAliased(feedback_vector, a0, a1, a3, scratch1, scratch2, scratch3));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = a1;
   Register optimized_code_entry = scratch1;
@@ -743,9 +743,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is a Smi optimization marker.
@@ -779,12 +779,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ Ld(optimized_code_entry,
-          FieldMemOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
 
     // Check if the optimized code is marked for deopt. If it is, call the
     // runtime to clear it.
@@ -745,7 +745,7 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   DCHECK(!AreAliased(feedback_vector, rax, rdx, rdi, scratch1, scratch2,
                      scratch3));
 
-  Label optimized_code_slot_is_cell, fallthrough;
+  Label optimized_code_slot_is_weak_ref, fallthrough;
 
   Register closure = rdi;
   Register optimized_code_entry = scratch1;
@@ -754,9 +754,9 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
       FieldOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
 
   // Check if the code entry is a Smi. If yes, we interpret it as an
-  // optimisation marker. Otherwise, interpret is as a weak cell to a code
+  // optimisation marker. Otherwise, interpret it as a weak reference to a code
   // object.
-  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
+  __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
 
   {
     // Optimized code slot is a Smi optimization marker.
@@ -790,12 +790,10 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
   }
 
   {
-    // Optimized code slot is a WeakCell.
-    __ bind(&optimized_code_slot_is_cell);
+    // Optimized code slot is a weak reference.
+    __ bind(&optimized_code_slot_is_weak_ref);
 
-    __ movp(optimized_code_entry,
-            FieldOperand(optimized_code_entry, WeakCell::kValueOffset));
-    __ JumpIfSmi(optimized_code_entry, &fallthrough);
+    __ LoadWeakValue(optimized_code_entry, &fallthrough);
 
     // Check if the optimized code is marked for deopt. If it is, call the
     // runtime to clear it.
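Across all six ports the structural change is the same: the extra load through WeakCell::kValueOffset disappears because the feedback-vector slot now holds the (possibly weak) Code pointer directly. A hedged, self-contained C++ sketch of the control flow (TailCall and HandleOptimizationMarker are stand-ins, not V8 functions; Smi payload decoding is elided):

    #include <cstdint>

    constexpr intptr_t kClearedWeakHeapObject = 3;
    constexpr intptr_t kWeakHeapObjectMask = 1 << 1;

    struct Code;                                  // opaque stand-in
    void TailCall(Code* code);                    // assumed provided
    void HandleOptimizationMarker(intptr_t smi);  // assumed provided

    void MaybeTailCallOptimizedCodeSlot(intptr_t entry) {
      if ((entry & 1) == 0) {
        // Smi: an optimization marker, not code.
        HandleOptimizationMarker(entry);
        return;
      }
      // Weak reference to a Code object: fall through if it was cleared,
      // otherwise untag and (after the marked-for-deopt check, elided here)
      // tail-call the optimized code.
      if (entry == kClearedWeakHeapObject) return;
      TailCall(reinterpret_cast<Code*>(entry & ~kWeakHeapObjectMask));
    }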
@@ -1263,12 +1263,14 @@ Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
 
 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
                                          int offset, MachineType rep) {
+  AssertIsStrongHeapObject(object);
   return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
 }
 
 Node* CodeStubAssembler::LoadObjectField(SloppyTNode<HeapObject> object,
                                          SloppyTNode<IntPtrT> offset,
                                          MachineType rep) {
+  AssertIsStrongHeapObject(object);
   return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
 }
 
@@ -10944,5 +10946,12 @@ void CodeStubAssembler::InitializeFunctionContext(Node* native_context,
                 native_context);
 }
 
+void CodeStubAssembler::AssertIsStrongHeapObject(
+    SloppyTNode<HeapObject> object) {
+  CSA_SLOW_ASSERT(this, WordEqual(WordAnd(BitcastTaggedToWord(object),
+                                          IntPtrConstant(kHeapObjectTagMask)),
+                                  IntPtrConstant(kHeapObjectTag)));
+}
+
 }  // namespace internal
 }  // namespace v8
@@ -2003,6 +2003,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
   void InitializeFunctionContext(Node* native_context, Node* context,
                                  int slots);
 
+  void AssertIsStrongHeapObject(SloppyTNode<HeapObject> object);
+
  private:
   friend class CodeStubArguments;
@@ -10,6 +10,7 @@
 #include "src/globals.h"
 #include "src/heap/heap-inl.h"
 #include "src/heap/heap.h"
+#include "src/objects/maybe-object-inl.h"
 #include "src/objects/shared-function-info.h"
 
 // Has to be the last include (doesn't have include guards):
@@ -78,7 +79,7 @@ int FeedbackMetadata::GetSlotSize(FeedbackSlotKind kind) {
 
 ACCESSORS(FeedbackVector, shared_function_info, SharedFunctionInfo,
           kSharedFunctionInfoOffset)
-ACCESSORS(FeedbackVector, optimized_code_cell, Object, kOptimizedCodeOffset)
+WEAK_ACCESSORS(FeedbackVector, optimized_code_weak_or_smi, kOptimizedCodeOffset)
 INT32_ACCESSORS(FeedbackVector, length, kLengthOffset)
 INT32_ACCESSORS(FeedbackVector, invocation_count, kInvocationCountOffset)
 INT32_ACCESSORS(FeedbackVector, profiler_ticks, kProfilerTicksOffset)
@@ -100,16 +101,18 @@ void FeedbackVector::increment_deopt_count() {
 }
 
 Code* FeedbackVector::optimized_code() const {
-  Object* slot = optimized_code_cell();
-  if (slot->IsSmi()) return nullptr;
-  WeakCell* cell = WeakCell::cast(slot);
-  return cell->cleared() ? nullptr : Code::cast(cell->value());
+  MaybeObject* slot = optimized_code_weak_or_smi();
+  DCHECK(slot->IsSmi() || slot->IsClearedWeakHeapObject() ||
+         slot->IsWeakHeapObject());
+  HeapObject* heap_object;
+  return slot->ToStrongOrWeakHeapObject(&heap_object) ? Code::cast(heap_object)
+                                                      : nullptr;
 }
 
 OptimizationMarker FeedbackVector::optimization_marker() const {
-  Object* slot = optimized_code_cell();
-  if (!slot->IsSmi()) return OptimizationMarker::kNone;
-  Smi* value = Smi::cast(slot);
+  MaybeObject* slot = optimized_code_weak_or_smi();
+  Smi* value;
+  if (!slot->IsSmi(&value)) return OptimizationMarker::kNone;
   return static_cast<OptimizationMarker>(value->value());
 }
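With WEAK_ACCESSORS the slot's static type becomes MaybeObject*, so readers must decode it explicitly instead of casting. A usage sketch assembled from API calls that appear in this diff (the branch bodies are placeholders):

    // The three states an in-place weak slot can be in:
    MaybeObject* slot = vector->optimized_code_weak_or_smi();
    Smi* marker;
    HeapObject* heap_object;
    if (slot->IsSmi(&marker)) {
      // Optimization marker (e.g. kNone, kLogFirstExecution).
    } else if (slot->IsClearedWeakHeapObject()) {
      // The weakly-held Code object has been collected.
    } else if (slot->ToWeakHeapObject(&heap_object)) {
      Code* code = Code::cast(heap_object);  // target is still alive
    }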
@@ -219,10 +219,11 @@ Handle<FeedbackVector> FeedbackVector::New(Isolate* isolate,
   DCHECK_EQ(vector->length(), slot_count);
 
   DCHECK_EQ(vector->shared_function_info(), *shared);
-  DCHECK_EQ(vector->optimized_code_cell(),
-            Smi::FromEnum(FLAG_log_function_events
-                              ? OptimizationMarker::kLogFirstExecution
-                              : OptimizationMarker::kNone));
+  DCHECK_EQ(
+      vector->optimized_code_weak_or_smi(),
+      MaybeObject::FromSmi(Smi::FromEnum(
+          FLAG_log_function_events ? OptimizationMarker::kLogFirstExecution
+                                   : OptimizationMarker::kNone)));
   DCHECK_EQ(vector->invocation_count(), 0);
   DCHECK_EQ(vector->profiler_ticks(), 0);
   DCHECK_EQ(vector->deopt_count(), 0);
@@ -325,9 +326,7 @@ void FeedbackVector::AddToVectorsForProfilingTools(
 void FeedbackVector::SetOptimizedCode(Handle<FeedbackVector> vector,
                                       Handle<Code> code) {
   DCHECK_EQ(code->kind(), Code::OPTIMIZED_FUNCTION);
-  Factory* factory = vector->GetIsolate()->factory();
-  Handle<WeakCell> cell = factory->NewWeakCell(code);
-  vector->set_optimized_code_cell(*cell);
+  vector->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*code));
 }
 
 void FeedbackVector::ClearOptimizedCode() {
@@ -341,21 +340,22 @@ void FeedbackVector::ClearOptimizationMarker() {
 }
 
 void FeedbackVector::SetOptimizationMarker(OptimizationMarker marker) {
-  set_optimized_code_cell(Smi::FromEnum(marker));
+  set_optimized_code_weak_or_smi(MaybeObject::FromSmi(Smi::FromEnum(marker)));
 }
 
 void FeedbackVector::EvictOptimizedCodeMarkedForDeoptimization(
     SharedFunctionInfo* shared, const char* reason) {
-  Object* slot = optimized_code_cell();
-  if (slot->IsSmi()) return;
+  MaybeObject* slot = optimized_code_weak_or_smi();
+  if (slot->IsSmi()) {
+    return;
+  }
 
-  WeakCell* cell = WeakCell::cast(slot);
-  if (cell->cleared()) {
+  if (slot->IsClearedWeakHeapObject()) {
     ClearOptimizationMarker();
     return;
   }
 
-  Code* code = Code::cast(cell->value());
+  Code* code = Code::cast(slot->GetHeapObject());
   if (code->marked_for_deoptimization()) {
     if (FLAG_trace_deopt) {
       PrintF("[evicting optimizing code marked for deoptimization (%s) for ",
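The payoff of the migration is most visible in SetOptimizedCode: the removed path allocated a dedicated WeakCell per vector, while the new path only tags the pointer. Side by side (both taken from the removed and added lines above):

    // Before: allocate a WeakCell and store it (one extra heap object,
    // one extra indirection on every read).
    Handle<WeakCell> cell = factory->NewWeakCell(code);
    vector->set_optimized_code_cell(*cell);

    // After: store a tagged weak pointer in place; no allocation.
    vector->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*code));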
@@ -156,9 +156,9 @@ class FeedbackVector : public HeapObject {
   // feedback vector.
   DECL_ACCESSORS(shared_function_info, SharedFunctionInfo)
 
-  // [optimized_code_cell]: WeakCell containing optimized code or a Smi marker
-  // defining optimization behaviour.
-  DECL_ACCESSORS(optimized_code_cell, Object)
+  // [optimized_code_weak_or_smi]: weak reference to optimized code or a Smi
+  // marker defining optimization behaviour.
+  DECL_ACCESSORS(optimized_code_weak_or_smi, MaybeObject)
 
   // [length]: The length of the feedback vector (not including the header, i.e.
   // the number of feedback slots).
@@ -310,6 +310,7 @@
   F(MC_CLEAR_WEAK_CELLS) \
   F(MC_CLEAR_WEAK_COLLECTIONS) \
   F(MC_CLEAR_WEAK_LISTS) \
+  F(MC_CLEAR_WEAK_REFERENCES) \
   F(MC_EPILOGUE) \
   F(MC_EVACUATE) \
   F(MC_EVACUATE_CANDIDATES) \
@@ -91,12 +91,52 @@ class ConcurrentMarkingVisitor final
     return marking_state_.GreyToBlack(object);
   }
 
+  void ProcessStrongHeapObject(HeapObject* host, Object** slot,
+                               HeapObject* heap_object) {
+    MarkObject(heap_object);
+    MarkCompactCollector::RecordSlot(host, slot, heap_object);
+  }
+
+  void ProcessWeakHeapObject(HeapObject* host, HeapObjectReference** slot,
+                             HeapObject* heap_object) {
+    if (marking_state_.IsBlackOrGrey(heap_object)) {
+      // Weak references with live values are directly processed here to
+      // reduce the processing time of weak cells during the main GC
+      // pause.
+      MarkCompactCollector::RecordSlot(host, slot, heap_object);
+    } else {
+      // If we do not know about liveness of the value, we have to process
+      // the reference when we know the liveness of the whole transitive
+      // closure.
+      weak_objects_->weak_references.Push(task_id_, std::make_pair(host, slot));
+    }
+  }
+
   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
     for (Object** slot = start; slot < end; slot++) {
       Object* object = base::AsAtomicPointer::Relaxed_Load(slot);
-      if (!object->IsHeapObject()) continue;
-      MarkObject(HeapObject::cast(object));
-      MarkCompactCollector::RecordSlot(host, slot, object);
+      DCHECK(!Internals::HasWeakHeapObjectTag(object));
+      if (object->IsHeapObject()) {
+        ProcessStrongHeapObject(host, slot, HeapObject::cast(object));
+      }
     }
   }
 
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) override {
+    for (MaybeObject** slot = start; slot < end; slot++) {
+      MaybeObject* object = base::AsAtomicPointer::Relaxed_Load(slot);
+      HeapObject* heap_object;
+      if (object->ToStrongHeapObject(&heap_object)) {
+        // If the reference changes concurrently from strong to weak, the write
+        // barrier will treat the weak reference as strong, so we won't miss the
+        // weak reference.
+        ProcessStrongHeapObject(host, reinterpret_cast<Object**>(slot),
+                                heap_object);
+      } else if (object->ToWeakHeapObject(&heap_object)) {
+        ProcessWeakHeapObject(
+            host, reinterpret_cast<HeapObjectReference**>(slot), heap_object);
+      }
+    }
+  }
+
@@ -335,10 +375,18 @@ class ConcurrentMarkingVisitor final
     for (Object** p = start; p < end; p++) {
       Object* object = reinterpret_cast<Object*>(
           base::Relaxed_Load(reinterpret_cast<const base::AtomicWord*>(p)));
+      DCHECK(!Internals::HasWeakHeapObjectTag(object));
       slot_snapshot_->add(p, object);
     }
   }
 
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) override {
+    // This should never happen, because we don't use snapshotting for objects
+    // which contain weak references.
+    UNREACHABLE();
+  }
+
  private:
   SlotSnapshot* slot_snapshot_;
 };
@@ -485,6 +533,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
 
     weak_objects_->weak_cells.FlushToGlobal(task_id);
     weak_objects_->transition_arrays.FlushToGlobal(task_id);
+    weak_objects_->weak_references.FlushToGlobal(task_id);
     base::AsAtomicWord::Relaxed_Store<size_t>(&task_state->marked_bytes, 0);
     total_marked_bytes_.Increment(marked_bytes);
     {
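ProcessWeakHeapObject is the heart of the new weak-reference policy during marking: a weak slot whose target is already marked can be recorded immediately, while one whose target's liveness is unknown is deferred to the weak_references worklist and resolved (kept or cleared) only after the transitive closure is complete. A policy sketch with stand-in types (RecordSlot and the worklist are assumptions, not V8's signatures):

    #include <utility>
    #include <vector>

    struct HeapObject;
    using WeakSlot = std::pair<HeapObject*, void**>;  // (host, slot), as in the worklist

    void RecordSlot(const WeakSlot& slot);           // assumed provided
    std::vector<WeakSlot> deferred_weak_references;  // stand-in for the per-task worklist

    void HandleWeakSlot(const WeakSlot& slot, bool target_already_marked) {
      if (target_already_marked) {
        RecordSlot(slot);  // live now: process like a strong slot
      } else {
        // Liveness unknown until marking finishes; the reference is cleared
        // at the end of GC if the target stays unmarked.
        deferred_weak_references.push_back(slot);
      }
    }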
@@ -662,6 +662,7 @@ void GCTracer::PrintNVP() const {
           "clear.weak_cells=%.1f "
           "clear.weak_collections=%.1f "
           "clear.weak_lists=%.1f "
+          "clear.weak_references=%.1f "
           "epilogue=%.1f "
           "evacuate=%.1f "
           "evacuate.candidates=%.1f "
@@ -756,6 +757,7 @@ void GCTracer::PrintNVP() const {
           current_.scopes[Scope::MC_CLEAR_WEAK_CELLS],
           current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
           current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
+          current_.scopes[Scope::MC_CLEAR_WEAK_REFERENCES],
           current_.scopes[Scope::MC_EPILOGUE],
           current_.scopes[Scope::MC_EVACUATE],
           current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
@@ -411,26 +411,42 @@ void Heap::FinalizeExternalString(String* string) {
 Address Heap::NewSpaceTop() { return new_space_->top(); }
 
 bool Heap::InNewSpace(Object* object) {
+  DCHECK(!Internals::HasWeakHeapObjectTag(object));
+  return InNewSpace(MaybeObject::FromObject(object));
+}
+
+bool Heap::InFromSpace(Object* object) {
+  DCHECK(!Internals::HasWeakHeapObjectTag(object));
+  return InFromSpace(MaybeObject::FromObject(object));
+}
+
+bool Heap::InToSpace(Object* object) {
+  DCHECK(!Internals::HasWeakHeapObjectTag(object));
+  return InToSpace(MaybeObject::FromObject(object));
+}
+
+bool Heap::InNewSpace(MaybeObject* object) {
   // Inlined check from NewSpace::Contains.
-  bool result =
-      object->IsHeapObject() &&
-      Page::FromAddress(HeapObject::cast(object)->address())->InNewSpace();
+  HeapObject* heap_object;
+  bool result = object->ToStrongOrWeakHeapObject(&heap_object) &&
+                Page::FromAddress(heap_object->address())->InNewSpace();
   DCHECK(!result ||                 // Either not in new space
          gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
          InToSpace(object));        // ... or in to-space (where we allocate).
   return result;
 }
 
-bool Heap::InFromSpace(Object* object) {
-  return object->IsHeapObject() &&
-         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+bool Heap::InFromSpace(MaybeObject* object) {
+  HeapObject* heap_object;
+  return object->ToStrongOrWeakHeapObject(&heap_object) &&
+         MemoryChunk::FromAddress(heap_object->address())
              ->IsFlagSet(Page::IN_FROM_SPACE);
 }
 
-bool Heap::InToSpace(Object* object) {
-  return object->IsHeapObject() &&
-         MemoryChunk::FromAddress(HeapObject::cast(object)->address())
+bool Heap::InToSpace(MaybeObject* object) {
+  HeapObject* heap_object;
+  return object->ToStrongOrWeakHeapObject(&heap_object) &&
+         MemoryChunk::FromAddress(heap_object->address())
              ->IsFlagSet(Page::IN_TO_SPACE);
 }
 
@@ -452,6 +468,13 @@ bool Heap::ShouldBePromoted(Address old_address) {
 }
 
 void Heap::RecordWrite(Object* object, Object** slot, Object* value) {
+  DCHECK(!Internals::HasWeakHeapObjectTag(*slot));
+  DCHECK(!Internals::HasWeakHeapObjectTag(value));
+  RecordWrite(object, reinterpret_cast<MaybeObject**>(slot),
+              reinterpret_cast<MaybeObject*>(value));
+}
+
+void Heap::RecordWrite(Object* object, MaybeObject** slot, MaybeObject* value) {
   if (!InNewSpace(value) || !object->IsHeapObject() || InNewSpace(object)) {
     return;
   }
@@ -48,6 +48,7 @@
 #include "src/instruction-stream.h"
 #include "src/interpreter/interpreter.h"
 #include "src/objects/data-handler.h"
+#include "src/objects/maybe-object.h"
 #include "src/objects/shared-function-info.h"
 #include "src/regexp/jsregexp.h"
 #include "src/runtime-profiler.h"
@@ -1445,6 +1446,7 @@ class StringTableVerifier : public ObjectVisitor {
   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
     // Visit all HeapObject pointers in [start, end).
     for (Object** p = start; p < end; p++) {
+      DCHECK(!Internals::HasWeakHeapObjectTag(*p));
       if ((*p)->IsHeapObject()) {
         HeapObject* object = HeapObject::cast(*p);
         Isolate* isolate = object->GetIsolate();
@@ -1454,6 +1456,10 @@ class StringTableVerifier : public ObjectVisitor {
       }
     }
   }
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) override {
+    UNREACHABLE();
+  }
 };
 
@@ -3931,7 +3937,7 @@ AllocationResult Heap::CopyFeedbackVector(FeedbackVector* src) {
 
   // Slow case: Just copy the content one-by-one.
   result->set_shared_function_info(src->shared_function_info());
-  result->set_optimized_code_cell(src->optimized_code_cell());
+  result->set_optimized_code_weak_or_smi(src->optimized_code_weak_or_smi());
   result->set_invocation_count(src->invocation_count());
   result->set_profiler_ticks(src->profiler_ticks());
   result->set_deopt_count(src->deopt_count());
@@ -4076,9 +4082,9 @@ AllocationResult Heap::AllocateFeedbackVector(SharedFunctionInfo* shared,
   result->set_map_after_allocation(feedback_vector_map(), SKIP_WRITE_BARRIER);
   FeedbackVector* vector = FeedbackVector::cast(result);
   vector->set_shared_function_info(shared);
-  vector->set_optimized_code_cell(Smi::FromEnum(
+  vector->set_optimized_code_weak_or_smi(MaybeObject::FromSmi(Smi::FromEnum(
       FLAG_log_function_events ? OptimizationMarker::kLogFirstExecution
-                               : OptimizationMarker::kNone));
+                               : OptimizationMarker::kNone)));
   vector->set_length(length);
   vector->set_invocation_count(0);
   vector->set_profiler_ticks(0);
@@ -4342,17 +4348,22 @@ void Heap::NotifyObjectLayoutChange(HeapObject* object, int size,
 class SlotCollectingVisitor final : public ObjectVisitor {
  public:
   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
-    for (Object** p = start; p < end; p++) {
+    VisitPointers(host, reinterpret_cast<MaybeObject**>(start),
+                  reinterpret_cast<MaybeObject**>(end));
+  }
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) final {
+    for (MaybeObject** p = start; p < end; p++) {
       slots_.push_back(p);
     }
   }
 
   int number_of_slots() { return static_cast<int>(slots_.size()); }
 
-  Object** slot(int i) { return slots_[i]; }
+  MaybeObject** slot(int i) { return slots_[i]; }
 
  private:
-  std::vector<Object**> slots_;
+  std::vector<MaybeObject**> slots_;
 };
 
 void Heap::VerifyObjectLayoutChange(HeapObject* object, Map* new_map) {
@@ -4829,10 +4840,22 @@ class SlotVerifyingVisitor : public ObjectVisitor {
                        std::set<std::pair<SlotType, Address> >* typed)
       : untyped_(untyped), typed_(typed) {}
 
-  virtual bool ShouldHaveBeenRecorded(HeapObject* host, Object* target) = 0;
+  virtual bool ShouldHaveBeenRecorded(HeapObject* host,
+                                      MaybeObject* target) = 0;
 
   void VisitPointers(HeapObject* host, Object** start, Object** end) override {
+#ifdef DEBUG
+    for (Object** slot = start; slot < end; slot++) {
+      DCHECK(!Internals::HasWeakHeapObjectTag(*slot));
+    }
+#endif  // DEBUG
+    VisitPointers(host, reinterpret_cast<MaybeObject**>(start),
+                  reinterpret_cast<MaybeObject**>(end));
+  }
+
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) final {
     for (MaybeObject** slot = start; slot < end; slot++) {
       if (ShouldHaveBeenRecorded(host, *slot)) {
        CHECK_GT(untyped_->count(reinterpret_cast<Address>(slot)), 0);
      }
@@ -4841,7 +4864,7 @@ class SlotVerifyingVisitor : public ObjectVisitor {
 
   void VisitCodeTarget(Code* host, RelocInfo* rinfo) override {
     Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
-    if (ShouldHaveBeenRecorded(host, target)) {
+    if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
       CHECK(
           InTypedSet(CODE_TARGET_SLOT, rinfo->pc()) ||
           (rinfo->IsInConstantPool() &&
@@ -4851,7 +4874,7 @@ class SlotVerifyingVisitor : public ObjectVisitor {
 
   void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) override {
     Object* target = rinfo->target_object();
-    if (ShouldHaveBeenRecorded(host, target)) {
+    if (ShouldHaveBeenRecorded(host, MaybeObject::FromObject(target))) {
       CHECK(InTypedSet(EMBEDDED_OBJECT_SLOT, rinfo->pc()) ||
             (rinfo->IsInConstantPool() &&
              InTypedSet(OBJECT_SLOT, rinfo->constant_pool_entry_address())));
@@ -4872,10 +4895,11 @@ class OldToNewSlotVerifyingVisitor : public SlotVerifyingVisitor {
                                std::set<std::pair<SlotType, Address> >* typed)
       : SlotVerifyingVisitor(untyped, typed), heap_(heap) {}
 
-  bool ShouldHaveBeenRecorded(HeapObject* host, Object* target) override {
-    DCHECK_IMPLIES(target->IsHeapObject() && heap_->InNewSpace(target),
-                   heap_->InToSpace(target));
-    return target->IsHeapObject() && heap_->InNewSpace(target) &&
+  bool ShouldHaveBeenRecorded(HeapObject* host, MaybeObject* target) override {
+    DCHECK_IMPLIES(
+        target->IsStrongOrWeakHeapObject() && heap_->InNewSpace(target),
+        heap_->InToSpace(target));
+    return target->IsStrongOrWeakHeapObject() && heap_->InNewSpace(target) &&
            !heap_->InNewSpace(host);
   }
 
@@ -5636,7 +5660,8 @@ bool Heap::SetUp() {
 
   mark_compact_collector_ = new MarkCompactCollector(this);
   incremental_marking_ =
-      new IncrementalMarking(this, mark_compact_collector_->marking_worklist());
+      new IncrementalMarking(this, mark_compact_collector_->marking_worklist(),
+                             mark_compact_collector_->weak_objects());
 
   if (FLAG_concurrent_marking) {
     MarkCompactCollector::MarkingWorklist* marking_worklist =
@@ -6298,12 +6323,19 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
 
   void VisitPointers(HeapObject* host, Object** start,
                      Object** end) override {
+    MarkPointers(reinterpret_cast<MaybeObject**>(start),
+                 reinterpret_cast<MaybeObject**>(end));
+  }
+
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) final {
     MarkPointers(start, end);
   }
 
   void VisitRootPointers(Root root, const char* description, Object** start,
                          Object** end) override {
-    MarkPointers(start, end);
+    MarkPointers(reinterpret_cast<MaybeObject**>(start),
+                 reinterpret_cast<MaybeObject**>(end));
   }
 
   void TransitiveClosure() {
@@ -6315,12 +6347,14 @@ class UnreachableObjectsFilter : public HeapObjectsFilter {
   }
 
  private:
-  void MarkPointers(Object** start, Object** end) {
-    for (Object** p = start; p < end; p++) {
-      if (!(*p)->IsHeapObject()) continue;
-      HeapObject* obj = HeapObject::cast(*p);
-      if (filter_->MarkAsReachable(obj)) {
-        marking_stack_.push_back(obj);
+  void MarkPointers(MaybeObject** start, MaybeObject** end) {
+    // Treat weak references as strong.
+    for (MaybeObject** p = start; p < end; p++) {
+      HeapObject* heap_object;
+      if ((*p)->ToStrongOrWeakHeapObject(&heap_object)) {
+        if (filter_->MarkAsReachable(heap_object)) {
+          marking_stack_.push_back(heap_object);
+        }
       }
     }
   }
@@ -6627,23 +6661,31 @@ const char* AllocationSpaceName(AllocationSpace space) {
 
 void VerifyPointersVisitor::VisitPointers(HeapObject* host, Object** start,
                                           Object** end) {
+  VerifyPointers(reinterpret_cast<MaybeObject**>(start),
+                 reinterpret_cast<MaybeObject**>(end));
+}
+
+void VerifyPointersVisitor::VisitPointers(HeapObject* host, MaybeObject** start,
+                                          MaybeObject** end) {
   VerifyPointers(start, end);
 }
 
 void VerifyPointersVisitor::VisitRootPointers(Root root,
                                               const char* description,
                                               Object** start, Object** end) {
-  VerifyPointers(start, end);
+  VerifyPointers(reinterpret_cast<MaybeObject**>(start),
+                 reinterpret_cast<MaybeObject**>(end));
 }
 
-void VerifyPointersVisitor::VerifyPointers(Object** start, Object** end) {
-  for (Object** current = start; current < end; current++) {
-    if ((*current)->IsHeapObject()) {
-      HeapObject* object = HeapObject::cast(*current);
+void VerifyPointersVisitor::VerifyPointers(MaybeObject** start,
+                                           MaybeObject** end) {
+  for (MaybeObject** current = start; current < end; current++) {
+    HeapObject* object;
+    if ((*current)->ToStrongOrWeakHeapObject(&object)) {
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    } else {
-      CHECK((*current)->IsSmi());
+      CHECK((*current)->IsSmi() || (*current)->IsClearedWeakHeapObject());
    }
  }
 }
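A pattern repeated across the visitors in this file: the strong Object** overload becomes a thin forwarder to the MaybeObject** overload, which is sound because a strong reference is a valid MaybeObject whose tag happens to say "strong". A minimal sketch of the shape (a hypothetical visitor, not V8's class):

    struct Object;
    struct MaybeObject;

    struct ExampleVisitor {
      // Strong slots are a subset of maybe-weak slots, so forwarding by
      // reinterpret_cast preserves values and lets one loop handle both.
      void VisitPointers(Object** start, Object** end) {
        VisitPointers(reinterpret_cast<MaybeObject**>(start),
                      reinterpret_cast<MaybeObject**>(end));
      }
      void VisitPointers(MaybeObject** start, MaybeObject** end) {
        // ... decode each slot as strong, weak, or cleared ...
      }
    };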
@@ -1196,6 +1196,8 @@ class Heap {
   // ===========================================================================
 
   // Write barrier support for object[offset] = o;
+  inline void RecordWrite(Object* object, MaybeObject** slot,
+                          MaybeObject* value);
   inline void RecordWrite(Object* object, Object** slot, Object* value);
   inline void RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* target);
   void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* target);
@@ -1316,6 +1318,9 @@ class Heap {
   inline bool InNewSpace(Object* object);
   inline bool InFromSpace(Object* object);
   inline bool InToSpace(Object* object);
+  inline bool InNewSpace(MaybeObject* object);
+  inline bool InFromSpace(MaybeObject* object);
+  inline bool InToSpace(MaybeObject* object);
 
   // Returns whether the object resides in old space.
   inline bool InOldSpace(Object* object);
@@ -2727,11 +2732,13 @@ class CodePageMemoryModificationScope {
 class VerifyPointersVisitor : public ObjectVisitor, public RootVisitor {
  public:
   void VisitPointers(HeapObject* host, Object** start, Object** end) override;
+  void VisitPointers(HeapObject* host, MaybeObject** start,
+                     MaybeObject** end) override;
   void VisitRootPointers(Root root, const char* description, Object** start,
                          Object** end) override;
 
  private:
-  void VerifyPointers(Object** start, Object** end);
+  void VerifyPointers(MaybeObject** start, MaybeObject** end);
 };
@@ -7,6 +7,7 @@
 
 #include "src/heap/incremental-marking.h"
 #include "src/isolate.h"
+#include "src/objects/maybe-object.h"
 
 namespace v8 {
 namespace internal {
@@ -14,8 +15,21 @@ namespace internal {
 
 void IncrementalMarking::RecordWrite(HeapObject* obj, Object** slot,
                                      Object* value) {
-  if (IsMarking() && value->IsHeapObject()) {
-    RecordWriteSlow(obj, slot, value);
+  DCHECK_IMPLIES(slot != nullptr, !Internals::HasWeakHeapObjectTag(*slot));
+  DCHECK(!Internals::HasWeakHeapObjectTag(value));
+  RecordMaybeWeakWrite(obj, reinterpret_cast<MaybeObject**>(slot),
+                       reinterpret_cast<MaybeObject*>(value));
+}
+
+void IncrementalMarking::RecordMaybeWeakWrite(HeapObject* obj,
+                                              MaybeObject** slot,
+                                              MaybeObject* value) {
+  // When writing a weak reference, treat it as strong for the purposes of the
+  // marking barrier.
+  HeapObject* heap_object;
+  if (IsMarking() && value->ToStrongOrWeakHeapObject(&heap_object)) {
+    RecordWriteSlow(obj, reinterpret_cast<HeapObjectReference**>(slot),
+                    heap_object);
   }
 }
@@ -53,9 +53,11 @@ void IncrementalMarking::Observer::Step(int bytes_allocated, Address addr,
 }
 
 IncrementalMarking::IncrementalMarking(
-    Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist)
+    Heap* heap, MarkCompactCollector::MarkingWorklist* marking_worklist,
+    WeakObjects* weak_objects)
     : heap_(heap),
       marking_worklist_(marking_worklist),
+      weak_objects_(weak_objects),
       initial_old_generation_size_(0),
       bytes_marked_ahead_of_schedule_(0),
       bytes_marked_concurrently_(0),
@@ -91,8 +93,8 @@ bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
   return is_compacting_ && need_recording;
 }
 
-void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
+void IncrementalMarking::RecordWriteSlow(HeapObject* obj,
+                                         HeapObjectReference** slot,
                                          Object* value) {
   if (BaseRecordWrite(obj, value) && slot != nullptr) {
     // Object is not going to be rescanned we need to record the slot.
@@ -558,8 +560,6 @@ void IncrementalMarking::FinalizeIncrementally() {
   // objects to reduce the marking load in the final pause.
   // 1) We scan and mark the roots again to find all changes to the root set.
   // 2) Age and retain maps embedded in optimized code.
-  // 3) Remove weak cell with live values from the list of weak cells, they
-  //    do not need processing during GC.
   MarkRoots();
 
   // Map retaining is needed for performance, not correctness,
@@ -638,6 +638,30 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
           return false;
         }
       });
+
+  UpdateWeakReferencesAfterScavenge();
 }
 
+void IncrementalMarking::UpdateWeakReferencesAfterScavenge() {
+  weak_objects_->weak_references.Update(
+      [](std::pair<HeapObject*, HeapObjectReference**> slot_in,
+         std::pair<HeapObject*, HeapObjectReference**>* slot_out) -> bool {
+        HeapObject* heap_obj = slot_in.first;
+        MapWord map_word = heap_obj->map_word();
+        if (map_word.IsForwardingAddress()) {
+          ptrdiff_t distance_to_slot =
+              reinterpret_cast<Address>(slot_in.second) -
+              reinterpret_cast<Address>(slot_in.first);
+          Address new_slot =
+              reinterpret_cast<Address>(map_word.ToForwardingAddress()) +
+              distance_to_slot;
+          slot_out->first = map_word.ToForwardingAddress();
+          slot_out->second = reinterpret_cast<HeapObjectReference**>(new_slot);
+        } else {
+          *slot_out = slot_in;
+        }
+        return true;
+      });
+}
 
 void IncrementalMarking::UpdateMarkedBytesAfterScavenge(
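UpdateWeakReferencesAfterScavenge exploits the defining property of in-place weak references: the slot lives at a fixed offset inside its host, so when the scavenger moves the host, the slot moves by exactly the same displacement. The arithmetic, as a self-contained sketch (a worked restatement of the lambda above, not V8 code):

    #include <cstddef>
    #include <cstdint>

    using Address = uintptr_t;

    // If the host was evacuated from old_host to new_host, an embedded slot at
    // old_slot is found at new_host + (old_slot - old_host) afterwards.
    Address ForwardEmbeddedSlot(Address old_host, Address new_host, Address old_slot) {
      ptrdiff_t distance_to_slot = static_cast<ptrdiff_t>(old_slot - old_host);
      return new_host + distance_to_slot;
    }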
@@ -87,7 +87,8 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
 #endif
 
   IncrementalMarking(Heap* heap,
-                     MarkCompactCollector::MarkingWorklist* marking_worklist);
+                     MarkCompactCollector::MarkingWorklist* marking_worklist,
+                     WeakObjects* weak_objects);
 
   MarkingState* marking_state() { return &marking_state_; }
 
@@ -165,6 +166,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void FinalizeIncrementally();
 
   void UpdateMarkingWorklistAfterScavenge();
+  void UpdateWeakReferencesAfterScavenge();
   void UpdateMarkedBytesAfterScavenge(size_t dead_bytes_in_new_space);
 
   void Hurry();
@@ -205,11 +207,14 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   // the incremental cycle (stays white).
   V8_INLINE bool BaseRecordWrite(HeapObject* obj, Object* value);
   V8_INLINE void RecordWrite(HeapObject* obj, Object** slot, Object* value);
+  V8_INLINE void RecordMaybeWeakWrite(HeapObject* obj, MaybeObject** slot,
+                                      MaybeObject* value);
   V8_INLINE void RecordWriteIntoCode(Code* host, RelocInfo* rinfo,
                                      Object* value);
   V8_INLINE void RecordWrites(HeapObject* obj);
 
-  void RecordWriteSlow(HeapObject* obj, Object** slot, Object* value);
+  void RecordWriteSlow(HeapObject* obj, HeapObjectReference** slot,
+                       Object* value);
   void RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo, Object* value);
 
   // Returns true if the function succeeds in transitioning the object
@@ -324,6 +329,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
 
   Heap* const heap_;
   MarkCompactCollector::MarkingWorklist* const marking_worklist_;
+  WeakObjects* weak_objects_;
 
   double start_time_ms_;
   size_t initial_old_generation_size_;
@@ -184,6 +184,32 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
   MarkObject(host, target_object);
 }
 
+template <FixedArrayVisitationMode fixed_array_mode,
+          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+void MarkingVisitor<fixed_array_mode, retaining_path_mode,
+                    MarkingState>::VisitPointer(HeapObject* host,
+                                                MaybeObject** p) {
+  HeapObject* target_object;
+  if ((*p)->ToStrongHeapObject(&target_object)) {
+    collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
+                           target_object);
+    MarkObject(host, target_object);
+  } else if ((*p)->ToWeakHeapObject(&target_object)) {
+    if (marking_state()->IsBlackOrGrey(target_object)) {
+      // Weak references with live values are directly processed here to reduce
+      // the processing time of weak cells during the main GC pause.
+      collector_->RecordSlot(host, reinterpret_cast<HeapObjectReference**>(p),
+                             target_object);
+    } else {
+      // If we do not know about liveness of values of weak cells, we have to
+      // process them when we know the liveness of the whole transitive
+      // closure.
+      collector_->AddWeakReference(host,
+                                   reinterpret_cast<HeapObjectReference**>(p));
+    }
+  }
+}
+
 template <FixedArrayVisitationMode fixed_array_mode,
           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
@@ -194,6 +220,17 @@ void MarkingVisitor<fixed_array_mode, retaining_path_mode,
   }
 }
 
+template <FixedArrayVisitationMode fixed_array_mode,
+          TraceRetainingPathMode retaining_path_mode, typename MarkingState>
+void MarkingVisitor<fixed_array_mode, retaining_path_mode,
+                    MarkingState>::VisitPointers(HeapObject* host,
+                                                 MaybeObject** start,
+                                                 MaybeObject** end) {
+  for (MaybeObject** p = start; p < end; p++) {
+    VisitPointer(host, p);
+  }
+}
+
 template <FixedArrayVisitationMode fixed_array_mode,
           TraceRetainingPathMode retaining_path_mode, typename MarkingState>
 void MarkingVisitor<fixed_array_mode, retaining_path_mode,
@@ -353,6 +390,12 @@ void MarkCompactCollector::MarkExternallyReferencedObject(HeapObject* obj) {
 
 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
                                       Object* target) {
+  RecordSlot(object, reinterpret_cast<HeapObjectReference**>(slot), target);
+}
+
+void MarkCompactCollector::RecordSlot(HeapObject* object,
+                                      HeapObjectReference** slot,
+                                      Object* target) {
   Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
   Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
   if (target_page->IsEvacuationCandidate<AccessMode::ATOMIC>() &&
@ -62,6 +62,7 @@ class MarkingVerifier : public ObjectVisitor, public RootVisitor {
|
||||
virtual Bitmap* bitmap(const MemoryChunk* chunk) = 0;
|
||||
|
||||
virtual void VerifyPointers(Object** start, Object** end) = 0;
|
||||
virtual void VerifyPointers(MaybeObject** start, MaybeObject** end) = 0;
|
||||
|
||||
virtual bool IsMarked(HeapObject* object) = 0;
|
||||
|
||||
@ -71,6 +72,11 @@ class MarkingVerifier : public ObjectVisitor, public RootVisitor {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject* host, MaybeObject** start,
|
||||
MaybeObject** end) override {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitRootPointers(Root root, const char* description, Object** start,
|
||||
Object** end) override {
|
||||
VerifyPointers(start, end);
|
||||
@ -182,6 +188,15 @@ class FullMarkingVerifier : public MarkingVerifier {
|
||||
}
|
||||
}
|
||||
|
||||
void VerifyPointers(MaybeObject** start, MaybeObject** end) override {
|
||||
for (MaybeObject** current = start; current < end; current++) {
|
||||
HeapObject* object;
|
||||
if ((*current)->ToStrongHeapObject(&object)) {
|
||||
CHECK(marking_state_->IsBlackOrGrey(object));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) override {
|
||||
DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
|
||||
if (!host->IsWeakObject(rinfo->target_object())) {
|
||||
@ -220,6 +235,7 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
|
||||
|
||||
void VerifyPointers(Object** start, Object** end) override {
|
||||
for (Object** current = start; current < end; current++) {
|
||||
DCHECK(!Internals::HasWeakHeapObjectTag(*current));
|
||||
if ((*current)->IsHeapObject()) {
|
||||
HeapObject* object = HeapObject::cast(*current);
|
||||
if (!heap_->InNewSpace(object)) return;
|
||||
@ -228,6 +244,19 @@ class YoungGenerationMarkingVerifier : public MarkingVerifier {
|
||||
}
|
||||
}
|
||||
|
||||
void VerifyPointers(MaybeObject** start, MaybeObject** end) override {
|
||||
for (MaybeObject** current = start; current < end; current++) {
|
||||
HeapObject* object;
|
||||
// Minor MC treats weak references as strong.
|
||||
if ((*current)->ToStrongOrWeakHeapObject(&object)) {
|
||||
if (!heap_->InNewSpace(object)) {
|
||||
continue;
|
||||
}
|
||||
CHECK(IsMarked(object));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
MinorMarkCompactCollector::NonAtomicMarkingState* marking_state_;
|
||||
};
|
||||
@ -240,6 +269,11 @@ class EvacuationVerifier : public ObjectVisitor, public RootVisitor {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitPointers(HeapObject* host, MaybeObject** start,
|
||||
MaybeObject** end) override {
|
||||
VerifyPointers(start, end);
|
||||
}
|
||||
|
||||
void VisitRootPointers(Root root, const char* description, Object** start,
|
||||
Object** end) override {
|
||||
VerifyPointers(start, end);
|
||||
@ -251,6 +285,7 @@ class EvacuationVerifier : public ObjectVisitor, public RootVisitor {
|
||||
inline Heap* heap() { return heap_; }
|
||||
|
||||
virtual void VerifyPointers(Object** start, Object** end) = 0;
|
||||
virtual void VerifyPointers(MaybeObject** start, MaybeObject** end) = 0;
|
||||
|
||||
void VerifyRoots(VisitMode mode);
|
||||
void VerifyEvacuationOnPage(Address start, Address end);
|
||||
@ -320,6 +355,17 @@ class FullEvacuationVerifier : public EvacuationVerifier {
|
||||
}
|
||||
}
|
||||
}
|
||||
void VerifyPointers(MaybeObject** start, MaybeObject** end) override {
|
||||
for (MaybeObject** current = start; current < end; current++) {
|
||||
HeapObject* object;
|
||||
if ((*current)->ToStrongHeapObject(&object)) {
|
||||
if (heap()->InNewSpace(object)) {
|
||||
CHECK(heap()->InToSpace(object));
|
||||
}
|
||||
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
|
||||
@ -344,6 +390,14 @@ class YoungGenerationEvacuationVerifier : public EvacuationVerifier {
|
||||
}
|
||||
}
|
||||
}
|
||||
void VerifyPointers(MaybeObject** start, MaybeObject** end) override {
|
||||
for (MaybeObject** current = start; current < end; current++) {
|
||||
HeapObject* object;
|
||||
if ((*current)->ToStrongOrWeakHeapObject(&object)) {
|
||||
CHECK_IMPLIES(heap()->InNewSpace(object), heap()->InToSpace(object));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace
|
||||
@ -1010,7 +1064,16 @@ class MarkCompactCollector::CustomRootBodyMarkingVisitor final
  }

  void VisitPointers(HeapObject* host, Object** start, Object** end) final {
    for (Object** p = start; p < end; p++) MarkObject(host, *p);
    for (Object** p = start; p < end; p++) {
      DCHECK(!Internals::HasWeakHeapObjectTag(*p));
      MarkObject(host, *p);
    }
  }

  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) final {
    // At the moment, custom roots cannot contain weak pointers.
    UNREACHABLE();
  }

  // VisitEmbedderPointer is defined by ObjectVisitor to call VisitPointers.
@ -1051,6 +1114,11 @@ class InternalizedStringTableCleaner : public ObjectVisitor {
    }
  }

  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) final {
    UNREACHABLE();
  }

  int PointersRemoved() {
    return pointers_removed_;
  }
@ -1191,13 +1259,27 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
      : collector_(collector) {}

  inline void VisitPointer(HeapObject* host, Object** p) final {
    DCHECK(!Internals::HasWeakHeapObjectTag(*p));
    RecordMigratedSlot(host, reinterpret_cast<MaybeObject*>(*p),
                       reinterpret_cast<Address>(p));
  }

  inline void VisitPointer(HeapObject* host, MaybeObject** p) final {
    RecordMigratedSlot(host, *p, reinterpret_cast<Address>(p));
  }

  inline void VisitPointers(HeapObject* host, Object** start,
                            Object** end) final {
    while (start < end) {
      RecordMigratedSlot(host, *start, reinterpret_cast<Address>(start));
      VisitPointer(host, start);
      ++start;
    }
  }

  inline void VisitPointers(HeapObject* host, MaybeObject** start,
                            MaybeObject** end) final {
    while (start < end) {
      VisitPointer(host, start);
      ++start;
    }
  }
@ -1227,9 +1309,9 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
  inline void VisitInternalReference(Code* host, RelocInfo* rinfo) final {}

 protected:
  inline virtual void RecordMigratedSlot(HeapObject* host, Object* value,
  inline virtual void RecordMigratedSlot(HeapObject* host, MaybeObject* value,
                                         Address slot) {
    if (value->IsHeapObject()) {
    if (value->IsStrongOrWeakHeapObject()) {
      Page* p = Page::FromAddress(reinterpret_cast<Address>(value));
      if (p->InNewSpace()) {
        DCHECK_IMPLIES(p->InToSpace(),
@ -1314,9 +1396,9 @@ class YoungGenerationRecordMigratedSlotVisitor final
    return collector_->non_atomic_marking_state()->IsBlack(object);
  }

  inline void RecordMigratedSlot(HeapObject* host, Object* value,
  inline void RecordMigratedSlot(HeapObject* host, MaybeObject* value,
                                 Address slot) final {
    if (value->IsHeapObject()) {
    if (value->IsStrongOrWeakHeapObject()) {
      Page* p = Page::FromAddress(reinterpret_cast<Address>(value));
      if (p->InNewSpace()) {
        DCHECK_IMPLIES(p->InToSpace(),
@ -1742,14 +1824,34 @@ class YoungGenerationMarkingVisitor final
    }
  }

  V8_INLINE void VisitPointers(HeapObject* host, MaybeObject** start,
                               MaybeObject** end) final {
    for (MaybeObject** p = start; p < end; p++) {
      VisitPointer(host, p);
    }
  }

  V8_INLINE void VisitPointer(HeapObject* host, Object** slot) final {
    Object* target = *slot;
    DCHECK(!Internals::HasWeakHeapObjectTag(target));
    if (heap_->InNewSpace(target)) {
      HeapObject* target_object = HeapObject::cast(target);
      MarkObjectViaMarkingWorklist(target_object);
    }
  }

  V8_INLINE void VisitPointer(HeapObject* host, MaybeObject** slot) final {
    MaybeObject* target = *slot;
    if (heap_->InNewSpace(target)) {
      HeapObject* target_object;
      // Treat weak references as strong. TODO(marja): Proper weakness handling
      // for minor-mcs.
      if (target->ToStrongOrWeakHeapObject(&target_object)) {
        MarkObjectViaMarkingWorklist(target_object);
      }
    }
  }

 private:
  inline void MarkObjectViaMarkingWorklist(HeapObject* object) {
    if (marking_state_->WhiteToGrey(object)) {
@ -1938,7 +2040,7 @@ class PageMarkingItem : public MarkingItem {
        chunk_, [this, isolate, task](SlotType slot_type, Address host_addr,
                                      Address slot) {
          return UpdateTypedSlotHelper::UpdateTypedSlot(
              isolate, slot_type, slot, [this, task](Object** slot) {
              isolate, slot_type, slot, [this, task](MaybeObject** slot) {
                return CheckAndMarkObject(task,
                                          reinterpret_cast<Address>(slot));
              });
@ -1947,12 +2049,15 @@

  SlotCallbackResult CheckAndMarkObject(YoungGenerationMarkingTask* task,
                                        Address slot_address) {
    Object* object = *reinterpret_cast<Object**>(slot_address);
    MaybeObject* object = *reinterpret_cast<MaybeObject**>(slot_address);
    if (heap()->InNewSpace(object)) {
      // Marking happens before flipping the young generation, so the object
      // has to be in ToSpace.
      DCHECK(heap()->InToSpace(object));
      HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
      HeapObject* heap_object;
      bool success = object->ToStrongOrWeakHeapObject(&heap_object);
      USE(success);
      DCHECK(success);
      task->MarkObject(heap_object);
      slots_++;
      return KEEP_SLOT;
@ -2459,12 +2564,14 @@ void MarkCompactCollector::ClearNonLiveReferences() {
  }
  DependentCode* dependent_code_list;
  ClearWeakCellsAndSimpleMapTransitions(&dependent_code_list);
  ClearWeakReferences();
  MarkDependentCodeForDeoptimization(dependent_code_list);

  ClearWeakCollections();

  DCHECK(weak_objects_.weak_cells.IsGlobalEmpty());
  DCHECK(weak_objects_.transition_arrays.IsGlobalEmpty());
  DCHECK(weak_objects_.weak_references.IsGlobalEmpty());
}


@ -2803,6 +2910,25 @@ void MarkCompactCollector::ClearWeakCellsAndSimpleMapTransitions(
  *dependent_code_list = dependent_code_head;
}

void MarkCompactCollector::ClearWeakReferences() {
  TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_CLEAR_WEAK_REFERENCES);
  std::pair<HeapObject*, HeapObjectReference**> slot;
  while (weak_objects_.weak_references.Pop(kMainThread, &slot)) {
    HeapObject* value;
    HeapObjectReference** location = slot.second;
    if ((*location)->ToWeakHeapObject(&value)) {
      DCHECK(!value->IsCell());
      DCHECK(!value->IsMap());
      if (non_atomic_marking_state()->IsBlackOrGrey(value)) {
        // The value of the weak reference is alive.
        RecordSlot(slot.first, location, value);
      } else {
        *location = HeapObjectReference::ClearedValue();
      }
    }
  }
}
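
In isolation, the clearing policy above amounts to the following. This is a minimal self-contained sketch with stand-in types for V8's worklist and marking state, for illustration only, not part of the patch:

// Minimal sketch of the weak-reference clearing policy (stand-in types).
#include <utility>
#include <vector>

struct FakeHeapObject { bool marked_live; };
using WeakSlot = FakeHeapObject**;
// Stand-in for HeapObjectReference::ClearedValue().
FakeHeapObject* const kClearedSentinel = reinterpret_cast<FakeHeapObject*>(3);

void ClearDeadWeakReferences(
    std::vector<std::pair<FakeHeapObject*, WeakSlot>>& weak_refs) {
  for (auto& entry : weak_refs) {
    WeakSlot location = entry.second;
    FakeHeapObject* value = *location;
    if (value->marked_live) {
      // The target survived marking: the weak reference stays as-is.
      // (V8 additionally re-records the slot via RecordSlot.)
    } else {
      // The target is dead: overwrite the slot with the cleared sentinel.
      *location = kClearedSentinel;
    }
  }
}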

void MarkCompactCollector::AbortWeakObjects() {
  weak_objects_.weak_cells.Clear();
  weak_objects_.transition_arrays.Clear();
@ -2833,30 +2959,57 @@ void MarkCompactCollector::RecordRelocSlot(Code* host, RelocInfo* rinfo,
}

template <AccessMode access_mode>
static inline SlotCallbackResult UpdateSlot(Object** slot) {
  Object* obj = *slot;
  if (obj->IsHeapObject()) {
    HeapObject* heap_obj = HeapObject::cast(obj);
    MapWord map_word = heap_obj->map_word();
    if (map_word.IsForwardingAddress()) {
      DCHECK(heap_obj->GetHeap()->InFromSpace(heap_obj) ||
             MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
             Page::FromAddress(heap_obj->address())
                 ->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
      HeapObject* target = map_word.ToForwardingAddress();
      if (access_mode == AccessMode::NON_ATOMIC) {
        *slot = target;
      } else {
        base::AsAtomicPointer::Release_CompareAndSwap(slot, obj, target);
      }
      DCHECK(!heap_obj->GetHeap()->InFromSpace(target));
      DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
static inline SlotCallbackResult UpdateSlot(
    MaybeObject** slot, MaybeObject* old, HeapObject* heap_obj,
    HeapObjectReference::ReferenceType reference_type) {
  MapWord map_word = heap_obj->map_word();
  if (map_word.IsForwardingAddress()) {
    DCHECK(heap_obj->GetHeap()->InFromSpace(heap_obj) ||
           MarkCompactCollector::IsOnEvacuationCandidate(heap_obj) ||
           Page::FromAddress(heap_obj->address())
               ->IsFlagSet(Page::COMPACTION_WAS_ABORTED));
    MaybeObject* target =
        reference_type == HeapObjectReference::WEAK
            ? HeapObjectReference::Weak(map_word.ToForwardingAddress())
            : HeapObjectReference::Strong(map_word.ToForwardingAddress());
    if (access_mode == AccessMode::NON_ATOMIC) {
      *slot = target;
    } else {
      base::AsAtomicPointer::Release_CompareAndSwap(slot, old, target);
    }
    DCHECK(!heap_obj->GetHeap()->InFromSpace(target));
    DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(target));
  }
  // OLD_TO_OLD slots are always removed after updating.
  return REMOVE_SLOT;
}

template <AccessMode access_mode>
static inline SlotCallbackResult UpdateSlot(MaybeObject** slot) {
  MaybeObject* obj = base::AsAtomicPointer::Relaxed_Load(slot);
  HeapObject* heap_obj;
  if (obj->ToWeakHeapObject(&heap_obj)) {
    UpdateSlot<access_mode>(slot, obj, heap_obj, HeapObjectReference::WEAK);
  } else if (obj->ToStrongHeapObject(&heap_obj)) {
    return UpdateSlot<access_mode>(slot, obj, heap_obj,
                                   HeapObjectReference::STRONG);
  }
  return REMOVE_SLOT;
}

template <AccessMode access_mode>
static inline SlotCallbackResult UpdateStrongSlot(MaybeObject** maybe_slot) {
  DCHECK((*maybe_slot)->IsSmi() || (*maybe_slot)->IsStrongHeapObject());
  Object** slot = reinterpret_cast<Object**>(maybe_slot);
  Object* obj = base::AsAtomicPointer::Relaxed_Load(slot);
  if (obj->IsHeapObject()) {
    HeapObject* heap_obj = HeapObject::cast(obj);
    return UpdateSlot<access_mode>(maybe_slot, MaybeObject::FromObject(obj),
                                   heap_obj, HeapObjectReference::STRONG);
  }
  return REMOVE_SLOT;
}
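
The UpdateSlot/UpdateStrongSlot variants above all preserve the weakness bit while swapping in a forwarding address. A standalone sketch of that re-tagging, using plain integers in place of tagged pointers (the constant values are assumed to mirror kWeakHeapObjectMask and kClearedWeakHeapObject; illustration only, not V8 code):

// Standalone sketch of weak-bit-preserving slot updates (not V8 code).
#include <cassert>
#include <cstdint>

constexpr intptr_t kWeakBit = 1 << 1;  // assumed to mirror kWeakHeapObjectMask
constexpr intptr_t kClearedRef = 3;    // assumed to mirror kClearedWeakHeapObject

intptr_t MakeWeak(intptr_t strong_ref) { return strong_ref | kWeakBit; }

// Re-tag a forwarded address with the weakness of the original reference.
intptr_t UpdateRef(intptr_t old_ref, intptr_t forwarded_strong_ref) {
  bool was_weak = (old_ref & kWeakBit) != 0;
  return was_weak ? MakeWeak(forwarded_strong_ref) : forwarded_strong_ref;
}

int main() {
  intptr_t before = 0x1001;  // a "strong" tagged pointer
  intptr_t after = 0x2001;   // the same object's address after evacuation
  assert(UpdateRef(before, after) == after);                      // stays strong
  assert(UpdateRef(MakeWeak(before), after) == MakeWeak(after));  // stays weak
  assert(MakeWeak(before) != kClearedRef);  // live weak refs never equal the sentinel
  return 0;
}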

// Visitor for updating root pointers and to-space pointers.
// It does not expect to encounter pointers to dead objects.
// TODO(ulan): Remove code object specific functions. This visitor
@ -2864,33 +3017,59 @@ static inline SlotCallbackResult UpdateSlot(Object** slot) {
class PointersUpdatingVisitor : public ObjectVisitor, public RootVisitor {
 public:
  void VisitPointer(HeapObject* host, Object** p) override {
    UpdateStrongSlotInternal(p);
  }

  void VisitPointer(HeapObject* host, MaybeObject** p) override {
    UpdateSlotInternal(p);
  }

  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) UpdateSlotInternal(p);
    for (Object** p = start; p < end; p++) {
      UpdateStrongSlotInternal(p);
    }
  }

  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) final {
    UNREACHABLE();
  }

  void VisitRootPointer(Root root, const char* description,
                        Object** p) override {
    UpdateSlotInternal(p);
    UpdateStrongSlotInternal(p);
  }

  void VisitRootPointers(Root root, const char* description, Object** start,
                         Object** end) override {
    for (Object** p = start; p < end; p++) UpdateSlotInternal(p);
    for (Object** p = start; p < end; p++) UpdateStrongSlotInternal(p);
  }

  void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) override {
    UpdateTypedSlotHelper::UpdateEmbeddedPointer(rinfo, UpdateSlotInternal);
    UpdateTypedSlotHelper::UpdateEmbeddedPointer(
        rinfo, UpdateStrongMaybeObjectSlotInternal);
  }

  void VisitCodeTarget(Code* host, RelocInfo* rinfo) override {
    UpdateTypedSlotHelper::UpdateCodeTarget(rinfo, UpdateSlotInternal);
    UpdateTypedSlotHelper::UpdateCodeTarget(
        rinfo, UpdateStrongMaybeObjectSlotInternal);
  }

 private:
  static inline SlotCallbackResult UpdateSlotInternal(Object** slot) {
  static inline SlotCallbackResult UpdateStrongMaybeObjectSlotInternal(
      MaybeObject** slot) {
    DCHECK(!(*slot)->IsWeakHeapObject());
    DCHECK(!(*slot)->IsClearedWeakHeapObject());
    return UpdateStrongSlotInternal(reinterpret_cast<Object**>(slot));
  }

  static inline SlotCallbackResult UpdateStrongSlotInternal(Object** slot) {
    DCHECK(!Internals::HasWeakHeapObjectTag(*slot));
    return UpdateStrongSlot<AccessMode::NON_ATOMIC>(
        reinterpret_cast<MaybeObject**>(slot));
  }

  static inline SlotCallbackResult UpdateSlotInternal(MaybeObject** slot) {
    return UpdateSlot<AccessMode::NON_ATOMIC>(slot);
  }
};
@ -3642,28 +3821,34 @@ class RememberedSetUpdatingItem : public UpdatingItem {

 private:
  inline SlotCallbackResult CheckAndUpdateOldToNewSlot(Address slot_address) {
    Object** slot = reinterpret_cast<Object**>(slot_address);
    if (heap_->InFromSpace(*slot)) {
      HeapObject* heap_object = reinterpret_cast<HeapObject*>(*slot);
      DCHECK(heap_object->IsHeapObject());
    MaybeObject** slot = reinterpret_cast<MaybeObject**>(slot_address);
    HeapObject* heap_object;
    if (!(*slot)->ToStrongOrWeakHeapObject(&heap_object)) {
      return REMOVE_SLOT;
    }
    if (heap_->InFromSpace(heap_object)) {
      MapWord map_word = heap_object->map_word();
      if (map_word.IsForwardingAddress()) {
        *slot = map_word.ToForwardingAddress();
        HeapObjectReference::Update(
            reinterpret_cast<HeapObjectReference**>(slot),
            map_word.ToForwardingAddress());
      }
      bool success = (*slot)->ToStrongOrWeakHeapObject(&heap_object);
      USE(success);
      DCHECK(success);
      // If the object was in from space before and is after executing the
      // callback in to space, the object is still live.
      // Unfortunately, we do not know about the slot. It could be in a
      // just freed free space object.
      if (heap_->InToSpace(*slot)) {
      if (heap_->InToSpace(heap_object)) {
        return KEEP_SLOT;
      }
    } else if (heap_->InToSpace(*slot)) {
    } else if (heap_->InToSpace(heap_object)) {
      // Slots can point to "to" space if the page has been moved, or if the
      // slot has been recorded multiple times in the remembered set, or
      // if the slot was already updated during old->old updating.
      // In case the page has been moved, check markbits to determine liveness
      // of the slot. In the other case, the slot can just be kept.
      HeapObject* heap_object = reinterpret_cast<HeapObject*>(*slot);
      if (Page::FromAddress(heap_object->address())
              ->IsFlagSet(Page::PAGE_NEW_NEW_PROMOTION)) {
        // IsBlackOrGrey is required because objects are marked as grey for
@ -3677,7 +3862,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
      }
      return KEEP_SLOT;
    } else {
      DCHECK(!heap_->InNewSpace(*slot));
      DCHECK(!heap_->InNewSpace(heap_object));
    }
    return REMOVE_SLOT;
  }
@ -3697,7 +3882,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
        [&filter](Address slot) {
          if (!filter.IsValid(slot)) return REMOVE_SLOT;
          return UpdateSlot<AccessMode::NON_ATOMIC>(
              reinterpret_cast<Object**>(slot));
              reinterpret_cast<MaybeObject**>(slot));
        },
        SlotSet::PREFREE_EMPTY_BUCKETS);
  }
@ -3725,7 +3910,7 @@ class RememberedSetUpdatingItem : public UpdatingItem {
        chunk_,
        [isolate, this](SlotType slot_type, Address host_addr, Address slot) {
          return UpdateTypedSlotHelper::UpdateTypedSlot(
              isolate, slot_type, slot, [this](Object** slot) {
              isolate, slot_type, slot, [this](MaybeObject** slot) {
                return CheckAndUpdateOldToNewSlot(
                    reinterpret_cast<Address>(slot));
              });
@ -3738,8 +3923,11 @@ class RememberedSetUpdatingItem : public UpdatingItem {
    RememberedSet<OLD_TO_OLD>::IterateTyped(
        chunk_,
        [isolate](SlotType slot_type, Address host_addr, Address slot) {
          // Using UpdateStrongSlot is OK here, because there are no weak
          // typed slots.
          return UpdateTypedSlotHelper::UpdateTypedSlot(
              isolate, slot_type, slot, UpdateSlot<AccessMode::NON_ATOMIC>);
              isolate, slot_type, slot,
              UpdateStrongSlot<AccessMode::NON_ATOMIC>);
        });
  }
}

@ -495,6 +495,9 @@ class MajorNonAtomicMarkingState final
struct WeakObjects {
  Worklist<WeakCell*, 64> weak_cells;
  Worklist<TransitionArray*, 64> transition_arrays;
  // TODO(marja): For old space, we only need the slot, not the host
  // object. Optimize this by adding a different storage for old space.
  Worklist<std::pair<HeapObject*, HeapObjectReference**>, 64> weak_references;
};
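
A hypothetical call site showing how a marking visitor would feed this worklist instead of marking through a weak slot (sketch only; the real visitor lives in heap-visitor code not shown in this hunk):

// Hypothetical marking-visitor fragment: defer a weak slot so that
// ClearWeakReferences() can decide its fate after marking completes.
void VisitMaybeWeakSlot(MarkCompactCollector* collector, HeapObject* host,
                        HeapObjectReference** slot) {
  HeapObject* value;
  if ((*slot)->ToWeakHeapObject(&value)) {
    // Do not mark |value| through the weak reference; just record the slot.
    collector->AddWeakReference(host, slot);
  }
}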

// Collector for young and old generation.
@ -658,7 +661,12 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {

  void AbortCompaction();

  static inline bool IsOnEvacuationCandidate(HeapObject* obj) {
  static inline bool IsOnEvacuationCandidate(Object* obj) {
    return Page::FromAddress(reinterpret_cast<Address>(obj))
        ->IsEvacuationCandidate();
  }

  static inline bool IsOnEvacuationCandidate(MaybeObject* obj) {
    return Page::FromAddress(reinterpret_cast<Address>(obj))
        ->IsEvacuationCandidate();
  }
@ -666,6 +674,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
  void RecordRelocSlot(Code* host, RelocInfo* rinfo, Object* target);
  V8_INLINE static void RecordSlot(HeapObject* object, Object** slot,
                                   Object* target);
  V8_INLINE static void RecordSlot(HeapObject* object,
                                   HeapObjectReference** slot, Object* target);
  void RecordLiveSlotsOnPage(Page* page);

  void UpdateSlots(SlotsBuffer* buffer);
@ -699,6 +709,10 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
    weak_objects_.transition_arrays.Push(kMainThread, array);
  }

  void AddWeakReference(HeapObject* host, HeapObjectReference** slot) {
    weak_objects_.weak_references.Push(kMainThread, std::make_pair(host, slot));
  }

  Sweeper* sweeper() { return sweeper_; }

#ifdef DEBUG
@ -809,6 +823,7 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {
  // transition.
  void ClearWeakCellsAndSimpleMapTransitions(
      DependentCode** dependent_code_list);
  void ClearWeakReferences();
  void AbortWeakObjects();

  // Starts sweeping of spaces by contributing on the main thread and setting
@ -917,8 +932,11 @@ class MarkingVisitor final

  // ObjectVisitor implementation.
  V8_INLINE void VisitPointer(HeapObject* host, Object** p) final;
  V8_INLINE void VisitPointer(HeapObject* host, MaybeObject** p) final;
  V8_INLINE void VisitPointers(HeapObject* host, Object** start,
                               Object** end) final;
  V8_INLINE void VisitPointers(HeapObject* host, MaybeObject** start,
                               MaybeObject** end) final;
  V8_INLINE void VisitEmbeddedPointer(Code* host, RelocInfo* rinfo) final;
  V8_INLINE void VisitCodeTarget(Code* host, RelocInfo* rinfo) final;
@ -280,7 +280,9 @@ class UpdateTypedSlotHelper {
                                            Callback callback) {
    Object* code = Code::GetObjectFromEntryAddress(entry_address);
    Object* old_code = code;
    SlotCallbackResult result = callback(&code);
    SlotCallbackResult result =
        callback(reinterpret_cast<MaybeObject**>(&code));
    DCHECK(!Internals::HasWeakHeapObjectTag(code));
    if (code != old_code) {
      Memory::Address_at(entry_address) =
          reinterpret_cast<Code*>(code)->entry();
@ -296,7 +298,9 @@ class UpdateTypedSlotHelper {
    DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
    Code* old_target = Code::GetCodeFromTargetAddress(rinfo->target_address());
    Object* new_target = old_target;
    SlotCallbackResult result = callback(&new_target);
    SlotCallbackResult result =
        callback(reinterpret_cast<MaybeObject**>(&new_target));
    DCHECK(!Internals::HasWeakHeapObjectTag(new_target));
    if (new_target != old_target) {
      rinfo->set_target_address(Code::cast(new_target)->instruction_start());
    }
@ -311,7 +315,9 @@
    DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
    HeapObject* old_target = rinfo->target_object();
    Object* new_target = old_target;
    SlotCallbackResult result = callback(&new_target);
    SlotCallbackResult result =
        callback(reinterpret_cast<MaybeObject**>(&new_target));
    DCHECK(!Internals::HasWeakHeapObjectTag(new_target));
    if (new_target != old_target) {
      rinfo->set_target_object(HeapObject::cast(new_target));
    }
@ -319,7 +325,7 @@
  }

  // Updates a typed slot using an untyped slot callback.
  // The callback accepts Object** and returns SlotCallbackResult.
  // The callback accepts MaybeObject** and returns SlotCallbackResult.
  template <typename Callback>
  static SlotCallbackResult UpdateTypedSlot(Isolate* isolate,
                                            SlotType slot_type, Address addr,
@ -337,7 +343,7 @@
        return UpdateEmbeddedPointer(&rinfo, callback);
      }
      case OBJECT_SLOT: {
        return callback(reinterpret_cast<Object**>(addr));
        return callback(reinterpret_cast<MaybeObject**>(addr));
      }
      case CLEARED_SLOT:
        break;
@ -6,6 +6,7 @@
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"
#include "src/objects-inl.h"
#include "src/objects/map.h"

namespace v8 {
@ -30,13 +31,13 @@ bool Scavenger::ContainsOnlyData(VisitorId visitor_id) {
  return false;
}

void Scavenger::PageMemoryFence(Object* object) {
void Scavenger::PageMemoryFence(MaybeObject* object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  if (object->IsHeapObject()) {
    MemoryChunk* chunk =
        MemoryChunk::FromAddress(HeapObject::cast(object)->address());
  HeapObject* heap_object;
  if (object->ToStrongOrWeakHeapObject(&heap_object)) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
@ -68,7 +69,7 @@ bool Scavenger::MigrateObject(Map* map, HeapObject* source, HeapObject* target,
  return true;
}

bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
                                    HeapObject* object, int object_size) {
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
@ -83,10 +84,10 @@ bool Scavenger::SemiSpaceCopyObject(Map* map, HeapObject** slot,
    if (!self_success) {
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      *slot = map_word.ToForwardingAddress();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      return true;
    }
    *slot = target;
    HeapObjectReference::Update(slot, target);

    copied_list_.Push(ObjectAndSize(target, object_size));
    copied_size_ += object_size;
@ -95,8 +96,8 @@
  return false;
}

bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
                              int object_size) {
bool Scavenger::PromoteObject(Map* map, HeapObjectReference** slot,
                              HeapObject* object, int object_size) {
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);
@ -109,11 +110,10 @@ bool Scavenger::PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
    if (!self_success) {
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->map_word();
      *slot = map_word.ToForwardingAddress();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      return true;
    }
    *slot = target;

    HeapObjectReference::Update(slot, target);
    if (!ContainsOnlyData(map->visitor_id())) {
      promotion_list_.Push(ObjectAndSize(target, object_size));
    }
@ -123,7 +123,7 @@
  return false;
}

void Scavenger::EvacuateObjectDefault(Map* map, HeapObject** slot,
void Scavenger::EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
                                      HeapObject* object, int object_size) {
  SLOW_DCHECK(object_size <= Page::kAllocatableMemory);
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);
@ -157,7 +157,8 @@ void Scavenger::EvacuateThinString(Map* map, HeapObject** slot,
    return;
  }

  EvacuateObjectDefault(map, slot, object, object_size);
  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                        object, object_size);
}

void Scavenger::EvacuateShortcutCandidate(Map* map, HeapObject** slot,
@ -187,17 +188,19 @@
      return;
    }
    Map* map = first_word.ToMap();
    EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map));
    EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                          first, first->SizeFromMap(map));
    base::AsAtomicPointer::Relaxed_Store(
        reinterpret_cast<Map**>(object->address()),
        MapWord::FromForwardingAddress(*slot).ToMap());
    return;
  }

  EvacuateObjectDefault(map, slot, object, object_size);
  EvacuateObjectDefault(map, reinterpret_cast<HeapObjectReference**>(slot),
                        object, object_size);
}

void Scavenger::EvacuateObject(HeapObject** slot, Map* map,
void Scavenger::EvacuateObject(HeapObjectReference** slot, Map* map,
                               HeapObject* source) {
  SLOW_DCHECK(heap_->InFromSpace(source));
  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
@ -206,11 +209,15 @@
  // that require re-reading the map.
  switch (map->visitor_id()) {
    case kVisitThinString:
      EvacuateThinString(map, slot, reinterpret_cast<ThinString*>(source),
                         size);
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeakHeapObject());
      EvacuateThinString(map, reinterpret_cast<HeapObject**>(slot),
                         reinterpret_cast<ThinString*>(source), size);
      break;
    case kVisitShortcutCandidate:
      EvacuateShortcutCandidate(map, slot,
      DCHECK(!(*slot)->IsWeakHeapObject());
      // At the moment we don't allow weak pointers to cons strings.
      EvacuateShortcutCandidate(map, reinterpret_cast<HeapObject**>(slot),
                                reinterpret_cast<ConsString*>(source), size);
      break;
    default:
@ -219,7 +226,7 @@
  }
}

void Scavenger::ScavengeObject(HeapObject** p, HeapObject* object) {
void Scavenger::ScavengeObject(HeapObjectReference** p, HeapObject* object) {
  DCHECK(heap()->InFromSpace(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
@ -228,8 +235,14 @@
  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(heap()->InFromSpace(*p));
    *p = first_word.ToForwardingAddress();
    if ((*p)->IsWeakHeapObject()) {
      *p = HeapObjectReference::Weak(dest);
    } else {
      DCHECK((*p)->IsStrongHeapObject());
      *p = HeapObjectReference::Strong(dest);
    }
    return;
  }

@ -242,13 +255,16 @@

SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap,
                                                     Address slot_address) {
  Object** slot = reinterpret_cast<Object**>(slot_address);
  Object* object = *slot;
  MaybeObject** slot = reinterpret_cast<MaybeObject**>(slot_address);
  MaybeObject* object = *slot;
  if (heap->InFromSpace(object)) {
    HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
    HeapObject* heap_object;
    bool success = object->ToStrongOrWeakHeapObject(&heap_object);
    USE(success);
    DCHECK(success);
    DCHECK(heap_object->IsHeapObject());

    ScavengeObject(reinterpret_cast<HeapObject**>(slot), heap_object);
    ScavengeObject(reinterpret_cast<HeapObjectReference**>(slot), heap_object);

    object = *slot;
    // If the object was in from space before and is after executing the
@ -274,11 +290,27 @@ void ScavengeVisitor::VisitPointers(HeapObject* host, Object** start,
  for (Object** p = start; p < end; p++) {
    Object* object = *p;
    if (!heap_->InNewSpace(object)) continue;
    scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
    scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                               reinterpret_cast<HeapObject*>(object));
  }
}

void ScavengeVisitor::VisitPointers(HeapObject* host, MaybeObject** start,
                                    MaybeObject** end) {
  for (MaybeObject** p = start; p < end; p++) {
    MaybeObject* object = *p;
    if (!heap_->InNewSpace(object)) continue;
    // Treat the weak reference as strong.
    HeapObject* heap_object;
    if (object->ToStrongOrWeakHeapObject(&heap_object)) {
      scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                                 heap_object);
    } else {
      UNREACHABLE();
    }
  }
}

}  // namespace internal
}  // namespace v8
@ -23,37 +23,56 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {

  inline void VisitPointers(HeapObject* host, Object** start,
                            Object** end) final {
    for (Address slot_address = reinterpret_cast<Address>(start);
         slot_address < reinterpret_cast<Address>(end);
         slot_address += kPointerSize) {
      Object** slot = reinterpret_cast<Object**>(slot_address);
    for (Object** slot = start; slot < end; ++slot) {
      Object* target = *slot;
      scavenger_->PageMemoryFence(target);

      DCHECK(!Internals::HasWeakHeapObjectTag(target));
      if (target->IsHeapObject()) {
        if (heap_->InFromSpace(target)) {
          scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(slot),
                                     HeapObject::cast(target));
          target = *slot;
          scavenger_->PageMemoryFence(target);

          if (heap_->InNewSpace(target)) {
            SLOW_DCHECK(target->IsHeapObject());
            SLOW_DCHECK(heap_->InToSpace(target));
            RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                              slot_address);
          }
          SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
              HeapObject::cast(target)));
        } else if (record_slots_ &&
                   MarkCompactCollector::IsOnEvacuationCandidate(
                       HeapObject::cast(target))) {
          heap_->mark_compact_collector()->RecordSlot(host, slot, target);
        }
        HandleSlot(host, reinterpret_cast<Address>(slot),
                   HeapObject::cast(target));
      }
    }
  }

  inline void VisitPointers(HeapObject* host, MaybeObject** start,
                            MaybeObject** end) final {
    // Treat weak references as strong. TODO(marja): Proper weakness handling in
    // the young generation.
    for (MaybeObject** slot = start; slot < end; ++slot) {
      MaybeObject* target = *slot;
      HeapObject* heap_object;
      if (target->ToStrongOrWeakHeapObject(&heap_object)) {
        HandleSlot(host, reinterpret_cast<Address>(slot), heap_object);
      }
    }
  }

  inline void HandleSlot(HeapObject* host, Address slot_address,
                         HeapObject* target) {
    HeapObjectReference** slot =
        reinterpret_cast<HeapObjectReference**>(slot_address);
    scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));

    if (heap_->InFromSpace(target)) {
      scavenger_->ScavengeObject(slot, target);
      bool success = (*slot)->ToStrongOrWeakHeapObject(&target);
      USE(success);
      DCHECK(success);
      scavenger_->PageMemoryFence(reinterpret_cast<MaybeObject*>(target));

      if (heap_->InNewSpace(target)) {
        SLOW_DCHECK(target->IsHeapObject());
        SLOW_DCHECK(heap_->InToSpace(target));
        RememberedSet<OLD_TO_NEW>::Insert(Page::FromAddress(slot_address),
                                          slot_address);
      }
      SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(
          HeapObject::cast(target)));
    } else if (record_slots_ && MarkCompactCollector::IsOnEvacuationCandidate(
                                    HeapObject::cast(target))) {
      heap_->mark_compact_collector()->RecordSlot(host, slot, target);
    }
  }

 private:
  Heap* const heap_;
  Scavenger* const scavenger_;
@ -106,7 +125,7 @@ void Scavenger::ScavengePage(MemoryChunk* page) {
  RememberedSet<OLD_TO_NEW>::IterateTyped(
      page, [this](SlotType type, Address host_addr, Address addr) {
        return UpdateTypedSlotHelper::UpdateTypedSlot(
            heap_->isolate(), type, addr, [this](Object** addr) {
            heap_->isolate(), type, addr, [this](MaybeObject** addr) {
              return CheckAndScavengeObject(heap(),
                                            reinterpret_cast<Address>(addr));
            });
@ -164,6 +183,7 @@ void Scavenger::Finalize() {

void RootScavengeVisitor::VisitRootPointer(Root root, const char* description,
                                           Object** p) {
  DCHECK(!Internals::HasWeakHeapObjectTag(*p));
  ScavengePointer(p);
}

@ -175,9 +195,10 @@ void RootScavengeVisitor::VisitRootPointers(Root root, const char* description,

void RootScavengeVisitor::ScavengePointer(Object** p) {
  Object* object = *p;
  DCHECK(!Internals::HasWeakHeapObjectTag(object));
  if (!heap_->InNewSpace(object)) return;

  scavenger_->ScavengeObject(reinterpret_cast<HeapObject**>(p),
  scavenger_->ScavengeObject(reinterpret_cast<HeapObjectReference**>(p),
                             reinterpret_cast<HeapObject*>(object));
}
@ -50,7 +50,7 @@ class Scavenger {

  inline Heap* heap() { return heap_; }

  inline void PageMemoryFence(Object* object);
  inline void PageMemoryFence(MaybeObject* object);

  void AddPageToSweeperIfNecessary(MemoryChunk* page);

@ -61,24 +61,24 @@ class Scavenger {

  // Scavenges an object |object| referenced from slot |p|. |object| is required
  // to be in from space.
  inline void ScavengeObject(HeapObject** p, HeapObject* object);
  inline void ScavengeObject(HeapObjectReference** p, HeapObject* object);

  // Copies |source| to |target| and sets the forwarding pointer in |source|.
  V8_INLINE bool MigrateObject(Map* map, HeapObject* source, HeapObject* target,
                               int size);

  V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObject** slot,
  V8_INLINE bool SemiSpaceCopyObject(Map* map, HeapObjectReference** slot,
                                     HeapObject* object, int object_size);

  V8_INLINE bool PromoteObject(Map* map, HeapObject** slot, HeapObject* object,
                               int object_size);
  V8_INLINE bool PromoteObject(Map* map, HeapObjectReference** slot,
                               HeapObject* object, int object_size);

  V8_INLINE void EvacuateObject(HeapObject** slot, Map* map,
  V8_INLINE void EvacuateObject(HeapObjectReference** slot, Map* map,
                                HeapObject* source);

  // Different cases for object evacuation.

  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObject** slot,
  V8_INLINE void EvacuateObjectDefault(Map* map, HeapObjectReference** slot,
                                       HeapObject* object, int object_size);

  V8_INLINE void EvacuateJSFunction(Map* map, HeapObject** slot,
@ -135,6 +135,8 @@ class ScavengeVisitor final : public NewSpaceVisitor<ScavengeVisitor> {

  V8_INLINE void VisitPointers(HeapObject* host, Object** start,
                               Object** end) final;
  V8_INLINE void VisitPointers(HeapObject* host, MaybeObject** start,
                               MaybeObject** end) final;

 private:
  Heap* const heap_;
@ -1420,6 +1420,12 @@ void TurboAssembler::Popcnt(Register dst, Operand src) {
  UNREACHABLE();
}

void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
  cmp(in_out, Immediate(kClearedWeakHeapObject));
  j(equal, target_if_cleared);

  and_(in_out, Immediate(~kWeakHeapObjectMask));
}
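
In C-like terms, the emitted sequence behaves roughly as follows (sketch only; the constant values are assumed to mirror kClearedWeakHeapObject and kWeakHeapObjectMask):

// Rough C++ equivalent of the LoadWeakValue code sequence above (sketch).
#include <cstdint>

constexpr intptr_t kClearedWeakRef = 3;
constexpr intptr_t kWeakMask = 1 << 1;

intptr_t LoadWeakValueSketch(intptr_t in_out, bool* jumped_to_cleared) {
  if (in_out == kClearedWeakRef) {  // cmp + j(equal, target_if_cleared)
    *jumped_to_cleared = true;      // execution continues at the label
    return in_out;
  }
  *jumped_to_cleared = false;
  return in_out & ~kWeakMask;       // and_: strip the weakness bit
}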

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
@ -604,6 +604,10 @@ class MacroAssembler : public TurboAssembler {
  void PushReturnAddressFrom(Register src) { push(src); }
  void PopReturnAddressTo(Register dst) { pop(dst); }

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);

  // ---------------------------------------------------------------------------
  // StatsCounter support
@ -4557,6 +4557,13 @@ void MacroAssembler::JumpToInstructionStream(const InstructionStream* stream) {
  Jump(kOffHeapTrampolineRegister);
}

void MacroAssembler::LoadWeakValue(Register out, Register in,
                                   Label* target_if_cleared) {
  Branch(target_if_cleared, eq, in, Operand(kClearedWeakHeapObject));

  And(out, in, Operand(~kWeakHeapObjectMask));
}

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK_GT(value, 0);
@ -1101,6 +1101,10 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
  // Generates a trampoline to jump to the off-heap instruction stream.
  void JumpToInstructionStream(const InstructionStream* stream);

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register out, Register in, Label* target_if_cleared);

  // -------------------------------------------------------------------------
  // StatsCounter support.
@ -4867,6 +4867,13 @@ void MacroAssembler::JumpToInstructionStream(const InstructionStream* stream) {
  Jump(kOffHeapTrampolineRegister);
}

void MacroAssembler::LoadWeakValue(Register out, Register in,
                                   Label* target_if_cleared) {
  Branch(target_if_cleared, eq, in, Operand(kClearedWeakHeapObject));

  And(out, in, Operand(~kWeakHeapObjectMask));
}

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  DCHECK_GT(value, 0);
@ -1170,6 +1170,10 @@ const Operand& rt = Operand(zero_reg), BranchDelaySlot bd = PROTECT
  // Generates a trampoline to jump to the off-heap instruction stream.
  void JumpToInstructionStream(const InstructionStream* stream);

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register out, Register in, Label* target_if_cleared);

  // -------------------------------------------------------------------------
  // StatsCounter support.
@ -69,6 +69,12 @@ void BodyDescriptorBase::IteratePointer(HeapObject* obj, int offset,
  v->VisitPointer(obj, HeapObject::RawField(obj, offset));
}

template <typename ObjectVisitor>
void BodyDescriptorBase::IterateMaybeWeakPointer(HeapObject* obj, int offset,
                                                 ObjectVisitor* v) {
  v->VisitPointer(obj, HeapObject::RawMaybeWeakField(obj, offset));
}

class JSObject::BodyDescriptor final : public BodyDescriptorBase {
 public:
  static const int kStartOffset = JSReceiver::kPropertiesOrHashOffset;
@ -273,7 +279,7 @@ class FeedbackVector::BodyDescriptor final : public BodyDescriptorBase {
  static inline void IterateBody(HeapObject* obj, int object_size,
                                 ObjectVisitor* v) {
    IteratePointer(obj, kSharedFunctionInfoOffset, v);
    IteratePointer(obj, kOptimizedCodeOffset, v);
    IterateMaybeWeakPointer(obj, kOptimizedCodeOffset, v);
    IteratePointers(obj, kFeedbackSlotsOffset, object_size, v);
  }
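
A minimal visitor illustrating the effect: the kOptimizedCodeOffset slot now arrives at the MaybeObject** overload rather than the strong Object** one (hypothetical visitor written against the ObjectVisitor interface this patch extends, for illustration only):

// Hypothetical visitor counting weak slots delivered via the new overload.
class CountWeakSlotsVisitor : public ObjectVisitor {
 public:
  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    // Strong slots, e.g. kSharedFunctionInfoOffset, still land here.
  }
  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) override {
    for (MaybeObject** p = start; p < end; p++) {
      if ((*p)->IsWeakHeapObject()) weak_slots_++;
    }
  }
  int weak_slots_ = 0;
};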

@ -36,6 +36,10 @@ class BodyDescriptorBase BASE_EMBEDDED {
  static inline void IteratePointer(HeapObject* obj, int offset,
                                    ObjectVisitor* v);

  template <typename ObjectVisitor>
  static inline void IterateMaybeWeakPointer(HeapObject* obj, int offset,
                                             ObjectVisitor* v);

 protected:
  // Returns true for all header and embedder fields.
  static inline bool IsValidSlotImpl(HeapObject* obj, int offset);

@ -17,6 +17,7 @@
#include "src/objects/data-handler-inl.h"
#include "src/objects/debug-objects-inl.h"
#include "src/objects/literal-objects.h"
#include "src/objects/maybe-object.h"
#include "src/objects/microtask-inl.h"
#include "src/objects/module.h"
#include "src/objects/promise-inl.h"
@ -70,6 +71,15 @@ void Object::VerifyPointer(Object* p) {
  }
}

void MaybeObject::VerifyMaybeObjectPointer(MaybeObject* p) {
  HeapObject* heap_object;
  if (p->ToStrongOrWeakHeapObject(&heap_object)) {
    HeapObject::VerifyHeapPointer(heap_object);
  } else {
    CHECK(p->IsSmi() || p->IsClearedWeakHeapObject());
  }
}

namespace {
void VerifyForeignPointer(HeapObject* host, Object* foreign) {
  host->VerifyPointer(foreign);
@ -346,7 +356,13 @@ void FeedbackCell::FeedbackCellVerify() {
  CHECK(value()->IsUndefined(isolate) || value()->IsFeedbackVector());
}

void FeedbackVector::FeedbackVectorVerify() { CHECK(IsFeedbackVector()); }
void FeedbackVector::FeedbackVectorVerify() {
  CHECK(IsFeedbackVector());
  MaybeObject* code = optimized_code_weak_or_smi();
  MaybeObject::VerifyMaybeObjectPointer(code);
  CHECK(code->IsSmi() || code->IsClearedWeakHeapObject() ||
        code->IsWeakHeapObject());
}

template <class Traits>
void FixedTypedArray<Traits>::FixedTypedArrayVerify() {

@ -920,6 +920,10 @@ Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}

MaybeObject** HeapObject::RawMaybeWeakField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<MaybeObject**>(FIELD_ADDR(obj, byte_offset));
}

int Smi::ToInt(const Object* object) { return Smi::cast(object)->value(); }

MapWord MapWord::FromMap(const Map* map) {

@ -1740,6 +1740,7 @@ class HeapObject: public Object {
  // Does not invoke write barrier, so should only be assigned to
  // during marking GC.
  static inline Object** RawField(HeapObject* obj, int offset);
  static inline MaybeObject** RawMaybeWeakField(HeapObject* obj, int offset);

  DECL_CAST(HeapObject)
71
src/objects/maybe-object-inl.h
Normal file
@ -0,0 +1,71 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAYBE_OBJECT_INL_H_
#define V8_OBJECTS_MAYBE_OBJECT_INL_H_

#include "include/v8.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

bool MaybeObject::IsSmi(Smi** value) {
  if (HAS_SMI_TAG(this)) {
    *value = Smi::cast(reinterpret_cast<Object*>(this));
    return true;
  }
  return false;
}

bool MaybeObject::IsStrongOrWeakHeapObject() {
  if (IsSmi() || IsClearedWeakHeapObject()) {
    return false;
  }
  return true;
}

bool MaybeObject::ToStrongOrWeakHeapObject(HeapObject** result) {
  if (IsSmi() || IsClearedWeakHeapObject()) {
    return false;
  }
  *result = GetHeapObject();
  return true;
}

bool MaybeObject::IsStrongHeapObject() {
  return !Internals::HasWeakHeapObjectTag(this) && !IsSmi();
}

bool MaybeObject::ToStrongHeapObject(HeapObject** result) {
  if (!Internals::HasWeakHeapObjectTag(this) && !IsSmi()) {
    *result = reinterpret_cast<HeapObject*>(this);
    return true;
  }
  return false;
}

bool MaybeObject::IsWeakHeapObject() {
  return Internals::HasWeakHeapObjectTag(this) && !IsClearedWeakHeapObject();
}

bool MaybeObject::ToWeakHeapObject(HeapObject** result) {
  if (Internals::HasWeakHeapObjectTag(this) && !IsClearedWeakHeapObject()) {
    *result = GetHeapObject();
    return true;
  }
  return false;
}

HeapObject* MaybeObject::GetHeapObject() {
  DCHECK(!IsSmi());
  DCHECK(!IsClearedWeakHeapObject());
  return Internals::RemoveWeakHeapObjectMask(
      reinterpret_cast<HeapObjectReference*>(this));
}

}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_MAYBE_OBJECT_INL_H_
101
src/objects/maybe-object.h
Normal file
@ -0,0 +1,101 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_MAYBE_OBJECT_H_
#define V8_OBJECTS_MAYBE_OBJECT_H_

#include "include/v8.h"
#include "src/globals.h"

namespace v8 {
namespace internal {

class HeapObject;
class Smi;

// A MaybeObject is either a SMI, a strong reference to a HeapObject, a weak
// reference to a HeapObject, or a cleared weak reference. It's used for
// implementing in-place weak references (see design doc: goo.gl/j6SdcK )
class MaybeObject {
 public:
  bool IsSmi() const { return HAS_SMI_TAG(this); }
  inline bool IsSmi(Smi** value);

  bool IsClearedWeakHeapObject() {
    return Internals::IsClearedWeakHeapObject(this);
  }

  inline bool IsStrongOrWeakHeapObject();
  inline bool ToStrongOrWeakHeapObject(HeapObject** result);
  inline bool IsStrongHeapObject();
  inline bool ToStrongHeapObject(HeapObject** result);
  inline bool IsWeakHeapObject();
  inline bool ToWeakHeapObject(HeapObject** result);

  inline HeapObject* GetHeapObject();

  static MaybeObject* FromSmi(Smi* smi) {
    DCHECK(HAS_SMI_TAG(smi));
    return reinterpret_cast<MaybeObject*>(smi);
  }

  static MaybeObject* FromObject(Object* object) {
    return reinterpret_cast<MaybeObject*>(object);
  }

#ifdef VERIFY_HEAP
  static void VerifyMaybeObjectPointer(MaybeObject* p);
#endif

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(MaybeObject);
};
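
As a usage sketch, a caller holding a MaybeObject* typically dispatches on its four possible states like this (hypothetical caller, not from this patch):

// Hypothetical caller dispatching on a MaybeObject's state.
void ProcessSlot(MaybeObject* slot_value) {
  HeapObject* heap_object;
  Smi* smi;
  if (slot_value->IsSmi(&smi)) {
    // Plain Smi payload (e.g. an optimization marker).
  } else if (slot_value->IsClearedWeakHeapObject()) {
    // The weak target died and the GC cleared the reference.
  } else if (slot_value->ToWeakHeapObject(&heap_object)) {
    // Live weak reference; heap_object is the untagged target.
  } else if (slot_value->ToStrongHeapObject(&heap_object)) {
    // Ordinary strong reference.
  }
}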

// A HeapObjectReference is either a strong reference to a HeapObject, a weak
// reference to a HeapObject, or a cleared weak reference.
class HeapObjectReference : public MaybeObject {
 public:
  enum ReferenceType {
    WEAK,
    STRONG,
  };

  static HeapObjectReference* Strong(HeapObject* object) {
    return reinterpret_cast<HeapObjectReference*>(object);
  }

  static HeapObjectReference* Weak(HeapObject* object) {
    return Internals::AddWeakHeapObjectMask(object);
  }

  static HeapObjectReference* ClearedValue() {
    return reinterpret_cast<HeapObjectReference*>(kClearedWeakHeapObject);
  }

  static void Update(HeapObjectReference** slot, HeapObject* value) {
    DCHECK(!HAS_SMI_TAG(*slot));
    DCHECK(Internals::HasHeapObjectTag(value));

#ifdef DEBUG
    bool weak_before = Internals::HasWeakHeapObjectTag(*slot);
#endif

    *slot = reinterpret_cast<HeapObjectReference*>(
        reinterpret_cast<intptr_t>(value) |
        (reinterpret_cast<intptr_t>(*slot) & kWeakHeapObjectMask));

#ifdef DEBUG
    bool weak_after = Internals::HasWeakHeapObjectTag(*slot);
    DCHECK_EQ(weak_before, weak_after);
#endif
  }

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(HeapObjectReference);
};
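
And a sketch of how a weak field is written and later refreshed after evacuation (hypothetical helpers; Update preserves the slot's weakness bit while swapping the address):

// Hypothetical helpers using HeapObjectReference (illustration only).
void StoreWeak(HeapObjectReference** slot, HeapObject* target) {
  *slot = HeapObjectReference::Weak(target);  // tag the reference as weak
}

void OnTargetMoved(HeapObjectReference** slot, HeapObject* new_location) {
  // Swap in the new address; the weak/strong bit of *slot is kept intact.
  HeapObjectReference::Update(slot, new_location);
}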

}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_MAYBE_OBJECT_H_

@ -14,6 +14,9 @@
#undef ACCESSORS_CHECKED2
#undef ACCESSORS_CHECKED
#undef ACCESSORS
#undef WEAK_ACCESSORS_CHECKED
#undef WEAK_ACCESSORS_CHECKED2
#undef WEAK_ACCESSORS
#undef SMI_ACCESSORS_CHECKED
#undef SMI_ACCESSORS
#undef SYNCHRONIZED_SMI_ACCESSORS
@ -25,13 +28,16 @@
#undef FIELD_ADDR
#undef FIELD_ADDR_CONST
#undef READ_FIELD
#undef READ_WEAK_FIELD
#undef ACQUIRE_READ_FIELD
#undef RELAXED_READ_FIELD
#undef WRITE_FIELD
#undef WRITE_WEAK_FIELD
#undef RELEASE_WRITE_FIELD
#undef RELAXED_WRITE_FIELD
#undef WRITE_BARRIER
#undef CONDITIONAL_WRITE_BARRIER
#undef CONDITIONAL_WEAK_WRITE_BARRIER
#undef READ_DOUBLE_FIELD
#undef WRITE_DOUBLE_FIELD
#undef READ_INT_FIELD
@ -71,6 +71,25 @@
#define ACCESSORS(holder, name, type, offset) \
  ACCESSORS_CHECKED(holder, name, type, offset, true)

#define WEAK_ACCESSORS_CHECKED2(holder, name, offset, get_condition,      \
                                set_condition)                            \
  MaybeObject* holder::name() const {                                     \
    MaybeObject* value = READ_WEAK_FIELD(this, offset);                   \
    DCHECK(get_condition);                                                \
    return value;                                                         \
  }                                                                       \
  void holder::set_##name(MaybeObject* value, WriteBarrierMode mode) {    \
    DCHECK(set_condition);                                                \
    WRITE_WEAK_FIELD(this, offset, value);                                \
    CONDITIONAL_WEAK_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

#define WEAK_ACCESSORS_CHECKED(holder, name, offset, condition) \
  WEAK_ACCESSORS_CHECKED2(holder, name, offset, condition, condition)

#define WEAK_ACCESSORS(holder, name, offset) \
  WEAK_ACCESSORS_CHECKED(holder, name, offset, true)
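
For a concrete picture, WEAK_ACCESSORS(FeedbackVector, optimized_code_weak_or_smi, kOptimizedCodeOffset) would expand to roughly the following (hand-expanded sketch; the holder and field names come from how this patch uses the accessor elsewhere):

// Hand-expanded sketch of
// WEAK_ACCESSORS(FeedbackVector, optimized_code_weak_or_smi, kOptimizedCodeOffset):
MaybeObject* FeedbackVector::optimized_code_weak_or_smi() const {
  MaybeObject* value = READ_WEAK_FIELD(this, kOptimizedCodeOffset);
  DCHECK(true);  // the unconditional variant passes |true| as the condition
  return value;
}
void FeedbackVector::set_optimized_code_weak_or_smi(MaybeObject* value,
                                                    WriteBarrierMode mode) {
  DCHECK(true);
  WRITE_WEAK_FIELD(this, kOptimizedCodeOffset, value);
  CONDITIONAL_WEAK_WRITE_BARRIER(GetHeap(), this, kOptimizedCodeOffset, value,
                                 mode);
}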

// Getter that returns a Smi as an int and writes an int as a Smi.
#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition) \
  int holder::name() const {                                   \
@ -135,6 +154,9 @@
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

#define READ_WEAK_FIELD(p, offset) \
  (*reinterpret_cast<MaybeObject* const*>(FIELD_ADDR_CONST(p, offset)))

#define ACQUIRE_READ_FIELD(p, offset)             \
  reinterpret_cast<Object*>(base::Acquire_Load(   \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))
@ -148,9 +170,15 @@
  base::Relaxed_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));
#define WRITE_WEAK_FIELD(p, offset, value)                        \
  base::Relaxed_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));
#else
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)
#define WRITE_WEAK_FIELD(p, offset, value) \
  (*reinterpret_cast<MaybeObject**>(FIELD_ADDR(p, offset)) = value)
#endif

#define RELEASE_WRITE_FIELD(p, offset, value) \
@ -177,6 +205,16 @@
    heap->RecordWrite(object, HeapObject::RawField(object, offset), value); \
  }

#define CONDITIONAL_WEAK_WRITE_BARRIER(heap, object, offset, value, mode)    \
  if (mode != SKIP_WRITE_BARRIER) {                                          \
    if (mode == UPDATE_WRITE_BARRIER) {                                      \
      heap->incremental_marking()->RecordMaybeWeakWrite(                     \
          object, HeapObject::RawMaybeWeakField(object, offset), value);     \
    }                                                                        \
    heap->RecordWrite(object, HeapObject::RawMaybeWeakField(object, offset), \
                      value);                                                \
  }

#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))
@ -793,17 +793,30 @@ class IndexedReferencesExtractor : public ObjectVisitor {
        parent_(parent),
        next_index_(0) {}
  void VisitPointers(HeapObject* host, Object** start, Object** end) override {
    for (Object** p = start; p < end; p++) {
      int index = static_cast<int>(p - HeapObject::RawField(parent_obj_, 0));
    VisitPointers(host, reinterpret_cast<MaybeObject**>(start),
                  reinterpret_cast<MaybeObject**>(end));
  }
  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) override {
    for (MaybeObject** p = start; p < end; p++) {
      int index = static_cast<int>(reinterpret_cast<Object**>(p) -
                                   HeapObject::RawField(parent_obj_, 0));
      ++next_index_;
      // |p| could be outside of the object, e.g., while visiting RelocInfo of
      // code objects.
      if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
      if (reinterpret_cast<Object**>(p) >= parent_start_ &&
          reinterpret_cast<Object**>(p) < parent_end_ &&
          generator_->marks_[index]) {
        generator_->marks_[index] = false;
        continue;
      }
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p,
                                     index * kPointerSize);
      HeapObject* heap_object;
      // Weak references have been handled explicitly.
      DCHECK(!(*p)->ToWeakHeapObject(&heap_object));
      if ((*p)->ToStrongHeapObject(&heap_object)) {
        generator_->SetHiddenReference(parent_obj_, parent_, next_index_,
                                       heap_object, index * kPointerSize);
      }
    }
  }

@ -863,6 +876,8 @@ bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  } else if (obj->IsFeedbackVector()) {
    ExtractFeedbackVectorReferences(entry, FeedbackVector::cast(obj));
  }
  return true;
}
@ -1362,6 +1377,16 @@ void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
  }
}

void V8HeapExplorer::ExtractFeedbackVectorReferences(
    int entry, FeedbackVector* feedback_vector) {
  MaybeObject* code = feedback_vector->optimized_code_weak_or_smi();
  HeapObject* code_heap_object;
  if (code->ToWeakHeapObject(&code_heap_object)) {
    SetWeakReference(feedback_vector, entry, "optimized code", code_heap_object,
                     FeedbackVector::kOptimizedCodeOffset);
  }
}

void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  Isolate* isolate = js_obj->GetIsolate();
  if (js_obj->HasFastProperties()) {

@ -392,6 +392,8 @@ class V8HeapExplorer : public HeapEntriesAllocator {
  void ExtractJSArrayBufferReferences(int entry, JSArrayBuffer* buffer);
  void ExtractJSPromiseReferences(int entry, JSPromise* promise);
  void ExtractFixedArrayReferences(int entry, FixedArray* array);
  void ExtractFeedbackVectorReferences(int entry,
                                       FeedbackVector* feedback_vector);
  void ExtractPropertyReferences(JSObject* js_obj, int entry);
  void ExtractAccessorPairProperty(JSObject* js_obj, int entry, Name* key,
                                   Object* callback_obj, int field_offset = -1);
@ -685,23 +685,40 @@ template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(HeapObject* host,
                                                             Object** start,
                                                             Object** end) {
  Object** current = start;
  while (current < end) {
    while (current < end && (*current)->IsSmi()) current++;
    if (current < end) OutputRawData(reinterpret_cast<Address>(current));
  VisitPointers(host, reinterpret_cast<MaybeObject**>(start),
                reinterpret_cast<MaybeObject**>(end));
}

    while (current < end && !(*current)->IsSmi()) {
      HeapObject* current_contents = HeapObject::cast(*current);
template <class AllocatorT>
void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(
    HeapObject* host, MaybeObject** start, MaybeObject** end) {
  MaybeObject** current = start;
  while (current < end) {
    while (current < end &&
           ((*current)->IsSmi() || (*current)->IsClearedWeakHeapObject())) {
      current++;
    }
    if (current < end) {
      OutputRawData(reinterpret_cast<Address>(current));

      // At the moment, there are no weak references reachable by the
      // serializer. TODO(marja): Implement this, once the relevant objects can
      // contain weak references.
      CHECK(!(*current)->IsWeakHeapObject());
      CHECK(!(*current)->IsClearedWeakHeapObject());
    }
    HeapObject* current_contents;
    while (current < end && (*current)->ToStrongHeapObject(&current_contents)) {
      int root_index = serializer_->root_index_map()->Lookup(current_contents);
      // Repeats are not subject to the write barrier so we can only use
      // immortal immovable root members. They are never in new space.
      if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
          Heap::RootIsImmortalImmovable(root_index) &&
          current_contents == current[-1]) {
          *current == current[-1]) {
        DCHECK(!serializer_->isolate()->heap()->InNewSpace(current_contents));
        int repeat_count = 1;
        while (&current[repeat_count] < end - 1 &&
               current[repeat_count] == current_contents) {
               current[repeat_count] == *current) {
          repeat_count++;
        }
        current += repeat_count;
@ -718,6 +735,12 @@ void Serializer<AllocatorT>::ObjectSerializer::VisitPointers(HeapObject* host,
        bytes_processed_so_far_ += kPointerSize;
        current++;
      }

      // TODO(marja): ditto.
      if (current < end) {
        CHECK(!(*current)->IsWeakHeapObject());
        CHECK(!(*current)->IsClearedWeakHeapObject());
      }
    }
  }
}

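The rewritten loop splits each slot range three ways: runs of smis and cleared weak slots are emitted as raw data, strong pointers are serialized individually, and live weak references are forbidden for now by the CHECKs. A hypothetical standalone classifier showing that split (tag values assumed from this CL's constants; none of this is V8 code):

#include <cstdint>
#include <iostream>
#include <vector>

enum class SlotKind { kRawData, kStrongPointer, kWeakPointer };

// Classify one tagged slot the way the serializer scan above does: smis
// (low bit 0) and the cleared sentinel (raw value 3) are raw data, tag 11 is
// a live weak reference, any other tagged value is a strong pointer.
SlotKind Classify(uintptr_t slot) {
  if ((slot & 1) == 0 || slot == 3) return SlotKind::kRawData;
  return (slot & 3) == 3 ? SlotKind::kWeakPointer : SlotKind::kStrongPointer;
}

int main() {
  // smi, cleared weak, strong pointer, live weak pointer
  std::vector<uintptr_t> slots = {2, 3, 0x1001, 0x1003};
  for (uintptr_t slot : slots) {
    switch (Classify(slot)) {
      case SlotKind::kRawData:
        std::cout << "copy as raw data\n";
        break;
      case SlotKind::kStrongPointer:
        std::cout << "serialize object reference\n";
        break;
      case SlotKind::kWeakPointer:
        std::cout << "unsupported for now (the CHECK above fails)\n";
        break;
    }
  }
  return 0;
}
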
@ -301,6 +301,8 @@ class Serializer<AllocatorT>::ObjectSerializer : public ObjectVisitor {
  void SerializeObject();
  void SerializeDeferred();
  void VisitPointers(HeapObject* host, Object** start, Object** end) override;
  void VisitPointers(HeapObject* host, MaybeObject** start,
                     MaybeObject** end) override;
  void VisitEmbeddedPointer(Code* host, RelocInfo* target) override;
  void VisitExternalReference(Foreign* host, Address* p) override;
  void VisitExternalReference(Code* host, RelocInfo* rinfo) override;

@ -11,6 +11,7 @@ namespace v8 {
namespace internal {

class CodeDataContainer;
class MaybeObject;
class Object;

#define ROOT_ID_LIST(V) \
@ -87,11 +88,16 @@ class ObjectVisitor BASE_EMBEDDED {
  // [start, end). Any or all of the values may be modified on return.
  virtual void VisitPointers(HeapObject* host, Object** start,
                             Object** end) = 0;
  virtual void VisitPointers(HeapObject* host, MaybeObject** start,
                             MaybeObject** end) = 0;

  // Handy shorthand for visiting a single pointer.
  virtual void VisitPointer(HeapObject* host, Object** p) {
    VisitPointers(host, p, p + 1);
  }
  virtual void VisitPointer(HeapObject* host, MaybeObject** p) {
    VisitPointers(host, p, p + 1);
  }

  // To allow lazy clearing of inline caches the visitor has
  // a rich interface for iterating over Code objects ...

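Every visitor now has to implement both overloads; one common shape (the serializer change above uses it) is to widen the strong-only variant into the maybe-weak one. A toy, self-contained illustration with stand-in types, not V8's actual classes:

#include <cstdint>
#include <iostream>

using Slot = uintptr_t;  // stand-in for Object** / MaybeObject** slots

class SlotCountingVisitor {
 public:
  // Strong-only slot ranges can always be handled by the maybe-weak variant.
  void VisitPointers(Slot* start, Slot* end) {
    VisitMaybeWeakPointers(start, end);
  }

  void VisitMaybeWeakPointers(Slot* start, Slot* end) {
    for (Slot* p = start; p < end; p++) {
      if ((*p & 3) == 1) strong_count_++;           // tag 01: strong pointer
      if ((*p & 3) == 3 && *p != 3) weak_count_++;  // tag 11, not cleared
    }
  }

  int strong_count_ = 0;
  int weak_count_ = 0;
};

int main() {
  Slot slots[] = {4, 0x1001, 0x1003, 3};  // smi, strong, weak, cleared
  SlotCountingVisitor visitor;
  visitor.VisitPointers(slots, slots + 4);
  std::cout << visitor.strong_count_ << " strong, " << visitor.weak_count_
            << " weak\n";
  return 0;
}
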
@ -1885,6 +1885,13 @@ void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  }
}

void MacroAssembler::LoadWeakValue(Register in_out, Label* target_if_cleared) {
  cmpp(in_out, Immediate(kClearedWeakHeapObject));
  j(equal, target_if_cleared);

  andp(in_out, Immediate(~kWeakHeapObjectMask));
}

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK_GT(value, 0);
  if (FLAG_native_code_counters && counter->Enabled()) {

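In plain C++ terms, the emitted compare/branch/and sequence behaves like the sketch below (an editor's illustration, not V8 code; the bool return stands in for the branch to target_if_cleared):

#include <cassert>
#include <cstdint>

constexpr uintptr_t kClearedWeakHeapObject = 3;
constexpr uintptr_t kWeakHeapObjectMask = 1u << 1;

// Returns false where the generated code would jump to target_if_cleared;
// otherwise strips the weak bit so the value is an ordinary tagged pointer.
bool LoadWeakValue(uintptr_t* in_out) {
  if (*in_out == kClearedWeakHeapObject) return false;  // cmpp + j(equal)
  *in_out &= ~kWeakHeapObjectMask;                      // andp
  return true;
}

int main() {
  uintptr_t reg = 0x1003;  // fake live weak reference (tag 11)
  assert(LoadWeakValue(&reg) && reg == 0x1001);
  reg = kClearedWeakHeapObject;
  assert(!LoadWeakValue(&reg));
  return 0;
}
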
@ -886,6 +886,9 @@ class MacroAssembler : public TurboAssembler {
  void IncrementCounter(StatsCounter* counter, int value);
  void DecrementCounter(StatsCounter* counter, int value);

  // ---------------------------------------------------------------------------
  // In-place weak references.
  void LoadWeakValue(Register in_out, Label* target_if_cleared);

  // ---------------------------------------------------------------------------
  // Debugging

@ -113,6 +113,7 @@ v8_source_set("cctest_sources") {
    "heap/test-mark-compact.cc",
    "heap/test-page-promotion.cc",
    "heap/test-spaces.cc",
    "heap/test-weak-references.cc",
    "interpreter/bytecode-expectations-printer.cc",
    "interpreter/bytecode-expectations-printer.h",
    "interpreter/interpreter-tester.cc",

312
test/cctest/heap/test-weak-references.cc
Normal file
@ -0,0 +1,312 @@
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/api.h"
#include "src/assembler-inl.h"
#include "src/factory.h"
#include "src/isolate.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-utils.h"

namespace v8 {
namespace internal {
namespace heap {

Handle<FeedbackVector> CreateFeedbackVectorForTest(
    v8::Isolate* isolate, Factory* factory,
    PretenureFlag pretenure_flag = NOT_TENURED) {
  v8::Local<v8::Script> script =
      v8::Script::Compile(isolate->GetCurrentContext(),
                          v8::String::NewFromUtf8(isolate, "function foo() {}",
                                                  v8::NewStringType::kNormal)
                              .ToLocalChecked())
          .ToLocalChecked();
  Handle<Object> obj = v8::Utils::OpenHandle(*script);
  Handle<SharedFunctionInfo> shared_function =
      Handle<SharedFunctionInfo>(JSFunction::cast(*obj)->shared());
  Handle<FeedbackVector> fv =
      factory->NewFeedbackVector(shared_function, pretenure_flag);
  return fv;
}

TEST(WeakReferencesBasic) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();
  HandleScope outer_scope(isolate);

  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory);
  CHECK(heap->InNewSpace(*fv));

  MaybeObject* code_object = fv->optimized_code_weak_or_smi();
  CHECK(code_object->IsSmi());
  CcTest::CollectAllGarbage();
  CHECK(heap->InNewSpace(*fv));
  CHECK_EQ(code_object, fv->optimized_code_weak_or_smi());

  {
    HandleScope inner_scope(isolate);

    // Create a new Code.
    Assembler assm(isolate, nullptr, 0);
    assm.nop();  // supported on all architectures
    CodeDesc desc;
    assm.GetCode(isolate, &desc);
    Handle<Code> code =
        isolate->factory()->NewCode(desc, Code::STUB, Handle<Code>());
    CHECK(code->IsCode());

    fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*code));
    HeapObject* code_heap_object;
    CHECK(
        fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&code_heap_object));
    CHECK_EQ(*code, code_heap_object);

    CcTest::CollectAllGarbage();

    CHECK(
        fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&code_heap_object));
    CHECK_EQ(*code, code_heap_object);
  }  // code will go out of scope.

  CcTest::CollectAllGarbage();
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());
}

TEST(WeakReferencesOldToOld) {
  // Like WeakReferencesBasic, but the updated weak slot is in the old space,
  // and referring to an old space object.
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory, TENURED);
  CHECK(heap->InOldSpace(*fv));

  // Create a new FixedArray which the FeedbackVector will point to.
  Handle<FixedArray> fixed_array = factory->NewFixedArray(1, TENURED);
  CHECK(heap->InOldSpace(*fixed_array));
  fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));

  Page* page_before_gc = Page::FromAddress(fixed_array->address());
  heap::ForceEvacuationCandidate(page_before_gc);
  CcTest::CollectAllGarbage();
  CHECK(heap->InOldSpace(*fixed_array));

  HeapObject* heap_object;
  CHECK(fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&heap_object));
  CHECK_EQ(heap_object, *fixed_array);
}

TEST(WeakReferencesOldToNew) {
  // Like WeakReferencesBasic, but the updated weak slot is in the old space,
  // and referring to a new space object.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory, TENURED);
  CHECK(heap->InOldSpace(*fv));

  // Create a new FixedArray which the FeedbackVector will point to.
  Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
  CHECK(heap->InNewSpace(*fixed_array));
  fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));

  CcTest::CollectAllGarbage();

  HeapObject* heap_object;
  CHECK(fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&heap_object));
  CHECK_EQ(heap_object, *fixed_array);
}

TEST(WeakReferencesOldToNewScavenged) {
  // Like WeakReferencesBasic, but the updated weak slot is in the old space,
  // and referring to a new space object, which is then scavenged.
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory, TENURED);
  CHECK(heap->InOldSpace(*fv));

  // Create a new FixedArray which the FeedbackVector will point to.
  Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
  CHECK(heap->InNewSpace(*fixed_array));
  fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));

  CcTest::CollectGarbage(NEW_SPACE);

  HeapObject* heap_object;
  CHECK(fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&heap_object));
  CHECK_EQ(heap_object, *fixed_array);
}

TEST(WeakReferencesOldToCleared) {
  // Like WeakReferencesBasic, but the updated weak slot is in the old space,
  // and is cleared.
  ManualGCScope manual_gc_scope;
  FLAG_manual_evacuation_candidates_selection = true;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory, TENURED);
  CHECK(heap->InOldSpace(*fv));
  fv->set_optimized_code_weak_or_smi(HeapObjectReference::ClearedValue());

  CcTest::CollectAllGarbage();
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());
}

TEST(ObjectMovesBeforeClearingWeakField) {
  if (!FLAG_incremental_marking) {
    return;
  }
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory);
  CHECK(heap->InNewSpace(*fv));
  FeedbackVector* fv_location = *fv;
  {
    HandleScope inner_scope(isolate);
    // Create a new FixedArray which the FeedbackVector will point to.
    Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
    CHECK(heap->InNewSpace(*fixed_array));
    fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));
    // inner_scope will go out of scope, so when marking the next time,
    // *fixed_array will stay white.
  }

  // Do marking steps; this will store *fv into the list for later processing
  // (since it points to a white object).
  SimulateIncrementalMarking(heap, true);

  // Scavenger will move *fv.
  CcTest::CollectGarbage(NEW_SPACE);
  FeedbackVector* new_fv_location = *fv;
  CHECK_NE(fv_location, new_fv_location);
  CHECK(fv->optimized_code_weak_or_smi()->IsWeakHeapObject());

  // Now we try to clear *fv.
  CcTest::CollectAllGarbage();
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());
}

TEST(ObjectWithWeakReferencePromoted) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory);
  CHECK(heap->InNewSpace(*fv));

  // Create a new FixedArray which the FeedbackVector will point to.
  Handle<FixedArray> fixed_array = factory->NewFixedArray(1);
  CHECK(heap->InNewSpace(*fixed_array));
  fv->set_optimized_code_weak_or_smi(HeapObjectReference::Weak(*fixed_array));

  CcTest::CollectGarbage(NEW_SPACE);
  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(heap->InOldSpace(*fv));
  CHECK(heap->InOldSpace(*fixed_array));

  HeapObject* heap_object;
  CHECK(fv->optimized_code_weak_or_smi()->ToWeakHeapObject(&heap_object));
  CHECK_EQ(heap_object, *fixed_array);
}

TEST(ObjectWithClearedWeakReferencePromoted) {
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory);
  CHECK(heap->InNewSpace(*fv));

  fv->set_optimized_code_weak_or_smi(HeapObjectReference::ClearedValue());

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(heap->InNewSpace(*fv));
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());

  CcTest::CollectGarbage(NEW_SPACE);
  CHECK(heap->InOldSpace(*fv));
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());

  CcTest::CollectAllGarbage();
  CHECK(fv->optimized_code_weak_or_smi()->IsClearedWeakHeapObject());
}

TEST(WeakReferenceWriteBarrier) {
  if (!FLAG_incremental_marking) {
    return;
  }

  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  Heap* heap = isolate->heap();

  HandleScope outer_scope(isolate);
  Handle<FeedbackVector> fv =
      CreateFeedbackVectorForTest(CcTest::isolate(), factory);
  CHECK(heap->InNewSpace(*fv));

  {
    HandleScope inner_scope(isolate);

    // Create a new FixedArray which the FeedbackVector will point to.
    Handle<FixedArray> fixed_array1 = factory->NewFixedArray(1);
    CHECK(heap->InNewSpace(*fixed_array1));
    fv->set_optimized_code_weak_or_smi(
        HeapObjectReference::Weak(*fixed_array1));

    SimulateIncrementalMarking(heap, true);

    Handle<FixedArray> fixed_array2 = factory->NewFixedArray(1);
    CHECK(heap->InNewSpace(*fixed_array2));
    // This write will trigger the write barrier.
    fv->set_optimized_code_weak_or_smi(
        HeapObjectReference::Weak(*fixed_array2));
  }

  CcTest::CollectAllGarbage();

  // Check that the write barrier treated the weak reference as strong.
  CHECK(fv->optimized_code_weak_or_smi()->IsWeakHeapObject());
}

}  // namespace heap
}  // namespace internal
}  // namespace v8

@ -35,6 +35,7 @@

#include "include/v8-profiler.h"
#include "src/api.h"
#include "src/assembler-inl.h"
#include "src/base/hashmap.h"
#include "src/collector.h"
#include "src/debug/debug.h"

@ -3475,3 +3476,41 @@ TEST(SamplingHeapProfilerSampleDuringDeopt) {
  CHECK(profile);
  heap_profiler->StopSamplingHeapProfiler();
}

TEST(WeakReference) {
  v8::Isolate* isolate = CcTest::isolate();
  i::Isolate* i_isolate = CcTest::i_isolate();
  i::Factory* factory = i_isolate->factory();
  i::HandleScope scope(i_isolate);
  LocalContext env;

  // Create a FeedbackVector.
  v8::Local<v8::Script> script =
      v8::Script::Compile(isolate->GetCurrentContext(),
                          v8::String::NewFromUtf8(isolate, "function foo() {}",
                                                  v8::NewStringType::kNormal)
                              .ToLocalChecked())
          .ToLocalChecked();
  v8::MaybeLocal<v8::Value> value = script->Run(isolate->GetCurrentContext());
  CHECK(!value.IsEmpty());

  i::Handle<i::Object> obj = v8::Utils::OpenHandle(*script);
  i::Handle<i::SharedFunctionInfo> shared_function =
      i::Handle<i::SharedFunctionInfo>(i::JSFunction::cast(*obj)->shared());
  i::Handle<i::FeedbackVector> fv = factory->NewFeedbackVector(shared_function);

  // Create a Code.
  i::Assembler assm(i_isolate, nullptr, 0);
  assm.nop();  // supported on all architectures
  i::CodeDesc desc;
  assm.GetCode(i_isolate, &desc);
  i::Handle<i::Code> code =
      factory->NewCode(desc, i::Code::STUB, i::Handle<i::Code>());
  CHECK(code->IsCode());

  fv->set_optimized_code_weak_or_smi(i::HeapObjectReference::Weak(*code));

  v8::HeapProfiler* heap_profiler = isolate->GetHeapProfiler();
  const v8::HeapSnapshot* snapshot = heap_profiler->TakeHeapSnapshot();
  CHECK(ValidateSnapshot(snapshot));
}