Reland "[heap] Clean-up keys of oldspace weakmaps during scavenge"
This is a reland of 6604f1826d
Original change's description:
> [heap] Clean-up keys of oldspace weakmaps during scavenge
>
> This CL adds handling for cleaning up weakmap (EphemeronHashTable)
> keys during scavenge, even if the weakmap resides in oldspace.
>
> Change-Id: If8d711c050ddbcae4dd6e8da549e0c0d08ba47b2
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1523787
> Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
> Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
> Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#60432}
CQ_INCLUDE_TRYBOTS=luci.chrome.try:Mac Builder Perf
Change-Id: Ie640f2b0340637a5391fb17ba3c9e6422eaf306a
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1541476
Commit-Queue: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#60554}
This commit is contained in: parent 173d234aae, commit d4550f4ac1
@@ -34,6 +34,18 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
   data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
 }
 
+void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  const Register default_stub_registers[] = {r0, r1, r2, r3, r4};
+
+  data->RestrictAllocatableRegisters(default_stub_registers,
+                                     arraysize(default_stub_registers));
+
+  CHECK_LE(static_cast<size_t>(kParameterCount),
+           arraysize(default_stub_registers));
+  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
+}
+
 const Register FastNewFunctionContextDescriptor::ScopeInfoRegister() {
   return r1;
 }
@@ -675,6 +675,27 @@ void TurboAssembler::RestoreRegisters(RegList registers) {
   ldm(ia_w, sp, regs);
 }
 
+void TurboAssembler::CallEphemeronKeyBarrier(Register object, Register address,
+                                             SaveFPRegsMode fp_mode) {
+  EphemeronKeyBarrierDescriptor descriptor;
+  RegList registers = descriptor.allocatable_registers();
+
+  SaveRegisters(registers);
+
+  Register object_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kObject));
+  Register slot_parameter(descriptor.GetRegisterParameter(
+      EphemeronKeyBarrierDescriptor::kSlotAddress));
+  Register fp_mode_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kFPMode));
+
+  MovePair(object_parameter, object, slot_parameter, address);
+  Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
+  Call(isolate()->builtins()->builtin_handle(Builtins::kEphemeronKeyBarrier),
+       RelocInfo::CODE_TARGET);
+  RestoreRegisters(registers);
+}
+
 void TurboAssembler::CallRecordWriteStub(
     Register object, Register address,
     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
@@ -353,6 +353,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void CallRecordWriteStub(Register object, Register address,
                            RememberedSetAction remembered_set_action,
                            SaveFPRegsMode fp_mode, Address wasm_target);
+  void CallEphemeronKeyBarrier(Register object, Register address,
+                               SaveFPRegsMode fp_mode);
 
   // Does a runtime check for 16/32 FP registers. Either way, pushes 32 double
   // values to location, saving [d0..(d15|d31)].
@@ -34,6 +34,18 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
   data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
 }
 
+void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  const Register default_stub_registers[] = {x0, x1, x2, x3, x4};
+
+  data->RestrictAllocatableRegisters(default_stub_registers,
+                                     arraysize(default_stub_registers));
+
+  CHECK_LE(static_cast<size_t>(kParameterCount),
+           arraysize(default_stub_registers));
+  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
+}
+
 const Register FastNewFunctionContextDescriptor::ScopeInfoRegister() {
   return x1;
 }
@@ -3033,6 +3033,28 @@ void TurboAssembler::RestoreRegisters(RegList registers) {
   PopCPURegList(regs);
 }
 
+void TurboAssembler::CallEphemeronKeyBarrier(Register object, Register address,
+                                             SaveFPRegsMode fp_mode) {
+  EphemeronKeyBarrierDescriptor descriptor;
+  RegList registers = descriptor.allocatable_registers();
+
+  SaveRegisters(registers);
+
+  Register object_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kObject));
+  Register slot_parameter(descriptor.GetRegisterParameter(
+      EphemeronKeyBarrierDescriptor::kSlotAddress));
+  Register fp_mode_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kFPMode));
+
+  MovePair(object_parameter, object, slot_parameter, address);
+
+  Mov(fp_mode_parameter, Smi::FromEnum(fp_mode));
+  Call(isolate()->builtins()->builtin_handle(Builtins::kEphemeronKeyBarrier),
+       RelocInfo::CODE_TARGET);
+  RestoreRegisters(registers);
+}
+
 void TurboAssembler::CallRecordWriteStub(
     Register object, Register address,
     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
@@ -746,6 +746,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void CallRecordWriteStub(Register object, Register address,
                            RememberedSetAction remembered_set_action,
                            SaveFPRegsMode fp_mode, Address wasm_target);
+  void CallEphemeronKeyBarrier(Register object, Register address,
+                               SaveFPRegsMode fp_mode);
 
   // Alternative forms of Push and Pop, taking a RegList or CPURegList that
   // specifies the registers that are to be pushed or popped. Higher-numbered
@@ -2342,7 +2342,8 @@ void WeakCollectionsBuiltinsAssembler::AddEntry(
     TNode<Object> key, TNode<Object> value, TNode<IntPtrT> number_of_elements) {
   // See EphemeronHashTable::AddEntry().
   TNode<IntPtrT> value_index = ValueIndexFromKeyIndex(key_index);
-  UnsafeStoreFixedArrayElement(table, key_index, key);
+  UnsafeStoreFixedArrayElement(table, key_index, key,
+                               UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
   UnsafeStoreFixedArrayElement(table, value_index, value);
 
   // See HashTableBase::ElementAdded().
@@ -36,6 +36,7 @@ namespace internal {
 #define BUILTIN_LIST_BASE(CPP, API, TFJ, TFC, TFS, TFH, ASM) \
   /* GC write barrirer */ \
   TFC(RecordWrite, RecordWrite) \
+  TFC(EphemeronKeyBarrier, EphemeronKeyBarrier) \
   \
   /* Adaptors for CPP/API builtin */ \
   TFC(AdaptorWithExitFrame, CppBuiltinAdaptor) \
@@ -449,6 +449,25 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
   Return(TrueConstant());
 }
 
+TF_BUILTIN(EphemeronKeyBarrier, RecordWriteCodeStubAssembler) {
+  Label exit(this);
+
+  Node* function = ExternalConstant(
+      ExternalReference::ephemeron_key_write_barrier_function());
+  Node* isolate_constant =
+      ExternalConstant(ExternalReference::isolate_address(isolate()));
+  Node* address = Parameter(Descriptor::kSlotAddress);
+  Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
+  Node* fp_mode = Parameter(Descriptor::kFPMode);
+  CallCFunction3WithCallerSavedRegistersMode(
+      MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
+      MachineType::Pointer(), function, object, address, isolate_constant,
+      fp_mode, &exit);
+
+  BIND(&exit);
+  Return(TrueConstant());
+}
+
 class DeletePropertyBaseAssembler : public AccessorAssembler {
  public:
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
@@ -2781,7 +2781,8 @@ void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
       this, Word32Or(IsFixedArraySubclass(object), IsPropertyArray(object)));
   CSA_SLOW_ASSERT(this, MatchesParameterMode(index_node, parameter_mode));
   DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
-         barrier_mode == UPDATE_WRITE_BARRIER);
+         barrier_mode == UPDATE_WRITE_BARRIER ||
+         barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER);
   DCHECK(IsAligned(additional_offset, kTaggedSize));
   STATIC_ASSERT(static_cast<int>(FixedArray::kHeaderSize) ==
                 static_cast<int>(PropertyArray::kHeaderSize));
@@ -2814,6 +2815,8 @@ void CodeStubAssembler::StoreFixedArrayOrPropertyArrayElement(
                                            FixedArray::kHeaderSize));
   if (barrier_mode == SKIP_WRITE_BARRIER) {
     StoreNoWriteBarrier(MachineRepresentation::kTagged, object, offset, value);
+  } else if (barrier_mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
+    StoreEphemeronKey(object, offset, value);
   } else {
     Store(object, offset, value);
   }
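The three-way dispatch above is the CSA form of an ordinary barrier-mode switch. A stand-alone analogue (a sketch with stub stores; none of these helper names are V8 APIs):

#include <cstdio>

enum BarrierMode {
  SKIP_WRITE_BARRIER,
  UPDATE_WRITE_BARRIER,
  UPDATE_EPHEMERON_KEY_WRITE_BARRIER
};

static void StoreNoBarrier() { std::puts("raw store, no barrier"); }
static void StoreEphemeronKey() { std::puts("store + ephemeron-key barrier"); }
static void StoreWithFullBarrier() { std::puts("store + full write barrier"); }

static void StoreElement(BarrierMode mode) {
  if (mode == SKIP_WRITE_BARRIER) {
    StoreNoBarrier();
  } else if (mode == UPDATE_EPHEMERON_KEY_WRITE_BARRIER) {
    StoreEphemeronKey();  // the new branch added by this CL
  } else {
    StoreWithFullBarrier();
  }
}

int main() { StoreElement(UPDATE_EPHEMERON_KEY_WRITE_BARRIER); }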
@@ -225,7 +225,9 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
       __ Push(lr);
       unwinding_info_writer_->MarkLinkRegisterOnTopOfStack(__ pc_offset());
     }
-    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
+    if (mode_ == RecordWriteMode::kValueIsEphemeronKey) {
+      __ CallEphemeronKeyBarrier(object_, scratch1_, save_fp_mode);
+    } else if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
       __ CallRecordWriteStub(object_, scratch1_, remembered_set_action,
                              save_fp_mode, wasm::WasmCode::kWasmRecordWrite);
     } else {
@@ -294,7 +294,9 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
       __ Push(lr, padreg);
       unwinding_info_writer_->MarkLinkRegisterOnTopOfStack(__ pc_offset(), sp);
     }
-    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
+    if (mode_ == RecordWriteMode::kValueIsEphemeronKey) {
+      __ CallEphemeronKeyBarrier(object_, scratch1_, save_fp_mode);
+    } else if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
       // A direct call to a wasm runtime stub defined in this module.
       // Just encode the stub index. This will be patched when the code
       // is added to the native module and copied into wasm code space.
@@ -297,7 +297,9 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
             : OMIT_REMEMBERED_SET;
     SaveFPRegsMode const save_fp_mode =
         frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
-    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
+    if (mode_ == RecordWriteMode::kValueIsEphemeronKey) {
+      __ CallEphemeronKeyBarrier(object_, scratch1_, save_fp_mode);
+    } else if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
       // A direct call to a wasm runtime stub defined in this module.
       // Just encode the stub index. This will be patched when the code
       // is added to the native module and copied into wasm code space.
@@ -35,7 +35,12 @@ namespace internal {
 namespace compiler {
 
 // Modes for ArchStoreWithWriteBarrier below.
-enum class RecordWriteMode { kValueIsMap, kValueIsPointer, kValueIsAny };
+enum class RecordWriteMode {
+  kValueIsMap,
+  kValueIsPointer,
+  kValueIsEphemeronKey,
+  kValueIsAny,
+};
 
 inline RecordWriteMode WriteBarrierKindToRecordWriteMode(
     WriteBarrierKind write_barrier_kind) {
@@ -44,6 +49,8 @@ inline RecordWriteMode WriteBarrierKindToRecordWriteMode(
       return RecordWriteMode::kValueIsMap;
     case kPointerWriteBarrier:
       return RecordWriteMode::kValueIsPointer;
+    case kEphemeronKeyWriteBarrier:
+      return RecordWriteMode::kValueIsEphemeronKey;
     case kFullWriteBarrier:
       return RecordWriteMode::kValueIsAny;
     case kNoWriteBarrier:
@@ -258,7 +258,9 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
     SaveFPRegsMode const save_fp_mode =
         frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
 
-    if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
+    if (mode_ == RecordWriteMode::kValueIsEphemeronKey) {
+      __ CallEphemeronKeyBarrier(object_, scratch1_, save_fp_mode);
+    } else if (stub_mode_ == StubCallMode::kCallWasmRuntimeStub) {
       // A direct call to a wasm runtime stub defined in this module.
       // Just encode the stub index. This will be patched when the code
       // is added to the native module and copied into wasm code space.
@@ -1009,6 +1009,11 @@ Node* CodeAssembler::Store(Node* base, Node* offset, Node* value) {
                                 value, kFullWriteBarrier);
 }
 
+Node* CodeAssembler::StoreEphemeronKey(Node* base, Node* offset, Node* value) {
+  return raw_assembler()->Store(MachineRepresentation::kTagged, base, offset,
+                                value, kEphemeronKeyWriteBarrier);
+}
+
 Node* CodeAssembler::StoreNoWriteBarrier(MachineRepresentation rep, Node* base,
                                          Node* value) {
   return raw_assembler()->Store(rep, base, value, kNoWriteBarrier);
@@ -943,6 +943,7 @@ class V8_EXPORT_PRIVATE CodeAssembler {
   // Store value to raw memory location.
   Node* Store(Node* base, Node* value);
   Node* Store(Node* base, Node* offset, Node* value);
+  Node* StoreEphemeronKey(Node* base, Node* offset, Node* value);
   Node* StoreNoWriteBarrier(MachineRepresentation rep, Node* base, Node* value);
   Node* StoreNoWriteBarrier(MachineRepresentation rep, Node* base, Node* offset,
                             Node* value);
|
|||||||
STACK_SLOT_CACHED_SIZES_ALIGNMENTS_LIST(STACKSLOT)
|
STACK_SLOT_CACHED_SIZES_ALIGNMENTS_LIST(STACKSLOT)
|
||||||
#undef STACKSLOT
|
#undef STACKSLOT
|
||||||
|
|
||||||
#define STORE(Type) \
|
#define STORE(Type) \
|
||||||
struct Store##Type##Operator : public Operator1<StoreRepresentation> { \
|
struct Store##Type##Operator : public Operator1<StoreRepresentation> { \
|
||||||
explicit Store##Type##Operator(WriteBarrierKind write_barrier_kind) \
|
explicit Store##Type##Operator(WriteBarrierKind write_barrier_kind) \
|
||||||
: Operator1<StoreRepresentation>( \
|
: Operator1<StoreRepresentation>( \
|
||||||
IrOpcode::kStore, \
|
IrOpcode::kStore, \
|
||||||
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
||||||
"Store", 3, 1, 1, 0, 1, 0, \
|
"Store", 3, 1, 1, 0, 1, 0, \
|
||||||
StoreRepresentation(MachineRepresentation::Type, \
|
StoreRepresentation(MachineRepresentation::Type, \
|
||||||
write_barrier_kind)) {} \
|
write_barrier_kind)) {} \
|
||||||
}; \
|
}; \
|
||||||
struct Store##Type##NoWriteBarrier##Operator final \
|
struct Store##Type##NoWriteBarrier##Operator final \
|
||||||
: public Store##Type##Operator { \
|
: public Store##Type##Operator { \
|
||||||
Store##Type##NoWriteBarrier##Operator() \
|
Store##Type##NoWriteBarrier##Operator() \
|
||||||
: Store##Type##Operator(kNoWriteBarrier) {} \
|
: Store##Type##Operator(kNoWriteBarrier) {} \
|
||||||
}; \
|
}; \
|
||||||
struct Store##Type##MapWriteBarrier##Operator final \
|
struct Store##Type##MapWriteBarrier##Operator final \
|
||||||
: public Store##Type##Operator { \
|
: public Store##Type##Operator { \
|
||||||
Store##Type##MapWriteBarrier##Operator() \
|
Store##Type##MapWriteBarrier##Operator() \
|
||||||
: Store##Type##Operator(kMapWriteBarrier) {} \
|
: Store##Type##Operator(kMapWriteBarrier) {} \
|
||||||
}; \
|
}; \
|
||||||
struct Store##Type##PointerWriteBarrier##Operator final \
|
struct Store##Type##PointerWriteBarrier##Operator final \
|
||||||
: public Store##Type##Operator { \
|
: public Store##Type##Operator { \
|
||||||
Store##Type##PointerWriteBarrier##Operator() \
|
Store##Type##PointerWriteBarrier##Operator() \
|
||||||
: Store##Type##Operator(kPointerWriteBarrier) {} \
|
: Store##Type##Operator(kPointerWriteBarrier) {} \
|
||||||
}; \
|
}; \
|
||||||
struct Store##Type##FullWriteBarrier##Operator final \
|
struct Store##Type##EphemeronKeyWriteBarrier##Operator final \
|
||||||
: public Store##Type##Operator { \
|
: public Store##Type##Operator { \
|
||||||
Store##Type##FullWriteBarrier##Operator() \
|
Store##Type##EphemeronKeyWriteBarrier##Operator() \
|
||||||
: Store##Type##Operator(kFullWriteBarrier) {} \
|
: Store##Type##Operator(kEphemeronKeyWriteBarrier) {} \
|
||||||
}; \
|
}; \
|
||||||
struct UnalignedStore##Type##Operator final \
|
struct Store##Type##FullWriteBarrier##Operator final \
|
||||||
: public Operator1<UnalignedStoreRepresentation> { \
|
: public Store##Type##Operator { \
|
||||||
UnalignedStore##Type##Operator() \
|
Store##Type##FullWriteBarrier##Operator() \
|
||||||
: Operator1<UnalignedStoreRepresentation>( \
|
: Store##Type##Operator(kFullWriteBarrier) {} \
|
||||||
IrOpcode::kUnalignedStore, \
|
}; \
|
||||||
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
struct UnalignedStore##Type##Operator final \
|
||||||
"UnalignedStore", 3, 1, 1, 0, 1, 0, \
|
: public Operator1<UnalignedStoreRepresentation> { \
|
||||||
MachineRepresentation::Type) {} \
|
UnalignedStore##Type##Operator() \
|
||||||
}; \
|
: Operator1<UnalignedStoreRepresentation>( \
|
||||||
struct ProtectedStore##Type##Operator \
|
IrOpcode::kUnalignedStore, \
|
||||||
: public Operator1<StoreRepresentation> { \
|
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
||||||
explicit ProtectedStore##Type##Operator() \
|
"UnalignedStore", 3, 1, 1, 0, 1, 0, \
|
||||||
: Operator1<StoreRepresentation>( \
|
MachineRepresentation::Type) {} \
|
||||||
IrOpcode::kProtectedStore, \
|
}; \
|
||||||
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
struct ProtectedStore##Type##Operator \
|
||||||
"Store", 3, 1, 1, 0, 1, 0, \
|
: public Operator1<StoreRepresentation> { \
|
||||||
StoreRepresentation(MachineRepresentation::Type, \
|
explicit ProtectedStore##Type##Operator() \
|
||||||
kNoWriteBarrier)) {} \
|
: Operator1<StoreRepresentation>( \
|
||||||
}; \
|
IrOpcode::kProtectedStore, \
|
||||||
Store##Type##NoWriteBarrier##Operator kStore##Type##NoWriteBarrier; \
|
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
|
||||||
Store##Type##MapWriteBarrier##Operator kStore##Type##MapWriteBarrier; \
|
"Store", 3, 1, 1, 0, 1, 0, \
|
||||||
Store##Type##PointerWriteBarrier##Operator \
|
StoreRepresentation(MachineRepresentation::Type, \
|
||||||
kStore##Type##PointerWriteBarrier; \
|
kNoWriteBarrier)) {} \
|
||||||
Store##Type##FullWriteBarrier##Operator kStore##Type##FullWriteBarrier; \
|
}; \
|
||||||
UnalignedStore##Type##Operator kUnalignedStore##Type; \
|
Store##Type##NoWriteBarrier##Operator kStore##Type##NoWriteBarrier; \
|
||||||
|
Store##Type##MapWriteBarrier##Operator kStore##Type##MapWriteBarrier; \
|
||||||
|
Store##Type##PointerWriteBarrier##Operator \
|
||||||
|
kStore##Type##PointerWriteBarrier; \
|
||||||
|
Store##Type##EphemeronKeyWriteBarrier##Operator \
|
||||||
|
kStore##Type##EphemeronKeyWriteBarrier; \
|
||||||
|
Store##Type##FullWriteBarrier##Operator kStore##Type##FullWriteBarrier; \
|
||||||
|
UnalignedStore##Type##Operator kUnalignedStore##Type; \
|
||||||
ProtectedStore##Type##Operator kProtectedStore##Type;
|
ProtectedStore##Type##Operator kProtectedStore##Type;
|
||||||
MACHINE_REPRESENTATION_LIST(STORE)
|
MACHINE_REPRESENTATION_LIST(STORE)
|
||||||
#undef STORE
|
#undef STORE
|
||||||
@ -933,18 +940,20 @@ const Operator* MachineOperatorBuilder::StackSlot(MachineRepresentation rep,
|
|||||||
|
|
||||||
const Operator* MachineOperatorBuilder::Store(StoreRepresentation store_rep) {
|
const Operator* MachineOperatorBuilder::Store(StoreRepresentation store_rep) {
|
||||||
switch (store_rep.representation()) {
|
switch (store_rep.representation()) {
|
||||||
#define STORE(kRep) \
|
#define STORE(kRep) \
|
||||||
case MachineRepresentation::kRep: \
|
case MachineRepresentation::kRep: \
|
||||||
switch (store_rep.write_barrier_kind()) { \
|
switch (store_rep.write_barrier_kind()) { \
|
||||||
case kNoWriteBarrier: \
|
case kNoWriteBarrier: \
|
||||||
return &cache_.k##Store##kRep##NoWriteBarrier; \
|
return &cache_.k##Store##kRep##NoWriteBarrier; \
|
||||||
case kMapWriteBarrier: \
|
case kMapWriteBarrier: \
|
||||||
return &cache_.k##Store##kRep##MapWriteBarrier; \
|
return &cache_.k##Store##kRep##MapWriteBarrier; \
|
||||||
case kPointerWriteBarrier: \
|
case kPointerWriteBarrier: \
|
||||||
return &cache_.k##Store##kRep##PointerWriteBarrier; \
|
return &cache_.k##Store##kRep##PointerWriteBarrier; \
|
||||||
case kFullWriteBarrier: \
|
case kEphemeronKeyWriteBarrier: \
|
||||||
return &cache_.k##Store##kRep##FullWriteBarrier; \
|
return &cache_.k##Store##kRep##EphemeronKeyWriteBarrier; \
|
||||||
} \
|
case kFullWriteBarrier: \
|
||||||
|
return &cache_.k##Store##kRep##FullWriteBarrier; \
|
||||||
|
} \
|
||||||
break;
|
break;
|
||||||
MACHINE_REPRESENTATION_LIST(STORE)
|
MACHINE_REPRESENTATION_LIST(STORE)
|
||||||
#undef STORE
|
#undef STORE
|
||||||
|
@@ -244,6 +244,9 @@ ExternalReference ExternalReference::store_buffer_overflow_function() {
 FUNCTION_REFERENCE(delete_handle_scope_extensions,
                    HandleScope::DeleteExtensions)
 
+FUNCTION_REFERENCE(ephemeron_key_write_barrier_function,
+                   Heap::EphemeronKeyWriteBarrierFromCode)
+
 FUNCTION_REFERENCE(get_date_field_function, JSDate::GetField)
 
 ExternalReference ExternalReference::date_cache_stamp(Isolate* isolate) {
@@ -109,6 +109,8 @@ class StatsCounter;
     "copy_typed_array_elements_to_typed_array") \
   V(cpu_features, "cpu_features") \
   V(delete_handle_scope_extensions, "HandleScope::DeleteExtensions") \
+  V(ephemeron_key_write_barrier_function, \
+    "Heap::EphemeronKeyWriteBarrierFromCode") \
   V(f64_acos_wrapper_function, "f64_acos_wrapper") \
   V(f64_asin_wrapper_function, "f64_asin_wrapper") \
   V(f64_mod_wrapper_function, "f64_mod_wrapper") \
@@ -731,6 +731,7 @@ enum WriteBarrierKind : uint8_t {
   kNoWriteBarrier,
   kMapWriteBarrier,
   kPointerWriteBarrier,
+  kEphemeronKeyWriteBarrier,
   kFullWriteBarrier
 };
 
@@ -746,6 +747,8 @@ inline std::ostream& operator<<(std::ostream& os, WriteBarrierKind kind) {
       return os << "MapWriteBarrier";
     case kPointerWriteBarrier:
       return os << "PointerWriteBarrier";
+    case kEphemeronKeyWriteBarrier:
+      return os << "EphemeronKeyWriteBarrier";
     case kFullWriteBarrier:
       return os << "FullWriteBarrier";
   }
@@ -115,6 +115,23 @@ inline void GenerationalBarrierInternal(HeapObject object, Address slot,
   Heap_GenerationalBarrierSlow(object, slot, value);
 }
 
+inline void GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,
+                                                    Address slot,
+                                                    HeapObject value) {
+  DCHECK(Heap::PageFlagsAreConsistent(table));
+  heap_internals::MemoryChunk* value_chunk =
+      heap_internals::MemoryChunk::FromHeapObject(value);
+  heap_internals::MemoryChunk* table_chunk =
+      heap_internals::MemoryChunk::FromHeapObject(table);
+
+  if (!value_chunk->InYoungGeneration() || table_chunk->InYoungGeneration()) {
+    return;
+  }
+
+  Heap* heap = GetHeapFromWritableObject(table);
+  heap->RecordEphemeronKeyWrite(table, slot);
+}
+
 inline void MarkingBarrierInternal(HeapObject object, Address slot,
                                    HeapObject value) {
   DCHECK(Heap_PageFlagsAreConsistent(object));
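The early return above encodes when a write must be recorded: only a new-space key written into an old-space table needs an extra remembered-set entry. The predicate, isolated as a stand-alone sketch:

#include <cstdio>

// Record iff the key (the "value" of the barrier) is young and the table is
// old; this is the negation of the early-return condition in the barrier.
static bool ShouldRecord(bool key_in_young, bool table_in_young) {
  return key_in_young && !table_in_young;
}

int main() {
  std::printf("old table, young key   -> %d\n", ShouldRecord(true, false));   // 1
  std::printf("old table, old key     -> %d\n", ShouldRecord(false, false));  // 0
  std::printf("young table, young key -> %d\n", ShouldRecord(true, true));    // 0
}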
@@ -149,6 +166,15 @@ inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
                                               HeapObject::cast(value));
 }
 
+inline void GenerationalEphemeronKeyBarrier(EphemeronHashTable table,
+                                            ObjectSlot slot, Object value) {
+  DCHECK(!HasWeakHeapObjectTag(*slot));
+  DCHECK(!HasWeakHeapObjectTag(value));
+  DCHECK(value->IsHeapObject());
+  heap_internals::GenerationalEphemeronKeyBarrierInternal(
+      table, slot.address(), HeapObject::cast(value));
+}
+
 inline void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
                                 MaybeObject value) {
   HeapObject value_heap_object;
@@ -18,6 +18,7 @@ class HeapObject;
 class MaybeObject;
 class Object;
 class RelocInfo;
+class EphemeronHashTable;
 
 // Note: In general it is preferred to use the macros defined in
 // object-macros.h.
@@ -37,6 +38,8 @@ void WriteBarrierForCode(Code host);
 void GenerationalBarrier(HeapObject object, ObjectSlot slot, Object value);
 void GenerationalBarrier(HeapObject object, MaybeObjectSlot slot,
                          MaybeObject value);
+void GenerationalEphemeronKeyBarrier(EphemeronHashTable table, ObjectSlot slot,
+                                     Object value);
 void GenerationalBarrierForElements(Heap* heap, FixedArray array, int offset,
                                     int length);
 void GenerationalBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
@@ -3755,6 +3755,11 @@ class SlotVerifyingVisitor : public ObjectVisitor {
     }
   }
 
+ protected:
+  bool InUntypedSet(ObjectSlot slot) {
+    return untyped_->count(slot.address()) > 0;
+  }
+
  private:
   bool InTypedSet(SlotType type, Address slot) {
     return typed_->count(std::make_pair(type, slot)) > 0;
@@ -3766,8 +3771,10 @@ class SlotVerifyingVisitor : public ObjectVisitor {
 class OldToNewSlotVerifyingVisitor : public SlotVerifyingVisitor {
  public:
   OldToNewSlotVerifyingVisitor(std::set<Address>* untyped,
-                               std::set<std::pair<SlotType, Address>>* typed)
-      : SlotVerifyingVisitor(untyped, typed) {}
+                               std::set<std::pair<SlotType, Address>>* typed,
+                               EphemeronRememberedSet* ephemeron_remembered_set)
+      : SlotVerifyingVisitor(untyped, typed),
+        ephemeron_remembered_set_(ephemeron_remembered_set) {}
 
   bool ShouldHaveBeenRecorded(HeapObject host, MaybeObject target) override {
     DCHECK_IMPLIES(target->IsStrongOrWeak() && Heap::InYoungGeneration(target),
|
|||||||
return target->IsStrongOrWeak() && Heap::InYoungGeneration(target) &&
|
return target->IsStrongOrWeak() && Heap::InYoungGeneration(target) &&
|
||||||
!Heap::InYoungGeneration(host);
|
!Heap::InYoungGeneration(host);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void VisitEphemeron(HeapObject host, int index, ObjectSlot key,
|
||||||
|
ObjectSlot target) override {
|
||||||
|
VisitPointer(host, target);
|
||||||
|
if (FLAG_minor_mc) {
|
||||||
|
VisitPointer(host, target);
|
||||||
|
} else {
|
||||||
|
// Keys are handled separately and should never appear in this set.
|
||||||
|
CHECK(!InUntypedSet(key));
|
||||||
|
Object k = *key;
|
||||||
|
if (!ObjectInYoungGeneration(host) && ObjectInYoungGeneration(k)) {
|
||||||
|
EphemeronHashTable table = EphemeronHashTable::cast(host);
|
||||||
|
auto it = ephemeron_remembered_set_->find(table);
|
||||||
|
CHECK(it != ephemeron_remembered_set_->end());
|
||||||
|
int slot_index =
|
||||||
|
EphemeronHashTable::SlotToIndex(table.address(), key.address());
|
||||||
|
int entry = EphemeronHashTable::IndexToEntry(slot_index);
|
||||||
|
CHECK(it->second.find(entry) != it->second.end());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private:
|
||||||
|
EphemeronRememberedSet* ephemeron_remembered_set_;
|
||||||
};
|
};
|
||||||
|
|
||||||
template <RememberedSetType direction>
|
template <RememberedSetType direction>
|
||||||
@ -3812,7 +3843,8 @@ void Heap::VerifyRememberedSetFor(HeapObject object) {
|
|||||||
if (!InYoungGeneration(object)) {
|
if (!InYoungGeneration(object)) {
|
||||||
store_buffer()->MoveAllEntriesToRememberedSet();
|
store_buffer()->MoveAllEntriesToRememberedSet();
|
||||||
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
|
CollectSlots<OLD_TO_NEW>(chunk, start, end, &old_to_new, &typed_old_to_new);
|
||||||
OldToNewSlotVerifyingVisitor visitor(&old_to_new, &typed_old_to_new);
|
OldToNewSlotVerifyingVisitor visitor(&old_to_new, &typed_old_to_new,
|
||||||
|
&this->ephemeron_remembered_set_);
|
||||||
object->IterateBody(&visitor);
|
object->IterateBody(&visitor);
|
||||||
}
|
}
|
||||||
// TODO(ulan): Add old to old slot set verification once all weak objects
|
// TODO(ulan): Add old to old slot set verification once all weak objects
|
||||||
@ -5791,6 +5823,27 @@ void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
|
|||||||
heap->store_buffer()->InsertEntry(slot);
|
heap->store_buffer()->InsertEntry(slot);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Heap::RecordEphemeronKeyWrite(EphemeronHashTable table, Address slot) {
|
||||||
|
DCHECK(ObjectInYoungGeneration(HeapObjectSlot(slot).ToHeapObject()));
|
||||||
|
int slot_index = EphemeronHashTable::SlotToIndex(table.address(), slot);
|
||||||
|
int entry = EphemeronHashTable::IndexToEntry(slot_index);
|
||||||
|
auto it =
|
||||||
|
ephemeron_remembered_set_.insert({table, std::unordered_set<int>()});
|
||||||
|
it.first->second.insert(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
void Heap::EphemeronKeyWriteBarrierFromCode(Address raw_object,
|
||||||
|
Address key_slot_address,
|
||||||
|
Isolate* isolate) {
|
||||||
|
EphemeronHashTable table = EphemeronHashTable::cast(Object(raw_object));
|
||||||
|
if (!ObjectInYoungGeneration(table)) {
|
||||||
|
isolate->heap()->RecordEphemeronKeyWrite(table, key_slot_address);
|
||||||
|
}
|
||||||
|
MaybeObjectSlot key_slot(key_slot_address);
|
||||||
|
isolate->heap()->incremental_marking()->RecordMaybeWeakWrite(table, key_slot,
|
||||||
|
*key_slot);
|
||||||
|
}
|
||||||
|
|
||||||
void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
|
void Heap::GenerationalBarrierForElementsSlow(Heap* heap, FixedArray array,
|
||||||
int offset, int length) {
|
int offset, int length) {
|
||||||
for (int i = 0; i < length; i++) {
|
for (int i = 0; i < length; i++) {
|
||||||
|
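RecordEphemeronKeyWrite leans on std::unordered_map::insert returning the already-present element when the key exists, which makes the two-step call a get-or-create. An isolated demonstration (an int stands in for EphemeronHashTable):

#include <cstdio>
#include <unordered_map>
#include <unordered_set>

int main() {
  std::unordered_map<int, std::unordered_set<int>> remembered_set;

  auto it = remembered_set.insert({42, std::unordered_set<int>()});
  it.first->second.insert(7);  // creates the per-table set, records entry 7

  it = remembered_set.insert({42, std::unordered_set<int>()});
  it.first->second.insert(9);  // insert is a no-op; reuses the set, adds 9

  std::printf("entries for table 42: %zu\n", remembered_set.at(42).size());  // 2
}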
@@ -199,8 +199,16 @@ struct CommentStatistic {
 };
 #endif
 
+using EphemeronRememberedSet =
+    std::unordered_map<EphemeronHashTable, std::unordered_set<int>,
+                       Object::Hasher>;
+
 class Heap {
  public:
+  // Stores ephemeron entries where the EphemeronHashTable is in old-space,
+  // and the key of the entry is in new-space. Such keys do not appear in the
+  // usual OLD_TO_NEW remembered set.
+  EphemeronRememberedSet ephemeron_remembered_set_;
   enum FindMementoMode { kForRuntime, kForGC };
 
   enum HeapState {
@@ -343,6 +351,10 @@ class Heap {
   V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
                                                         Address slot,
                                                         HeapObject value);
+  V8_EXPORT_PRIVATE void RecordEphemeronKeyWrite(EphemeronHashTable table,
+                                                 Address key_slot);
+  V8_EXPORT_PRIVATE static void EphemeronKeyWriteBarrierFromCode(
+      Address raw_object, Address address, Isolate* isolate);
   V8_EXPORT_PRIVATE static void GenerationalBarrierForElementsSlow(
       Heap* heap, FixedArray array, int offset, int length);
   V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
@@ -506,9 +506,7 @@ void MarkCompactCollector::CollectGarbage() {
   RecordObjectStats();
 
   StartSweepSpaces();
-
   Evacuate();
-
   Finish();
 }
 
@@ -1086,8 +1084,11 @@ class MarkCompactWeakObjectRetainer : public WeakObjectRetainer {
 
 class RecordMigratedSlotVisitor : public ObjectVisitor {
  public:
-  explicit RecordMigratedSlotVisitor(MarkCompactCollector* collector)
-      : collector_(collector) {}
+  explicit RecordMigratedSlotVisitor(
+      MarkCompactCollector* collector,
+      EphemeronRememberedSet* ephemeron_remembered_set)
+      : collector_(collector),
+        ephemeron_remembered_set_(ephemeron_remembered_set) {}
 
   inline void VisitPointer(HeapObject host, ObjectSlot p) final {
     DCHECK(!HasWeakHeapObjectTag(*p));
@@ -1114,6 +1115,23 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
     }
   }
 
+  inline void VisitEphemeron(HeapObject host, int index, ObjectSlot key,
+                             ObjectSlot value) override {
+    DCHECK(host->IsEphemeronHashTable());
+    DCHECK(!Heap::InYoungGeneration(host));
+
+    VisitPointer(host, value);
+
+    if (ephemeron_remembered_set_ && Heap::InYoungGeneration(*key)) {
+      auto table = EphemeronHashTable::unchecked_cast(host);
+      auto insert_result =
+          ephemeron_remembered_set_->insert({table, std::unordered_set<int>()});
+      insert_result.first->second.insert(index);
+    } else {
+      VisitPointer(host, key);
+    }
+  }
+
   inline void VisitCodeTarget(Code host, RelocInfo* rinfo) override {
     DCHECK_EQ(host, rinfo->host());
     DCHECK(RelocInfo::IsCodeTargetMode(rinfo->rmode()));
@@ -1157,6 +1175,7 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
   }
 
   MarkCompactCollector* collector_;
+  EphemeronRememberedSet* ephemeron_remembered_set_;
 };
 
 class MigrationObserver {
@@ -1455,7 +1474,8 @@ class EvacuateRecordOnlyVisitor final : public HeapObjectVisitor {
   explicit EvacuateRecordOnlyVisitor(Heap* heap) : heap_(heap) {}
 
   inline bool Visit(HeapObject object, int size) override {
-    RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector());
+    RecordMigratedSlotVisitor visitor(heap_->mark_compact_collector(),
+                                      &heap_->ephemeron_remembered_set_);
     object->IterateBodyFast(&visitor);
     return true;
   }
@@ -2254,6 +2274,14 @@ void MarkCompactCollector::ClearWeakCollections() {
       }
     }
   }
+  for (auto it = heap_->ephemeron_remembered_set_.begin();
+       it != heap_->ephemeron_remembered_set_.end();) {
+    if (!non_atomic_marking_state()->IsBlackOrGrey(it->first)) {
+      it = heap_->ephemeron_remembered_set_.erase(it);
+    } else {
+      ++it;
+    }
+  }
 }
 
 void MarkCompactCollector::ClearWeakReferences() {
@@ -2762,16 +2790,36 @@ void Evacuator::Finalize() {
 
 class FullEvacuator : public Evacuator {
  public:
-  FullEvacuator(MarkCompactCollector* collector,
-                RecordMigratedSlotVisitor* record_visitor)
-      : Evacuator(collector->heap(), record_visitor), collector_(collector) {}
+  explicit FullEvacuator(MarkCompactCollector* collector)
+      : Evacuator(collector->heap(), &record_visitor_),
+        record_visitor_(collector, &ephemeron_remembered_set_),
+        collector_(collector) {}
 
   GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() override {
     return GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_COPY;
   }
 
+  inline void Finalize() {
+    Evacuator::Finalize();
+
+    for (auto it = ephemeron_remembered_set_.begin();
+         it != ephemeron_remembered_set_.end(); ++it) {
+      auto insert_result =
+          heap()->ephemeron_remembered_set_.insert({it->first, it->second});
+      if (!insert_result.second) {
+        // Insertion didn't happen, there was already an item.
+        auto& set = insert_result.first->second;
+        for (int entry : it->second) {
+          set.insert(entry);
+        }
+      }
+    }
+  }
+
  protected:
   void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
+  EphemeronRememberedSet ephemeron_remembered_set_;
+  RecordMigratedSlotVisitor record_visitor_;
 
   MarkCompactCollector* collector_;
 };
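The merge in Finalize() above only works because the already-present set is taken by reference; a plain auto copy would be mutated and then thrown away, silently dropping entries. A stand-alone demonstration of the same merge (int keys stand in for tables; a sketch, not V8 code):

#include <cstdio>
#include <unordered_map>
#include <unordered_set>

int main() {
  std::unordered_map<int, std::unordered_set<int>> heap_set = {{1, {10}}};
  std::unordered_map<int, std::unordered_set<int>> local_set = {{1, {20}}};

  for (const auto& entry : local_set) {
    auto insert_result = heap_set.insert(entry);
    if (!insert_result.second) {
      // Already present: merge into the existing set, by reference.
      auto& set = insert_result.first->second;
      for (int e : entry.second) set.insert(e);
    }
  }
  std::printf("merged entries for table 1: %zu\n", heap_set.at(1).size());  // 2
}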
@@ -2858,7 +2906,6 @@ class PageEvacuationTask : public ItemParallelJob::Task {
 template <class Evacuator, class Collector>
 void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
     Collector* collector, ItemParallelJob* job,
-    RecordMigratedSlotVisitor* record_visitor,
     MigrationObserver* migration_observer, const intptr_t live_bytes) {
   // Used for trace summary.
   double compaction_speed = 0;
@@ -2873,7 +2920,7 @@ void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
       NumberOfParallelCompactionTasks(job->NumberOfItems());
   Evacuator** evacuators = new Evacuator*[wanted_num_tasks];
   for (int i = 0; i < wanted_num_tasks; i++) {
-    evacuators[i] = new Evacuator(collector, record_visitor);
+    evacuators[i] = new Evacuator(collector);
     if (profiling) evacuators[i]->AddObserver(&profiling_observer);
     if (migration_observer != nullptr)
       evacuators[i]->AddObserver(migration_observer);
@@ -2955,9 +3002,8 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
 
   if (evacuation_job.NumberOfItems() == 0) return;
 
-  RecordMigratedSlotVisitor record_visitor(this);
-  CreateAndExecuteEvacuationTasks<FullEvacuator>(
-      this, &evacuation_job, &record_visitor, nullptr, live_bytes);
+  CreateAndExecuteEvacuationTasks<FullEvacuator>(this, &evacuation_job, nullptr,
+                                                 live_bytes);
   PostProcessEvacuationCandidates();
 }
 
@@ -3501,6 +3547,57 @@ int MarkCompactCollector::CollectOldSpaceArrayBufferTrackerItems(
   return pages;
 }
 
+class EphemeronTableUpdatingItem : public UpdatingItem {
+ public:
+  enum EvacuationState { kRegular, kAborted };
+
+  explicit EphemeronTableUpdatingItem(Heap* heap) : heap_(heap) {}
+  ~EphemeronTableUpdatingItem() override = default;
+
+  void Process() override {
+    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
+                 "EphemeronTableUpdatingItem::Process");
+
+    for (auto it = heap_->ephemeron_remembered_set_.begin();
+         it != heap_->ephemeron_remembered_set_.end();) {
+      EphemeronHashTable table = it->first;
+      auto& indices = it->second;
+      if (table.map_word().IsForwardingAddress()) {
+        // The table has moved, and RecordMigratedSlotVisitor::VisitEphemeron
+        // inserts entries for the moved table into ephemeron_remembered_set_.
+        it = heap_->ephemeron_remembered_set_.erase(it);
+        continue;
+      }
+      DCHECK(table.map().IsMap());
+      DCHECK(table.Object::IsEphemeronHashTable());
+      for (auto iti = indices.begin(); iti != indices.end();) {
+        // EphemeronHashTable keys must be heap objects.
+        HeapObjectSlot key_slot(
+            table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
+        HeapObject key = key_slot.ToHeapObject();
+        MapWord map_word = key->map_word();
+        if (map_word.IsForwardingAddress()) {
+          key = map_word.ToForwardingAddress();
+          key_slot.StoreHeapObject(key);
+        }
+        if (!heap_->InYoungGeneration(key)) {
+          iti = indices.erase(iti);
+        } else {
+          ++iti;
+        }
+      }
+      if (indices.size() == 0) {
+        it = heap_->ephemeron_remembered_set_.erase(it);
+      } else {
+        ++it;
+      }
+    }
+  }
+
+ private:
+  Heap* const heap_;
+};
+
 void MarkCompactCollector::UpdatePointersAfterEvacuation() {
   TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_UPDATE_POINTERS);
 
@@ -3533,12 +3630,16 @@ void MarkCompactCollector::UpdatePointersAfterEvacuation() {
           : NumberOfParallelPointerUpdateTasks(remembered_set_pages,
                                                old_to_new_slots_);
   const int to_space_tasks = CollectToSpaceUpdatingItems(&updating_job);
-  const int num_tasks = Max(to_space_tasks, remembered_set_tasks);
+  const int num_ephemeron_table_updating_tasks = 1;
+  const int num_tasks =
+      Max(to_space_tasks,
+          remembered_set_tasks + num_ephemeron_table_updating_tasks);
   for (int i = 0; i < num_tasks; i++) {
     updating_job.AddTask(new PointersUpdatingTask(
         isolate(),
         GCTracer::BackgroundScope::MC_BACKGROUND_EVACUATE_UPDATE_POINTERS));
   }
+  updating_job.AddItem(new EphemeronTableUpdatingItem(heap()));
   updating_job.Run();
 }
@@ -4026,7 +4127,7 @@ class YoungGenerationRecordMigratedSlotVisitor final
  public:
   explicit YoungGenerationRecordMigratedSlotVisitor(
       MarkCompactCollector* collector)
-      : RecordMigratedSlotVisitor(collector) {}
+      : RecordMigratedSlotVisitor(collector, nullptr) {}
 
   void VisitCodeTarget(Code host, RelocInfo* rinfo) final { UNREACHABLE(); }
   void VisitEmbeddedPointer(Code host, RelocInfo* rinfo) final {
|
|||||||
|
|
||||||
class YoungGenerationEvacuator : public Evacuator {
|
class YoungGenerationEvacuator : public Evacuator {
|
||||||
public:
|
public:
|
||||||
YoungGenerationEvacuator(MinorMarkCompactCollector* collector,
|
explicit YoungGenerationEvacuator(MinorMarkCompactCollector* collector)
|
||||||
RecordMigratedSlotVisitor* record_visitor)
|
: Evacuator(collector->heap(), &record_visitor_),
|
||||||
: Evacuator(collector->heap(), record_visitor), collector_(collector) {}
|
record_visitor_(collector->heap()->mark_compact_collector()),
|
||||||
|
collector_(collector) {}
|
||||||
|
|
||||||
GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() override {
|
GCTracer::BackgroundScope::ScopeId GetBackgroundTracingScope() override {
|
||||||
return GCTracer::BackgroundScope::MINOR_MC_BACKGROUND_EVACUATE_COPY;
|
return GCTracer::BackgroundScope::MINOR_MC_BACKGROUND_EVACUATE_COPY;
|
||||||
@ -4661,6 +4763,7 @@ class YoungGenerationEvacuator : public Evacuator {
|
|||||||
protected:
|
protected:
|
||||||
void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
|
void RawEvacuatePage(MemoryChunk* chunk, intptr_t* live_bytes) override;
|
||||||
|
|
||||||
|
YoungGenerationRecordMigratedSlotVisitor record_visitor_;
|
||||||
MinorMarkCompactCollector* collector_;
|
MinorMarkCompactCollector* collector_;
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -4766,10 +4869,8 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
|
|||||||
|
|
||||||
YoungGenerationMigrationObserver observer(heap(),
|
YoungGenerationMigrationObserver observer(heap(),
|
||||||
heap()->mark_compact_collector());
|
heap()->mark_compact_collector());
|
||||||
YoungGenerationRecordMigratedSlotVisitor record_visitor(
|
|
||||||
heap()->mark_compact_collector());
|
|
||||||
CreateAndExecuteEvacuationTasks<YoungGenerationEvacuator>(
|
CreateAndExecuteEvacuationTasks<YoungGenerationEvacuator>(
|
||||||
this, &evacuation_job, &record_visitor, &observer, live_bytes);
|
this, &evacuation_job, &observer, live_bytes);
|
||||||
}
|
}
|
||||||
|
|
||||||
int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
|
int MinorMarkCompactCollector::CollectNewSpaceArrayBufferTrackerItems(
|
||||||
|
@ -273,10 +273,10 @@ class MarkCompactCollectorBase {
|
|||||||
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
|
MemoryChunk* chunk, RememberedSetUpdatingMode updating_mode) = 0;
|
||||||
|
|
||||||
template <class Evacuator, class Collector>
|
template <class Evacuator, class Collector>
|
||||||
void CreateAndExecuteEvacuationTasks(
|
void CreateAndExecuteEvacuationTasks(Collector* collector,
|
||||||
Collector* collector, ItemParallelJob* job,
|
ItemParallelJob* job,
|
||||||
RecordMigratedSlotVisitor* record_visitor,
|
MigrationObserver* migration_observer,
|
||||||
MigrationObserver* migration_observer, const intptr_t live_bytes);
|
const intptr_t live_bytes);
|
||||||
|
|
||||||
// Returns whether this page should be moved according to heuristics.
|
// Returns whether this page should be moved according to heuristics.
|
||||||
bool ShouldMovePage(Page* p, intptr_t live_bytes);
|
bool ShouldMovePage(Page* p, intptr_t live_bytes);
|
||||||
|
@@ -98,6 +98,20 @@ class IterateAndScavengePromotedObjectsVisitor final : public ObjectVisitor {
     HandleSlot(host, FullHeapObjectSlot(&heap_object), heap_object);
   }
 
+  inline void VisitEphemeron(HeapObject obj, int entry, ObjectSlot key,
+                             ObjectSlot value) override {
+    DCHECK(Heap::IsLargeObject(obj) || obj->IsEphemeronHashTable());
+    VisitPointer(obj, value);
+
+    if (ObjectInYoungGeneration(*key)) {
+      // We cannot check the map here, as it might be a large object.
+      scavenger_->RememberPromotedEphemeron(
+          EphemeronHashTable::unchecked_cast(obj), entry);
+    } else {
+      VisitPointer(obj, key);
+    }
+  }
+
  private:
   template <typename TSlot>
   V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
|
|||||||
target->IterateBodyFast(map, size, &visitor);
|
target->IterateBodyFast(map, size, &visitor);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Scavenger::RememberPromotedEphemeron(EphemeronHashTable table, int entry) {
|
||||||
|
auto indices =
|
||||||
|
ephemeron_remembered_set_.insert({table, std::unordered_set<int>()});
|
||||||
|
indices.first->second.insert(entry);
|
||||||
|
}
|
||||||
|
|
||||||
void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
|
void Scavenger::AddPageToSweeperIfNecessary(MemoryChunk* page) {
|
||||||
AllocationSpace space = page->owner()->identity();
|
AllocationSpace space = page->owner()->identity();
|
||||||
if ((space == OLD_SPACE) && !page->SweepingDone()) {
|
if ((space == OLD_SPACE) && !page->SweepingDone()) {
|
||||||
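
Aside: RememberPromotedEphemeron relies on std::unordered_map::insert
returning an {iterator, inserted} pair: if the table is already present its
existing index set is reused, otherwise an empty set is created, and the
entry index is added either way. Scavenger::Finalize (below) merges the
per-scavenger sets into the heap-global one with the same idiom. A
self-contained sketch, with an int standing in for EphemeronHashTable:

#include <cassert>
#include <unordered_map>
#include <unordered_set>

using TableId = int;  // stand-in for EphemeronHashTable
using EphemeronRememberedSet =
    std::unordered_map<TableId, std::unordered_set<int>>;

void RememberPromotedEphemeron(EphemeronRememberedSet& set, TableId table,
                               int entry) {
  // insert() is a no-op when the key exists; either way, .first points at
  // the slot for `table` and the index is added to its set.
  auto indices = set.insert({table, std::unordered_set<int>()});
  indices.first->second.insert(entry);
}

int main() {
  EphemeronRememberedSet set;
  RememberPromotedEphemeron(set, 7, 1);
  RememberPromotedEphemeron(set, 7, 3);  // same table: the set is reused
  assert(set.size() == 1);
  assert(set[7].count(1) == 1 && set[7].count(3) == 1);
  return 0;
}
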
@ -460,29 +480,64 @@ void ScavengerCollector::ProcessWeakReferences(
|
|||||||
ScavengeWeakObjectRetainer weak_object_retainer;
|
ScavengeWeakObjectRetainer weak_object_retainer;
|
||||||
heap_->ProcessYoungWeakReferences(&weak_object_retainer);
|
heap_->ProcessYoungWeakReferences(&weak_object_retainer);
|
||||||
ClearYoungEphemerons(ephemeron_table_list);
|
ClearYoungEphemerons(ephemeron_table_list);
|
||||||
|
ClearOldEphemerons();
|
||||||
}
|
}
|
||||||
|
|
||||||
// Clears ephemerons contained in {EphemeronHashTable}s in young generation.
|
// Clear ephemeron entries from EphemeronHashTables in new-space whenever the
|
||||||
|
// entry has a dead new-space key.
|
||||||
void ScavengerCollector::ClearYoungEphemerons(
|
void ScavengerCollector::ClearYoungEphemerons(
|
||||||
EphemeronTableList* ephemeron_table_list) {
|
EphemeronTableList* ephemeron_table_list) {
|
||||||
ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
|
ephemeron_table_list->Iterate([this](EphemeronHashTable table) {
|
||||||
for (int i = 0; i < table->Capacity(); i++) {
|
for (int i = 0; i < table->Capacity(); i++) {
|
||||||
ObjectSlot key_slot =
|
// Keys in EphemeronHashTables must be heap objects.
|
||||||
table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i));
|
HeapObjectSlot key_slot(
|
||||||
Object key = *key_slot;
|
table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(i)));
|
||||||
if (key->IsHeapObject()) {
|
HeapObject key = key_slot.ToHeapObject();
|
||||||
if (IsUnscavengedHeapObject(heap_, HeapObject::cast(key))) {
|
if (IsUnscavengedHeapObject(heap_, key)) {
|
||||||
table->RemoveEntry(i);
|
table->RemoveEntry(i);
|
||||||
} else {
|
} else {
|
||||||
HeapObject forwarded = ForwardingAddress(HeapObject::cast(key));
|
HeapObject forwarded = ForwardingAddress(key);
|
||||||
HeapObjectReference::Update(HeapObjectSlot(key_slot), forwarded);
|
key_slot.StoreHeapObject(forwarded);
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
ephemeron_table_list->Clear();
|
ephemeron_table_list->Clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Clear ephemeron entries from EphemeronHashTables in old-space whenever the
|
||||||
|
// entry has a dead new-space key.
|
||||||
|
void ScavengerCollector::ClearOldEphemerons() {
|
||||||
|
for (auto it = heap_->ephemeron_remembered_set_.begin();
|
||||||
|
it != heap_->ephemeron_remembered_set_.end();) {
|
||||||
|
EphemeronHashTable table = it->first;
|
||||||
|
auto& indices = it->second;
|
||||||
|
for (auto iti = indices.begin(); iti != indices.end();) {
|
||||||
|
// Keys in EphemeronHashTables must be heap objects.
|
||||||
|
HeapObjectSlot key_slot(
|
||||||
|
table->RawFieldOfElementAt(EphemeronHashTable::EntryToIndex(*iti)));
|
||||||
|
HeapObject key = key_slot.ToHeapObject();
|
||||||
|
if (IsUnscavengedHeapObject(heap_, key)) {
|
||||||
|
table->RemoveEntry(*iti);
|
||||||
|
iti = indices.erase(iti);
|
||||||
|
} else {
|
||||||
|
HeapObject forwarded = ForwardingAddress(key);
|
||||||
|
key_slot.StoreHeapObject(forwarded);
|
||||||
|
if (!Heap::InYoungGeneration(forwarded)) {
|
||||||
|
iti = indices.erase(iti);
|
||||||
|
} else {
|
||||||
|
++iti;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (indices.size() == 0) {
|
||||||
|
it = heap_->ephemeron_remembered_set_.erase(it);
|
||||||
|
} else {
|
||||||
|
++it;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
void Scavenger::Finalize() {
|
void Scavenger::Finalize() {
|
||||||
heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
|
heap()->MergeAllocationSitePretenuringFeedback(local_pretenuring_feedback_);
|
||||||
heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
|
heap()->IncrementSemiSpaceCopiedObjectSize(copied_size_);
|
||||||
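
Aside: ClearOldEphemerons erases from both the inner index set and the outer
remembered set while iterating them. That is safe only because
erase(iterator) on unordered containers returns the successor, so every path
through the loop advances via either erase or ++, never both. The pattern in
isolation:

#include <cassert>
#include <unordered_set>

int main() {
  std::unordered_set<int> indices = {1, 2, 3, 4};
  for (auto it = indices.begin(); it != indices.end();) {
    if (*it % 2 == 0) {
      it = indices.erase(it);  // erase returns the next valid iterator
    } else {
      ++it;  // only advance manually when nothing was erased
    }
  }
  assert(indices.count(2) == 0 && indices.count(4) == 0);
  assert(indices.count(1) == 1 && indices.count(3) == 1);
  return 0;
}
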
@@ -490,6 +545,14 @@ void Scavenger::Finalize() {
   collector_->MergeSurvivingNewLargeObjects(surviving_new_large_objects_);
   allocator_.Finalize();
   ephemeron_table_list_.FlushToGlobal();
+  for (auto it = ephemeron_remembered_set_.begin();
+       it != ephemeron_remembered_set_.end(); ++it) {
+    auto insert_result = heap()->ephemeron_remembered_set_.insert(
+        {it->first, std::unordered_set<int>()});
+    for (int entry : it->second) {
+      insert_result.first->second.insert(entry);
+    }
+  }
 }

 void Scavenger::AddEphemeronHashTable(EphemeronHashTable table) {

@@ -48,6 +48,7 @@ class ScavengerCollector {

   void ProcessWeakReferences(EphemeronTableList* ephemeron_table_list);
   void ClearYoungEphemerons(EphemeronTableList* ephemeron_table_list);
+  void ClearOldEphemerons();
   void HandleSurvivingNewLargeObjects();

   Isolate* const isolate_;

@@ -201,6 +202,7 @@ class Scavenger {
                             int object_size);

   void IterateAndScavengePromotedObject(HeapObject target, Map map, int size);
+  void RememberPromotedEphemeron(EphemeronHashTable table, int index);

   ScavengerCollector* const collector_;
   Heap* const heap_;

@@ -212,6 +214,8 @@ class Scavenger {
   size_t promoted_size_;
   LocalAllocator allocator_;
   SurvivingNewLargeObjectsMap surviving_new_large_objects_;
+
+  EphemeronRememberedSet ephemeron_remembered_set_;
   const bool is_logging_;
   const bool is_incremental_marking_;
   const bool is_compacting_;
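
Aside: the definition of EphemeronRememberedSet is not part of this excerpt.
Its uses here (insert with an empty std::unordered_set<int>, iteration
yielding table/index pairs) imply a shape along these lines; this is a
sketch, with an opaque pointer standing in for EphemeronHashTable and its
hasher:

#include <unordered_map>
#include <unordered_set>

using TableRef = const void*;  // stand-in for EphemeronHashTable

// One slot per promoted table, holding the indices of entries whose keys
// were still in the young generation when the table was visited.
using EphemeronRememberedSet =
    std::unordered_map<TableRef, std::unordered_set<int>>;

int main() {
  EphemeronRememberedSet set;
  int table = 0;          // pretend this is an EphemeronHashTable
  set[&table].insert(3);  // entry 3 of that table has a young key
  return set.count(&table) == 1 ? 0 : 1;
}
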
@@ -36,6 +36,19 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
   data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
 }

+void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  static const Register default_stub_registers[] = {ecx, edx, esi, edi,
+                                                    kReturnRegister0};
+
+  data->RestrictAllocatableRegisters(default_stub_registers,
+                                     arraysize(default_stub_registers));
+
+  CHECK_LE(static_cast<size_t>(kParameterCount),
+           arraysize(default_stub_registers));
+  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
+}
+
 const Register FastNewFunctionContextDescriptor::ScopeInfoRegister() {
   return edi;
 }

@@ -381,6 +381,33 @@ void TurboAssembler::RestoreRegisters(RegList registers) {
   }
 }

+void TurboAssembler::CallEphemeronKeyBarrier(Register object, Register address,
+                                             SaveFPRegsMode fp_mode) {
+  EphemeronKeyBarrierDescriptor descriptor;
+  RegList registers = descriptor.allocatable_registers();
+
+  SaveRegisters(registers);
+
+  Register object_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kObject));
+  Register slot_parameter(descriptor.GetRegisterParameter(
+      EphemeronKeyBarrierDescriptor::kSlotAddress));
+  Register fp_mode_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kFPMode));
+
+  push(object);
+  push(address);
+
+  pop(slot_parameter);
+  pop(object_parameter);
+
+  Move(fp_mode_parameter, Smi::FromEnum(fp_mode));
+  Call(isolate()->builtins()->builtin_handle(Builtins::kEphemeronKeyBarrier),
+       RelocInfo::CODE_TARGET);
+
+  RestoreRegisters(registers);
+}
+
 void TurboAssembler::CallRecordWriteStub(
     Register object, Register address,
     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
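
Aside: ia32 routes the object/address pair through the stack: push object,
push address, then pop into slot_parameter and object_parameter. Because both
values are on the stack before either destination register is written, the
sequence stays correct even if the descriptor happens to assign
slot_parameter the register currently holding object. The ordering, modeled
with a stack of ints:

#include <cassert>
#include <stack>

int main() {
  int object = 10, address = 20;
  int object_parameter = 0, slot_parameter = 0;
  std::stack<int> s;
  s.push(object);   // push(object)
  s.push(address);  // push(address)
  slot_parameter = s.top();    // pop(slot_parameter): last pushed = address
  s.pop();
  object_parameter = s.top();  // pop(object_parameter): first pushed = object
  s.pop();
  assert(object_parameter == 10 && slot_parameter == 20);
  return 0;
}
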
@@ -389,6 +389,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void CallRecordWriteStub(Register object, Register address,
                            RememberedSetAction remembered_set_action,
                            SaveFPRegsMode fp_mode, Address wasm_target);
+  void CallEphemeronKeyBarrier(Register object, Register address,
+                               SaveFPRegsMode fp_mode);

   // Calculate how much stack space (in bytes) are required to store caller
   // registers excluding those specified in the arguments.

@@ -46,6 +46,7 @@ namespace internal {
   V(ConstructWithSpread)          \
   V(ContextOnly)                  \
   V(CppBuiltinAdaptor)            \
+  V(EphemeronKeyBarrier)          \
   V(FastNewFunctionContext)       \
   V(FastNewObject)                \
   V(FrameDropperTrampoline)       \

@@ -728,6 +729,16 @@ class RecordWriteDescriptor final : public CallInterfaceDescriptor {
   DECLARE_DESCRIPTOR(RecordWriteDescriptor, CallInterfaceDescriptor)
 };

+class EphemeronKeyBarrierDescriptor final : public CallInterfaceDescriptor {
+ public:
+  DEFINE_PARAMETERS_NO_CONTEXT(kObject, kSlotAddress, kFPMode)
+  DEFINE_PARAMETER_TYPES(MachineType::TaggedPointer(),  // kObject
+                         MachineType::Pointer(),        // kSlotAddress
+                         MachineType::TaggedSigned())   // kFPMode
+
+  DECLARE_DESCRIPTOR(EphemeronKeyBarrierDescriptor, CallInterfaceDescriptor)
+};
+
 class TypeConversionDescriptor final : public CallInterfaceDescriptor {
  public:
   DEFINE_PARAMETERS(kArgument)
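
Aside: kFPMode is typed MachineType::TaggedSigned() because the flag is
passed as a Smi (Smi::FromEnum in the assembler hunks), so the stub receives
it in an ordinary tagged register. A minimal model of the classic Smi tagging
this relies on, assuming the 32-bit scheme where the payload is shifted left
one bit and tag bit 0 stays clear (64-bit and pointer-compressed builds use a
wider shift):

#include <cassert>
#include <cstdint>

constexpr std::intptr_t SmiFromInt(int value) {
  return static_cast<std::intptr_t>(value) << 1;  // payload << 1, tag bit 0
}
constexpr int SmiToInt(std::intptr_t smi) {
  return static_cast<int>(smi >> 1);
}

enum SaveFPRegsMode { kDontSaveFPRegs = 0, kSaveFPRegs = 1 };

int main() {
  std::intptr_t tagged = SmiFromInt(kSaveFPRegs);  // like Smi::FromEnum
  assert((tagged & 1) == 0);  // tag bit still marks it as a Smi
  assert(SmiToInt(tagged) == kSaveFPRegs);
  return 0;
}
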
@@ -125,7 +125,6 @@ class FlexibleBodyDescriptor final : public BodyDescriptorBase {
   static inline int SizeOf(Map map, HeapObject object);
 };

-
 typedef FlexibleBodyDescriptor<HeapObject::kHeaderSize> StructBodyDescriptor;

 template <int start_offset>

@@ -6617,7 +6617,8 @@ void HashTable<Derived, Shape>::Rehash(ReadOnlyRoots roots, Derived new_table) {
       uint32_t hash = Shape::HashForObject(roots, k);
       uint32_t insertion_index =
           EntryToIndex(new_table->FindInsertionEntry(hash));
-      for (int j = 0; j < Shape::kEntrySize; j++) {
+      new_table->set_key(insertion_index, get(from_index), mode);
+      for (int j = 1; j < Shape::kEntrySize; j++) {
         new_table->set(insertion_index + j, get(from_index + j), mode);
       }
     }

@@ -6645,13 +6646,16 @@ void HashTable<Derived, Shape>::Swap(uint32_t entry1, uint32_t entry2,
   int index1 = EntryToIndex(entry1);
   int index2 = EntryToIndex(entry2);
   Object temp[Shape::kEntrySize];
+  Derived* self = static_cast<Derived*>(this);
   for (int j = 0; j < Shape::kEntrySize; j++) {
     temp[j] = get(index1 + j);
   }
-  for (int j = 0; j < Shape::kEntrySize; j++) {
+  self->set_key(index1, get(index2), mode);
+  for (int j = 1; j < Shape::kEntrySize; j++) {
     set(index1 + j, get(index2 + j), mode);
   }
-  for (int j = 0; j < Shape::kEntrySize; j++) {
+  self->set_key(index2, temp[0], mode);
+  for (int j = 1; j < Shape::kEntrySize; j++) {
     set(index2 + j, temp[j], mode);
   }
 }

@@ -6687,10 +6691,12 @@ void HashTable<Derived, Shape>::Rehash(ReadOnlyRoots roots) {
   }
   // Wipe deleted entries.
   Object the_hole = roots.the_hole_value();
-  Object undefined = roots.undefined_value();
+  HeapObject undefined = roots.undefined_value();
+  Derived* self = static_cast<Derived*>(this);
   for (uint32_t current = 0; current < capacity; current++) {
     if (KeyAt(current) == the_hole) {
-      set(EntryToIndex(current) + kEntryKeyIndex, undefined);
+      self->set_key(EntryToIndex(current) + kEntryKeyIndex, undefined,
+                    SKIP_WRITE_BARRIER);
     }
   }
   SetNumberOfDeletedElements(0);

@@ -6780,7 +6786,6 @@ uint32_t HashTable<Derived, Shape>::FindInsertionEntry(uint32_t hash) {
   return entry;
 }

-
 // This class is used for looking up two character strings in the string table.
 // If we don't have a hit we don't want to waste much time so we unroll the
 // string hash calculation loop here for speed. Doesn't work if the two

@@ -7965,7 +7970,7 @@ Handle<Derived> ObjectHashTableBase<Derived, Shape>::Put(Isolate* isolate,

   // Key is already in table, just overwrite value.
   if (entry != kNotFound) {
-    table->set(Derived::EntryToIndex(entry) + 1, *value);
+    table->set(Derived::EntryToValueIndex(entry), *value);
     return table;
   }

@@ -8030,15 +8035,16 @@ Handle<Derived> ObjectHashTableBase<Derived, Shape>::Remove(
 template <typename Derived, typename Shape>
 void ObjectHashTableBase<Derived, Shape>::AddEntry(int entry, Object key,
                                                    Object value) {
-  this->set(Derived::EntryToIndex(entry), key);
-  this->set(Derived::EntryToIndex(entry) + 1, value);
-  this->ElementAdded();
+  Derived* self = static_cast<Derived*>(this);
+  self->set_key(Derived::EntryToIndex(entry), key);
+  self->set(Derived::EntryToValueIndex(entry), value);
+  self->ElementAdded();
 }

 template <typename Derived, typename Shape>
 void ObjectHashTableBase<Derived, Shape>::RemoveEntry(int entry) {
   this->set_the_hole(Derived::EntryToIndex(entry));
-  this->set_the_hole(Derived::EntryToIndex(entry) + 1);
+  this->set_the_hole(Derived::EntryToValueIndex(entry));
   this->ElementRemoved();
 }

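
Aside: the pattern behind these hunks is that every key write now funnels
through set_key, and the base class routes the call through
Derived* self = static_cast<Derived*>(this). EphemeronHashTable's shadowing
overload (which applies the ephemeron key barrier) is therefore selected at
compile time, while every other table keeps the plain FixedArray store. The
dispatch in isolation, with stand-in types:

#include <iostream>

template <class Derived>
struct HashTableLike {
  void AddEntry() {
    auto* self = static_cast<Derived*>(this);
    self->set_key();  // resolves against Derived, not HashTableLike
  }
  void set_key() { std::cout << "plain store\n"; }
};

struct ObjectTable : HashTableLike<ObjectTable> {};

struct EphemeronTable : HashTableLike<EphemeronTable> {
  // Shadows the base overload; chosen statically for ephemeron tables.
  void set_key() { std::cout << "store + ephemeron key barrier\n"; }
};

int main() {
  ObjectTable().AddEntry();     // prints: plain store
  EphemeronTable().AddEntry();  // prints: store + ephemeron key barrier
  return 0;
}
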
@@ -193,10 +193,10 @@ class PropertyDescriptorObject;
 enum WriteBarrierMode {
   SKIP_WRITE_BARRIER,
   UPDATE_WEAK_WRITE_BARRIER,
+  UPDATE_EPHEMERON_KEY_WRITE_BARRIER,
   UPDATE_WRITE_BARRIER
 };

-
 // PropertyNormalizationMode is used to specify whether to keep
 // inobject properties when normalizing properties of a JSObject.
 enum PropertyNormalizationMode {

@@ -49,6 +49,27 @@ CAST_ACCESSOR(ObjectHashTable)
 CAST_ACCESSOR(EphemeronHashTable)
 CAST_ACCESSOR(ObjectHashSet)

+void EphemeronHashTable::set_key(int index, Object value) {
+  DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
+  DCHECK(IsEphemeronHashTable());
+  DCHECK_GE(index, 0);
+  DCHECK_LT(index, this->length());
+  int offset = kHeaderSize + index * kTaggedSize;
+  RELAXED_WRITE_FIELD(*this, offset, value);
+  EPHEMERON_KEY_WRITE_BARRIER(*this, offset, value);
+}
+
+void EphemeronHashTable::set_key(int index, Object value,
+                                 WriteBarrierMode mode) {
+  DCHECK_NE(GetReadOnlyRoots().fixed_cow_array_map(), map());
+  DCHECK(IsEphemeronHashTable());
+  DCHECK_GE(index, 0);
+  DCHECK_LT(index, this->length());
+  int offset = kHeaderSize + index * kTaggedSize;
+  RELAXED_WRITE_FIELD(*this, offset, value);
+  CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(*this, offset, value, mode);
+}
+
 int HashTableBase::NumberOfElements() const {
   return Smi::ToInt(get(kNumberOfElementsIndex));
 }

@@ -143,6 +164,19 @@ bool HashTable<Derived, Shape>::ToKey(ReadOnlyRoots roots, int entry,
   return true;
 }

+template <typename Derived, typename Shape>
+void HashTable<Derived, Shape>::set_key(int index, Object value) {
+  DCHECK(!IsEphemeronHashTable());
+  FixedArray::set(index, value);
+}
+
+template <typename Derived, typename Shape>
+void HashTable<Derived, Shape>::set_key(int index, Object value,
+                                        WriteBarrierMode mode) {
+  DCHECK(!IsEphemeronHashTable());
+  FixedArray::set(index, value, mode);
+}
+
 template <typename KeyT>
 bool BaseShape<KeyT>::IsKey(ReadOnlyRoots roots, Object key) {
   return IsLive(roots, key);

@@ -183,6 +183,16 @@ class HashTable : public HashTableBase {
     return (entry * kEntrySize) + kElementsStartIndex;
   }

+  // Returns the index for an entry (of the key)
+  static constexpr inline int IndexToEntry(int index) {
+    return (index - kElementsStartIndex) / kEntrySize;
+  }
+
+  // Returns the index for a slot address in the object.
+  static constexpr inline int SlotToIndex(Address object, Address slot) {
+    return static_cast<int>((slot - object - kHeaderSize) / kTaggedSize);
+  }
+
   // Ensure enough space for n additional elements.
   V8_WARN_UNUSED_RESULT static Handle<Derived> EnsureCapacity(
       Isolate* isolate, Handle<Derived> table, int n,
||||||
@ -205,6 +215,9 @@ class HashTable : public HashTableBase {
|
|||||||
V8_WARN_UNUSED_RESULT static Handle<Derived> Shrink(
|
V8_WARN_UNUSED_RESULT static Handle<Derived> Shrink(
|
||||||
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
|
Isolate* isolate, Handle<Derived> table, int additionalCapacity = 0);
|
||||||
|
|
||||||
|
inline void set_key(int index, Object value);
|
||||||
|
inline void set_key(int index, Object value, WriteBarrierMode mode);
|
||||||
|
|
||||||
private:
|
private:
|
||||||
// Ensure that kMaxRegularCapacity yields a non-large object dictionary.
|
// Ensure that kMaxRegularCapacity yields a non-large object dictionary.
|
||||||
STATIC_ASSERT(EntryToIndex(kMaxRegularCapacity) < kMaxRegularLength);
|
STATIC_ASSERT(EntryToIndex(kMaxRegularCapacity) < kMaxRegularLength);
|
||||||
@ -347,6 +360,10 @@ class EphemeronHashTable
|
|||||||
protected:
|
protected:
|
||||||
friend class MarkCompactCollector;
|
friend class MarkCompactCollector;
|
||||||
friend class ScavengerCollector;
|
friend class ScavengerCollector;
|
||||||
|
friend class HashTable<EphemeronHashTable, EphemeronHashTableShape>;
|
||||||
|
friend class ObjectHashTableBase<EphemeronHashTable, EphemeronHashTableShape>;
|
||||||
|
inline void set_key(int index, Object value);
|
||||||
|
inline void set_key(int index, Object value, WriteBarrierMode mode);
|
||||||
|
|
||||||
OBJECT_CONSTRUCTORS(
|
OBJECT_CONSTRUCTORS(
|
||||||
EphemeronHashTable,
|
EphemeronHashTable,
|
||||||
|
@ -290,9 +290,18 @@
|
|||||||
GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
|
GenerationalBarrier(object, (object)->RawMaybeWeakField(offset), value); \
|
||||||
} while (false)
|
} while (false)
|
||||||
|
|
||||||
|
#define EPHEMERON_KEY_WRITE_BARRIER(object, offset, value) \
|
||||||
|
do { \
|
||||||
|
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
||||||
|
EphemeronHashTable table = EphemeronHashTable::cast(object); \
|
||||||
|
MarkingBarrier(object, (object)->RawField(offset), value); \
|
||||||
|
GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), value); \
|
||||||
|
} while (false)
|
||||||
|
|
||||||
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
|
#define CONDITIONAL_WRITE_BARRIER(object, offset, value, mode) \
|
||||||
do { \
|
do { \
|
||||||
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
||||||
|
DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
|
||||||
if (mode != SKIP_WRITE_BARRIER) { \
|
if (mode != SKIP_WRITE_BARRIER) { \
|
||||||
if (mode == UPDATE_WRITE_BARRIER) { \
|
if (mode == UPDATE_WRITE_BARRIER) { \
|
||||||
MarkingBarrier(object, (object)->RawField(offset), value); \
|
MarkingBarrier(object, (object)->RawField(offset), value); \
|
||||||
@ -304,6 +313,7 @@
|
|||||||
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
|
#define CONDITIONAL_WEAK_WRITE_BARRIER(object, offset, value, mode) \
|
||||||
do { \
|
do { \
|
||||||
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
||||||
|
DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
|
||||||
if (mode != SKIP_WRITE_BARRIER) { \
|
if (mode != SKIP_WRITE_BARRIER) { \
|
||||||
if (mode == UPDATE_WRITE_BARRIER) { \
|
if (mode == UPDATE_WRITE_BARRIER) { \
|
||||||
MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
|
MarkingBarrier(object, (object)->RawMaybeWeakField(offset), value); \
|
||||||
@ -312,6 +322,20 @@
|
|||||||
} \
|
} \
|
||||||
} while (false)
|
} while (false)
|
||||||
|
|
||||||
|
#define CONDITIONAL_EPHEMERON_KEY_WRITE_BARRIER(object, offset, value, mode) \
|
||||||
|
do { \
|
||||||
|
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
|
||||||
|
DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
|
||||||
|
EphemeronHashTable table = EphemeronHashTable::cast(object); \
|
||||||
|
if (mode != SKIP_WRITE_BARRIER) { \
|
||||||
|
if (mode == UPDATE_WRITE_BARRIER) { \
|
||||||
|
MarkingBarrier(object, (object)->RawField(offset), value); \
|
||||||
|
} \
|
||||||
|
GenerationalEphemeronKeyBarrier(table, (object)->RawField(offset), \
|
||||||
|
value); \
|
||||||
|
} \
|
||||||
|
} while (false)
|
||||||
|
|
||||||
#define READ_DOUBLE_FIELD(p, offset) ReadDoubleValue(FIELD_ADDR(p, offset))
|
#define READ_DOUBLE_FIELD(p, offset) ReadDoubleValue(FIELD_ADDR(p, offset))
|
||||||
|
|
||||||
#define WRITE_DOUBLE_FIELD(p, offset, value) \
|
#define WRITE_DOUBLE_FIELD(p, offset, value) \
|
@@ -35,6 +35,19 @@ void RecordWriteDescriptor::InitializePlatformSpecific(
   data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
 }

+void EphemeronKeyBarrierDescriptor::InitializePlatformSpecific(
+    CallInterfaceDescriptorData* data) {
+  const Register default_stub_registers[] = {arg_reg_1, arg_reg_2, arg_reg_3,
+                                             arg_reg_4, kReturnRegister0};
+
+  data->RestrictAllocatableRegisters(default_stub_registers,
+                                     arraysize(default_stub_registers));
+
+  CHECK_LE(static_cast<size_t>(kParameterCount),
+           arraysize(default_stub_registers));
+  data->InitializePlatformSpecific(kParameterCount, default_stub_registers);
+}
+
 const Register FastNewFunctionContextDescriptor::ScopeInfoRegister() {
   return rdi;
 }

@@ -386,6 +386,29 @@ void TurboAssembler::RestoreRegisters(RegList registers) {
   }
 }

+void TurboAssembler::CallEphemeronKeyBarrier(Register object, Register address,
+                                             SaveFPRegsMode fp_mode) {
+  EphemeronKeyBarrierDescriptor descriptor;
+  RegList registers = descriptor.allocatable_registers();
+
+  SaveRegisters(registers);
+
+  Register object_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kObject));
+  Register slot_parameter(descriptor.GetRegisterParameter(
+      EphemeronKeyBarrierDescriptor::kSlotAddress));
+  Register fp_mode_parameter(
+      descriptor.GetRegisterParameter(EphemeronKeyBarrierDescriptor::kFPMode));
+
+  MovePair(slot_parameter, address, object_parameter, object);
+  Smi smi_fm = Smi::FromEnum(fp_mode);
+  Move(fp_mode_parameter, smi_fm);
+  Call(isolate()->builtins()->builtin_handle(Builtins::kEphemeronKeyBarrier),
+       RelocInfo::CODE_TARGET);
+
+  RestoreRegisters(registers);
+}
+
 void TurboAssembler::CallRecordWriteStub(
     Register object, Register address,
     RememberedSetAction remembered_set_action, SaveFPRegsMode fp_mode) {
@@ -441,6 +441,8 @@ class V8_EXPORT_PRIVATE TurboAssembler : public TurboAssemblerBase {
   void CallRecordWriteStub(Register object, Register address,
                            RememberedSetAction remembered_set_action,
                            SaveFPRegsMode fp_mode, Address wasm_target);
+  void CallEphemeronKeyBarrier(Register object, Register address,
+                               SaveFPRegsMode fp_mode);

   void MoveNumber(Register dst, double value);
   void MoveNonSmi(Register dst, double value);

@@ -149,6 +149,78 @@ TEST(Shrinking) {
   CHECK_EQ(32, EphemeronHashTable::cast(weakmap->table())->Capacity());
 }

+namespace {
+bool EphemeronHashTableContainsKey(EphemeronHashTable table, HeapObject key) {
+  for (int i = 0; i < table.Capacity(); ++i) {
+    if (table->KeyAt(i) == key) return true;
+  }
+  return false;
+}
+}  // namespace
+
+TEST(WeakMapPromotion) {
+  LocalContext context;
+  Isolate* isolate = GetIsolateFrom(&context);
+  Factory* factory = isolate->factory();
+  HandleScope scope(isolate);
+  Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap();
+
+  CcTest::CollectAllGarbage();
+  CHECK(ObjectInYoungGeneration(weakmap->table()));
+
+  Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+  Handle<JSObject> object = factory->NewJSObjectFromMap(map);
+  Handle<Smi> smi(Smi::FromInt(1), isolate);
+  int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+  JSWeakCollection::Set(weakmap, object, smi, object_hash);
+
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+  CcTest::CollectAllGarbage();
+
+  CHECK(ObjectInYoungGeneration(*object));
+  CHECK(!ObjectInYoungGeneration(weakmap->table()));
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+
+  CcTest::CollectAllGarbage();
+  CHECK(!ObjectInYoungGeneration(*object));
+  CHECK(!ObjectInYoungGeneration(weakmap->table()));
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+}
+
+TEST(WeakMapScavenge) {
+  LocalContext context;
+  Isolate* isolate = GetIsolateFrom(&context);
+  Factory* factory = isolate->factory();
+  HandleScope scope(isolate);
+  Handle<JSWeakMap> weakmap = isolate->factory()->NewJSWeakMap();
+
+  CcTest::CollectAllGarbage();
+  CHECK(ObjectInYoungGeneration(weakmap->table()));
+
+  Handle<Map> map = factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+  Handle<JSObject> object = factory->NewJSObjectFromMap(map);
+  Handle<Smi> smi(Smi::FromInt(1), isolate);
+  int32_t object_hash = object->GetOrCreateHash(isolate)->value();
+  JSWeakCollection::Set(weakmap, object, smi, object_hash);
+
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+
+  heap::GcAndSweep(isolate->heap(), NEW_SPACE);
+  CHECK(ObjectInYoungGeneration(*object));
+  CHECK(!ObjectInYoungGeneration(weakmap->table()));
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+
+  heap::GcAndSweep(isolate->heap(), NEW_SPACE);
+  CHECK(!ObjectInYoungGeneration(*object));
+  CHECK(!ObjectInYoungGeneration(weakmap->table()));
+  CHECK(EphemeronHashTableContainsKey(
+      EphemeronHashTable::cast(weakmap->table()), *object));
+}
+
 // Test that weak map values on an evacuation candidate which are not reachable
 // by other paths are correctly recorded in the slots buffer.